Compare commits

..

5 Commits

Author SHA1 Message Date
Elizabeth Thompson
44d3f769cb fix: Use DuckDB versions that match known-working motherduck config
Updated to use duckdb==0.10.2 and duckdb-engine==0.12.1 — the same
versions used in the motherduck configuration, which are known to work
with Superset's DuckDB integration.
2025-09-25 18:01:50 -07:00
Elizabeth Thompson
1cf14f1081 fix: Pin compatible DuckDB versions to resolve showtime examples loading
This fixes the 'unhashable type: _duckdb.typing.DuckDBPyType' error that
prevents DuckDB connections from working in showtime ephemeral environments.

Problem: The current DuckDB versions (duckdb==1.3.2, duckdb-engine==0.17.0)
have type system compatibility issues with Superset's DuckDB integration.

Solution: Pin to older, stable versions that are known to work:
- duckdb==1.0.0 (stable core library)
- duckdb-engine==0.11.2 (compatible SQLAlchemy driver)

This enables showtime environments to successfully connect to DuckDB databases
and load examples from the pre-built examples.duckdb file.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-25 18:00:36 -07:00
Elizabeth Thompson
482b7108ae Revert "fix: Add DuckDB examples URI to ECS task definition for showtime environments"
This reverts commit 55e3da478a.
2025-09-25 09:50:04 -07:00
Elizabeth Thompson
b2129b4277 fix: Move LOAD_EXAMPLES_DUCKDB ARG to global scope in Dockerfile
This fixes an issue where the LOAD_EXAMPLES_DUCKDB build argument was
being ignored during multi-stage Docker builds, causing showtime
ephemeral environments to fail loading examples.

Problem: The ARG was declared inside the python-common stage, making it
unavailable when passed via --build-arg to the showtime target.

Solution: Move the ARG declaration to global scope (before any FROM) and
re-declare it in the python-common stage where it's used. This follows
Docker's multi-stage build ARG scope rules.

This fix ensures that when showtime builds with:
  docker buildx build --build-arg LOAD_EXAMPLES_DUCKDB=true --target showtime

The DuckDB examples file will actually be downloaded during the build.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-24 15:10:30 -07:00
Elizabeth Thompson
55e3da478a fix: Add DuckDB examples URI to ECS task definition for showtime environments
This adds the missing SUPERSET__SQLALCHEMY_EXAMPLES_URI environment variable
to the ECS task definition used by showtime ephemeral environments.

Without this configuration, showtime environments were unable to load examples
because the container didn't know where to find the DuckDB examples file that
was downloaded during Docker build (via LOAD_EXAMPLES_DUCKDB=true).

The URI matches what's used in docker-compose.yml and docker-compose-light.yml,
with read-only access mode for safety in ephemeral environments.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-23 18:12:57 -07:00
15 changed files with 68 additions and 117 deletions

View File

@@ -26,6 +26,9 @@ ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64}
# Include translations in the final build
ARG BUILD_TRANSLATIONS="false"
# Build arg to pre-populate examples DuckDB file
ARG LOAD_EXAMPLES_DUCKDB="false"
######################################################################
# superset-node-ci used as a base for building frontend assets and CI
######################################################################
@@ -143,8 +146,8 @@ RUN if [ "${BUILD_TRANSLATIONS}" = "true" ]; then \
######################################################################
FROM python-base AS python-common
# Build arg to pre-populate examples DuckDB file
ARG LOAD_EXAMPLES_DUCKDB="false"
# Re-declare build arg to receive it in this stage
ARG LOAD_EXAMPLES_DUCKDB
ENV SUPERSET_HOME="/app/superset_home" \
HOME="/app/superset_home" \

View File

@@ -163,7 +163,7 @@ services:
# configuring the dev-server to use the host.docker.internal to connect to the backend
superset: "http://superset-light:8088"
# Webpack dev server configuration
WEBPACK_DEVSERVER_HOST: "${WEBPACK_DEVSERVER_HOST:-0.0.0.0}"
WEBPACK_DEVSERVER_HOST: "${WEBPACK_DEVSERVER_HOST:-127.0.0.1}"
WEBPACK_DEVSERVER_PORT: "${WEBPACK_DEVSERVER_PORT:-9000}"
ports:
- "${NODE_PORT:-9001}:9000" # Parameterized port, accessible on all interfaces

View File

@@ -76,7 +76,7 @@ dependencies = [
"packaging",
# --------------------------
# pandas and related (wanting pandas[performance] without numba as it's 100+MB and not needed)
"pandas[excel]>=2.0.3, <2.2",
"pandas[excel]>=2.0.3, <2.1",
"bottleneck", # recommended performance dependency for pandas, see https://pandas.pydata.org/docs/getting_started/install.html#performance-dependencies-recommended
# --------------------------
"parsedatetime",
@@ -133,7 +133,7 @@ denodo = ["denodo-sqlalchemy~=1.0.6"]
dremio = ["sqlalchemy-dremio>=1.2.1, <4"]
drill = ["sqlalchemy-drill>=1.1.4, <2"]
druid = ["pydruid>=0.6.5,<0.7"]
duckdb = ["duckdb-engine>=0.17.0"]
duckdb = ["duckdb==0.10.2", "duckdb-engine==0.12.1"]
dynamodb = ["pydynamodb>=0.4.2"]
solr = ["sqlalchemy-solr >= 0.2.0"]
elasticsearch = ["elasticsearch-dbapi>=0.2.9, <0.3.0"]

View File

@@ -160,7 +160,6 @@ greenlet==3.1.1
# via
# apache-superset (pyproject.toml)
# shillelagh
# sqlalchemy
gunicorn==23.0.0
# via apache-superset (pyproject.toml)
h11==0.16.0
@@ -267,7 +266,7 @@ packaging==25.0
# limits
# marshmallow
# shillelagh
pandas==2.1.4
pandas==2.0.3
# via apache-superset (pyproject.toml)
paramiko==3.5.1
# via

View File

@@ -331,7 +331,6 @@ greenlet==3.1.1
# apache-superset
# gevent
# shillelagh
# sqlalchemy
grpcio==1.71.0
# via
# apache-superset
@@ -537,7 +536,7 @@ packaging==25.0
# pytest
# shillelagh
# sqlalchemy-bigquery
pandas==2.1.4
pandas==2.0.3
# via
# -c requirements/base-constraint.txt
# apache-superset

View File

@@ -19,6 +19,7 @@
import { t } from '@superset-ui/core';
import {
ControlPanelConfig,
ControlStateMapping,
ControlSubSectionHeader,
D3_FORMAT_DOCS,
D3_FORMAT_OPTIONS,
@@ -196,6 +197,15 @@ const config: ControlPanelConfig = {
],
},
],
onInit(state: ControlStateMapping) {
return {
...state,
row_limit: {
...state.row_limit,
value: state.row_limit.default,
},
};
},
formDataOverrides: formData => ({
...formData,
metric: getStandardizedControls().shiftMetric(),

View File

@@ -325,7 +325,6 @@ export default function transformProps(chartProps: EchartsGanttChartProps) {
show: true,
position: 'start',
formatter: '{b}',
color: theme.colorText,
},
data: categoryLines,
},

View File

@@ -47,10 +47,7 @@ import {
isDerivedSeries,
} from '@superset-ui/chart-controls';
import type { EChartsCoreOption } from 'echarts/core';
import type {
LineStyleOption,
CallbackDataParams,
} from 'echarts/types/src/util/types';
import type { LineStyleOption } from 'echarts/types/src/util/types';
import type { SeriesOption } from 'echarts';
import {
EchartsTimeseriesChartProps,
@@ -578,31 +575,16 @@ export default function transformProps(
const xValue: number = richTooltip
? params[0].value[xIndex]
: params.value[xIndex];
const forecastValue: CallbackDataParams[] = richTooltip
? params
: [params];
const forecastValue: any[] = richTooltip ? params : [params];
const sortedKeys = extractTooltipKeys(
forecastValue,
yIndex,
richTooltip,
tooltipSortByMetric,
);
const filteredForecastValue = forecastValue.filter(
(item: CallbackDataParams) =>
!annotationLayers.some(
(annotation: AnnotationLayer) =>
item.seriesName === annotation.name,
),
);
const forecastValues: Record<string, ForecastValue> =
extractForecastValuesFromTooltipParams(forecastValue, isHorizontal);
const filteredForecastValues: Record<string, ForecastValue> =
extractForecastValuesFromTooltipParams(
filteredForecastValue,
isHorizontal,
);
const isForecast = Object.values(forecastValues).some(
value =>
value.forecastTrend || value.forecastLower || value.forecastUpper,
@@ -613,7 +595,7 @@ export default function transformProps(
: (getCustomFormatter(customFormatters, metrics) ?? defaultFormatter);
const rows: string[][] = [];
const total = Object.values(filteredForecastValues).reduce(
const total = Object.values(forecastValues).reduce(
(acc, value) =>
value.observation !== undefined ? acc + value.observation : acc,
0,
@@ -635,16 +617,7 @@ export default function transformProps(
seriesName: key,
formatter,
});
const annotationRow = annotationLayers.some(
item => item.name === key,
);
if (
showPercentage &&
value.observation !== undefined &&
!annotationRow
) {
if (showPercentage && value.observation !== undefined) {
row.push(
percentFormatter.format(value.observation / (total || 1)),
);

View File

@@ -257,7 +257,6 @@ describe('Gantt transformProps', () => {
show: true,
position: 'start',
formatter: '{b}',
color: 'rgba(0,0,0,0.88)',
},
lineStyle: expect.objectContaining({
color: '#00000000',

View File

@@ -460,26 +460,48 @@ const ExtraOptions = ({
),
children: (
<>
<StyledInputContainer
css={!isFileUploadSupportedByEngine ? no_margin_bottom : {}}
>
<StyledInputContainer>
<div className="control-label">{t('Secure extra')}</div>
<div className="input-container">
<Checkbox
id="per_user_caching"
name="per_user_caching"
indeterminate={false}
checked={!!extraJson?.per_user_caching}
onChange={onExtraInputChange}
>
{t('Per user caching')}
</Checkbox>
<InfoTooltip
tooltip={t(
'Cache data separately for each user based on their data access roles and permissions. ' +
'When disabled, a single cache will be used for all users.',
)}
<StyledJsonEditor
name="masked_encrypted_extra"
value={db?.masked_encrypted_extra || ''}
placeholder={t('Secure extra')}
onChange={(json: string) =>
onEditorChange({ json, name: 'masked_encrypted_extra' })
}
width="100%"
height="160px"
annotations={secureExtraAnnotations}
/>
</div>
<div className="helper">
<div>
{t(
'JSON string containing additional connection configuration. ' +
'This is used to provide connection information for systems ' +
'like Hive, Presto and BigQuery which do not conform to the ' +
'username:password syntax normally used by SQLAlchemy.',
)}
</div>
</div>
</StyledInputContainer>
<StyledInputContainer>
<div className="control-label">{t('Root certificate')}</div>
<div className="input-container">
<Input.TextArea
name="server_cert"
value={db?.server_cert || ''}
placeholder={t('Enter CA_BUNDLE')}
onChange={onTextChange}
/>
</div>
<div className="helper">
{t(
'Optional CA_BUNDLE contents to validate HTTPS requests. Only ' +
'available on certain database engines.',
)}
</div>
</StyledInputContainer>
<StyledInputContainer
css={!isFileUploadSupportedByEngine ? no_margin_bottom : {}}
@@ -547,49 +569,6 @@ const ExtraOptions = ({
</div>
</StyledInputContainer>
)}
<StyledInputContainer>
<div className="control-label">{t('Secure extra')}</div>
<div className="input-container">
<StyledJsonEditor
name="masked_encrypted_extra"
value={db?.masked_encrypted_extra || ''}
placeholder={t('Secure extra')}
onChange={(json: string) =>
onEditorChange({ json, name: 'masked_encrypted_extra' })
}
width="100%"
height="160px"
annotations={secureExtraAnnotations}
/>
</div>
<div className="helper">
<div>
{t(
'JSON string containing additional connection configuration. ' +
'This is used to provide connection information for systems ' +
'like Hive, Presto and BigQuery which do not conform to the ' +
'username:password syntax normally used by SQLAlchemy.',
)}
</div>
</div>
</StyledInputContainer>
<StyledInputContainer>
<div className="control-label">{t('Root certificate')}</div>
<div className="input-container">
<Input.TextArea
name="server_cert"
value={db?.server_cert || ''}
placeholder={t('Enter CA_BUNDLE')}
onChange={onTextChange}
/>
</div>
<div className="helper">
{t(
'Optional CA_BUNDLE contents to validate HTTPS requests. Only ' +
'available on certain database engines.',
)}
</div>
</StyledInputContainer>
</>
),
},

View File

@@ -246,7 +246,6 @@ export interface ExtraJson {
disable_data_preview?: boolean; // in SQL Lab
disable_drill_to_detail?: boolean;
allow_multi_catalog?: boolean;
per_user_caching?: boolean; // in Security
engine_params?: {
catalog?: Record<string, string>;
connect_args?: {

View File

@@ -72,7 +72,7 @@ class ExcelReader(BaseDataReader):
"na_values": self._options.get("null_values")
if self._options.get("null_values") # None if an empty list
else None,
"parse_dates": self._options.get("column_dates") or False,
"parse_dates": self._options.get("column_dates"),
"skiprows": self._options.get("skip_rows", 0),
"sheet_name": self._options.get("sheet_name", 0),
"nrows": self._options.get("rows_to_read"),

View File

@@ -454,19 +454,13 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
cache_dict["annotation_layers"] = annotation_layers
# Add an impersonation key to cache if impersonation is enabled on the db
# or if the CACHE_QUERY_BY_USER flag is on or per_user_caching is enabled on
# the database
# or if the CACHE_QUERY_BY_USER flag is on
try:
database = self.datasource.database # type: ignore
extra = json.loads(database.extra or "{}")
if (
(
feature_flag_manager.is_feature_enabled("CACHE_IMPERSONATION")
and database.impersonate_user
)
or feature_flag_manager.is_feature_enabled("CACHE_QUERY_BY_USER")
or extra.get("per_user_caching", False)
):
feature_flag_manager.is_feature_enabled("CACHE_IMPERSONATION")
and database.impersonate_user
) or feature_flag_manager.is_feature_enabled("CACHE_QUERY_BY_USER"):
if key := database.db_engine_spec.get_impersonation_key(
getattr(g, "user", None)
):

View File

@@ -831,7 +831,6 @@ class ImportV1DatabaseExtraSchema(Schema):
disable_data_preview = fields.Boolean(required=False)
disable_drill_to_detail = fields.Boolean(required=False)
allow_multi_catalog = fields.Boolean(required=False)
per_user_caching = fields.Boolean(required=False)
version = fields.String(required=False, allow_none=True)
schema_options = fields.Dict(keys=fields.Str(), values=fields.Raw())

View File

@@ -105,8 +105,6 @@ def data_loader(
pandas_loader_configuration: PandasLoaderConfigurations,
table_to_df_convertor: TableToDfConvertor,
) -> DataLoader:
if example_db_engine.dialect.name == PRESTO:
example_db_engine.dialect.get_view_names = Mock(return_value=[])
return PandasDataLoader(
example_db_engine, pandas_loader_configuration, table_to_df_convertor
)