Compare commits

...

32 Commits

Author SHA1 Message Date
Beto Dealmeida
0917424473 Small fixes 2025-10-10 15:06:10 -04:00
Beto Dealmeida
4e9ea4b17a Fix cache 2025-07-23 22:43:14 -04:00
Beto Dealmeida
30deae5a86 Small fixes 2025-07-21 18:23:39 -04:00
Beto Dealmeida
cbda9b7e2f Move dialect to shillelagh 1.4.1 2025-07-21 17:43:14 -04:00
Beto Dealmeida
92aa057787 Checkpoint 2025-07-18 14:49:11 -04:00
Beto Dealmeida
983227da39 Checkpoint 2025-07-18 14:37:13 -04:00
Beto Dealmeida
e3f1ff71af Checkpoint 2025-07-18 14:31:01 -04:00
Beto Dealmeida
6a3ee1f44b Sync metrics working 2025-07-18 11:07:20 -04:00
Beto Dealmeida
37d2ab7afc Checkpoint 2025-07-18 10:19:25 -04:00
Beto Dealmeida
428ac7a370 Checkpoint 2025-07-18 10:05:45 -04:00
Beto Dealmeida
daee150811 Sync metrics 2025-07-18 09:55:32 -04:00
Beto Dealmeida
1166193af7 Checkpoint 2025-07-18 09:15:49 -04:00
Beto Dealmeida
a2c8856592 Checkpoint 2025-07-17 22:20:19 -04:00
Beto Dealmeida
17c1fff32e Checkpoint 2025-07-17 22:12:15 -04:00
Beto Dealmeida
8432ee47d6 Add cache 2025-07-17 22:02:08 -04:00
Beto Dealmeida
a8ccce5762 Checkpoint 2025-07-17 21:49:22 -04:00
Beto Dealmeida
78ce22d751 Checkpoint 2025-07-17 21:39:02 -04:00
Beto Dealmeida
e51b352cff WIP 2025-07-17 21:30:27 -04:00
Beto Dealmeida
ff87aa155a Checkpoint 2025-07-17 20:13:40 -04:00
Beto Dealmeida
1bdfb7db5f Checkpoint 2025-07-17 19:28:31 -04:00
Beto Dealmeida
37b3b7e03a Checkpoint 2025-07-17 18:41:14 -04:00
Beto Dealmeida
1d82e85a55 More charts 2025-07-17 18:31:03 -04:00
Beto Dealmeida
3430d69972 Checkpoint 2025-07-17 18:25:17 -04:00
Beto Dealmeida
81cf3ca024 Checkpoint 2025-07-17 18:21:34 -04:00
Beto Dealmeida
b17ea1c875 Checkpoint 2025-07-17 16:58:16 -04:00
Beto Dealmeida
25b06dbedb Checkpoint 2025-07-17 16:40:45 -04:00
Beto Dealmeida
b27d6dc9b6 Checkpoint 2025-07-17 16:13:01 -04:00
Beto Dealmeida
de13b1cf44 Disable instead of hide 2025-07-17 15:52:51 -04:00
Beto Dealmeida
83c8c4d7e5 Works 2025-07-17 12:02:48 -04:00
Beto Dealmeida
7dea14a0c6 wIP 2025-07-17 11:03:37 -04:00
Beto Dealmeida
8b5bd0f58f DAO and API 2025-07-17 11:03:37 -04:00
Beto Dealmeida
214f0fa5a5 WIP 2025-07-17 11:03:37 -04:00
26 changed files with 2304 additions and 125 deletions

View File

@@ -30,7 +30,9 @@ with open(PACKAGE_JSON) as package_file:
def get_git_sha() -> str:
try:
output = subprocess.check_output(["git", "rev-parse", "HEAD"]) # noqa: S603, S607
output = subprocess.check_output(
["git", "rev-parse", "HEAD"]
) # noqa: S603, S607
return output.decode().strip()
except Exception: # pylint: disable=broad-except
return ""
@@ -67,7 +69,7 @@ setup(
"superset = superset.extensions.metadb:SupersetAPSWDialect",
],
"shillelagh.adapter": [
"superset=superset.extensions.metadb:SupersetShillelaghAdapter"
"superset = superset.extensions.metadb:SupersetShillelaghAdapter",
],
},
download_url="https://www.apache.org/dist/superset/" + version_string,

View File

@@ -47,6 +47,149 @@ type Control = {
default?: unknown;
};
// Semantic layer verification functions - will be set from the main app
// via setSemanticLayerUtilities(); they remain null until injection, so
// every use below must treat null as "feature unavailable".
let withAsyncVerification: any = null;
let createMetricsVerification: any = null;
let createColumnsVerification: any = null;
let createSemanticLayerOnChange: any = null;
let SEMANTIC_LAYER_CONTROL_FIELDS: any = null;
// Notification system for when utilities are set: each enhanced control
// registers an invalidateCache callback here so its cached control type
// can be rebuilt once the utilities arrive.
const enhancedControls: Array<{
controlName: string;
invalidateCache: () => void;
}> = [];
// Export function to set semantic layer utilities from main app
export function setSemanticLayerUtilities(utilities: {
withAsyncVerification: any;
createMetricsVerification: any;
createColumnsVerification: any;
createSemanticLayerOnChange: any;
SEMANTIC_LAYER_CONTROL_FIELDS: any;
}) {
({
withAsyncVerification,
createMetricsVerification,
createColumnsVerification,
createSemanticLayerOnChange,
SEMANTIC_LAYER_CONTROL_FIELDS,
} = utilities);
// Notify all enhanced controls that utilities are now available
enhancedControls.forEach(control => {
control.invalidateCache();
});
}
/**
 * Whether the datasource's database declares support for dynamic columns,
 * i.e. whether semantic-layer verification applies to it.
 */
function needsSemanticLayerVerification(datasource: Dataset): boolean {
  const database =
    datasource && 'database' in datasource
      ? (datasource.database as any)
      : undefined;
  return Boolean(database?.engine_information?.supports_dynamic_columns);
}
/**
 * Enhance a control with semantic-layer verification when the host app has
 * injected the verification utilities (see setSemanticLayerUtilities).
 *
 * The returned control is lazily enhanced: the enhanced control type is
 * built on first access of `.type` after the utilities become available,
 * and cached until the utilities are (re)injected, at which point the
 * registered invalidateCache callback resets the cache.
 *
 * @param baseControl the shared control config to wrap
 * @param controlName the form-data field name the control maps to
 * @param verificationType whether to verify metrics or columns
 */
function enhanceControlWithSemanticLayer(
baseControl: any,
controlName: string,
verificationType: 'metrics' | 'columns',
) {
// Cache for the enhanced control type; rebuilt when utilities change.
let cachedEnhancedType: any = null;
let utilitiesWereAvailable = false;
// Register with notification system so setSemanticLayerUtilities() can
// invalidate the cache when utilities are injected later.
enhancedControls.push({
controlName,
invalidateCache: () => {
cachedEnhancedType = null;
utilitiesWereAvailable = false;
},
});
// Return a control that will be enhanced at runtime if utilities are available
return {
...baseControl,
// Override the type to use a function that checks for enhancement at runtime
get type() {
// Check if utilities became available since last call
const utilitiesAvailableNow = !!withAsyncVerification;
if (utilitiesAvailableNow) {
// If utilities just became available or we haven't cached yet, create enhanced control
if (!utilitiesWereAvailable || !cachedEnhancedType) {
const verificationFn =
verificationType === 'metrics'
? createMetricsVerification(controlName)
: createColumnsVerification(controlName);
cachedEnhancedType = withAsyncVerification({
baseControl: baseControl.type,
verify: verificationFn,
onChange: createSemanticLayerOnChange(
controlName,
SEMANTIC_LAYER_CONTROL_FIELDS,
),
showLoadingState: true,
});
utilitiesWereAvailable = true;
}
return cachedEnhancedType;
}
// Utilities are not (or no longer) available: fall back to the base
// type and reset the flag so a later injection rebuilds the cache.
utilitiesWereAvailable = false;
return baseControl.type;
},
mapStateToProps: (state: any, controlState: any) => {
// Call the original mapStateToProps if it exists
const originalProps = baseControl.mapStateToProps
? baseControl.mapStateToProps(state, controlState)
: {};
// Only add semantic layer props if utilities are available
if (withAsyncVerification) {
const needsVerification = needsSemanticLayerVerification(
state.datasource,
);
// Check if there's existing data that needs verification
// (non-empty array value, or a non-null scalar value).
const hasExistingData =
controlState?.value &&
((Array.isArray(controlState.value) &&
controlState.value.length > 0) ||
(!Array.isArray(controlState.value) &&
controlState.value !== null &&
controlState.value !== undefined));
return {
...originalProps,
needAsyncVerification: needsVerification,
// Only enable initial verification if there's existing data (like saved charts)
// For new charts, rely only on onChange to prevent duplicate requests
skipEffectVerification: !hasExistingData,
form_data: state.form_data,
datasource: state.datasource, // Pass datasource to verification function
// Add a flag to indicate this is a fresh chart that needs initial verification
triggerInitialVerification: needsVerification && !hasExistingData,
};
}
return originalProps;
},
};
}
/*
* Note: Previous to the commit that introduced this comment, the shared controls module
* would check feature flags at module execution time and expose a different control
@@ -70,7 +213,7 @@ function filterOptions(
);
}
export const dndGroupByControl: SharedControlConfig<
const baseDndGroupByControl: SharedControlConfig<
'DndColumnSelect' | 'SelectControl',
ColumnMeta
> = {
@@ -123,14 +266,26 @@ export const dndGroupByControl: SharedControlConfig<
commaChoosesOption: false,
};
export const dndColumnsControl: typeof dndGroupByControl = {
...dndGroupByControl,
export const dndGroupByControl = enhanceControlWithSemanticLayer(
baseDndGroupByControl,
'groupby',
'columns',
);
const baseDndColumnsControl: typeof baseDndGroupByControl = {
...baseDndGroupByControl,
label: t('Columns'),
description: t('Add dataset columns here to group the pivot table columns.'),
};
export const dndSeriesControl: typeof dndGroupByControl = {
...dndGroupByControl,
export const dndColumnsControl = enhanceControlWithSemanticLayer(
baseDndColumnsControl,
'columns',
'columns',
);
const baseDndSeriesControl: typeof baseDndGroupByControl = {
...baseDndGroupByControl,
label: t('Dimension'),
multi: false,
default: null,
@@ -140,8 +295,14 @@ export const dndSeriesControl: typeof dndGroupByControl = {
),
};
export const dndEntityControl: typeof dndGroupByControl = {
...dndGroupByControl,
export const dndSeriesControl = enhanceControlWithSemanticLayer(
baseDndSeriesControl,
'series',
'columns',
);
const baseDndEntityControl: typeof baseDndGroupByControl = {
...baseDndGroupByControl,
label: t('Entity'),
default: null,
multi: false,
@@ -149,6 +310,12 @@ export const dndEntityControl: typeof dndGroupByControl = {
description: t('This defines the element to be plotted on the chart'),
};
export const dndEntityControl = enhanceControlWithSemanticLayer(
baseDndEntityControl,
'entity',
'columns',
);
export const dndAdhocFilterControl: SharedControlConfig<
'DndFilterSelect' | 'AdhocFilterControl'
> = {
@@ -170,7 +337,7 @@ export const dndAdhocFilterControl: SharedControlConfig<
...datePickerInAdhocFilterMixin,
};
export const dndAdhocMetricsControl: SharedControlConfig<
const baseDndAdhocMetricsControl: SharedControlConfig<
'DndMetricSelect' | 'MetricsControl'
> = {
type: 'DndMetricSelect',
@@ -190,8 +357,14 @@ export const dndAdhocMetricsControl: SharedControlConfig<
),
};
export const dndAdhocMetricControl: typeof dndAdhocMetricsControl = {
...dndAdhocMetricsControl,
export const dndAdhocMetricsControl = enhanceControlWithSemanticLayer(
baseDndAdhocMetricsControl,
'metrics',
'metrics',
);
const baseDndAdhocMetricControl: typeof baseDndAdhocMetricsControl = {
...baseDndAdhocMetricsControl,
multi: false,
label: t('Metric'),
description: t(
@@ -201,6 +374,12 @@ export const dndAdhocMetricControl: typeof dndAdhocMetricsControl = {
),
};
export const dndAdhocMetricControl = enhanceControlWithSemanticLayer(
baseDndAdhocMetricControl,
'metric',
'metrics',
);
export const dndTooltipColumnsControl: typeof dndColumnsControl = {
...dndColumnsControl,
label: t('Tooltip (columns)'),
@@ -214,13 +393,19 @@ export const dndTooltipMetricsControl: typeof dndAdhocMetricsControl = {
validators: [],
};
export const dndAdhocMetricControl2: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
const baseDndAdhocMetricControl2: typeof baseDndAdhocMetricControl = {
...baseDndAdhocMetricControl,
label: t('Right Axis Metric'),
clearable: true,
description: t('Select a metric to display on the right axis'),
};
export const dndAdhocMetricControl2 = enhanceControlWithSemanticLayer(
baseDndAdhocMetricControl2,
'metric_2',
'metrics',
);
export const dndSortByControl: SharedControlConfig<
'DndMetricSelect' | 'MetricsControl'
> = {
@@ -240,15 +425,21 @@ export const dndSortByControl: SharedControlConfig<
}),
};
export const dndSizeControl: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
const baseDndSizeControl: typeof baseDndAdhocMetricControl = {
...baseDndAdhocMetricControl,
label: t('Bubble Size'),
description: t('Metric used to calculate bubble size'),
default: null,
};
export const dndXControl: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
export const dndSizeControl = enhanceControlWithSemanticLayer(
baseDndSizeControl,
'size',
'metrics',
);
const baseDndXControl: typeof baseDndAdhocMetricControl = {
...baseDndAdhocMetricControl,
label: t('X Axis'),
description: t(
"The dataset column/metric that returns the values on your chart's x-axis.",
@@ -256,8 +447,14 @@ export const dndXControl: typeof dndAdhocMetricControl = {
default: null,
};
export const dndYControl: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
export const dndXControl = enhanceControlWithSemanticLayer(
baseDndXControl,
'x',
'metrics',
);
const baseDndYControl: typeof baseDndAdhocMetricControl = {
...baseDndAdhocMetricControl,
label: t('Y Axis'),
description: t(
"The dataset column/metric that returns the values on your chart's y-axis.",
@@ -265,14 +462,26 @@ export const dndYControl: typeof dndAdhocMetricControl = {
default: null,
};
export const dndSecondaryMetricControl: typeof dndAdhocMetricControl = {
...dndAdhocMetricControl,
export const dndYControl = enhanceControlWithSemanticLayer(
baseDndYControl,
'y',
'metrics',
);
const baseDndSecondaryMetricControl: typeof baseDndAdhocMetricControl = {
...baseDndAdhocMetricControl,
label: t('Color Metric'),
default: null,
validators: [],
description: t('A metric to use for color'),
};
export const dndSecondaryMetricControl = enhanceControlWithSemanticLayer(
baseDndSecondaryMetricControl,
'secondary_metric',
'metrics',
);
export const dndGranularitySqlaControl: typeof dndSeriesControl = {
...dndSeriesControl,
...temporalColumnMixin,
@@ -293,7 +502,13 @@ export const dndGranularitySqlaControl: typeof dndSeriesControl = {
valueKey: 'column_name',
};
export const dndXAxisControl: typeof dndGroupByControl = {
...dndGroupByControl,
const baseDndXAxisControl: typeof baseDndGroupByControl = {
...baseDndGroupByControl,
...xAxisMixin,
};
export const dndXAxisControl = enhanceControlWithSemanticLayer(
baseDndXAxisControl,
'x_axis',
'columns',
);

View File

@@ -0,0 +1,99 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
dndAdhocMetricsControl,
dndAdhocMetricControl,
dndAdhocMetricControl2,
dndGroupByControl,
dndColumnsControl,
} from './dndControls';
// Placeholders for the semantic-layer-enhanced controls. They are null here
// and are expected to be wired in from the main app; until then the
// enhanced variants are not usable.
const semanticLayerDndAdhocMetricsControl = null;
const semanticLayerDndAdhocMetricControl = null;
const semanticLayerDndAdhocMetricControl2 = null;
const semanticLayerDndGroupByControl = null;
const semanticLayerDndColumnsControl = null;
/**
 * Enhanced shared controls that include semantic layer verification
 * when using compatible datasources.
 *
 * Exposes both the original drag-and-drop controls and (once provided by
 * the main app) their semantic-layer counterparts under a single object.
 */
export const enhancedSharedControls = {
// Original controls
dndAdhocMetricsControl,
dndAdhocMetricControl,
dndAdhocMetricControl2,
dndGroupByControl,
dndColumnsControl,
// Enhanced controls with semantic layer verification (currently null
// placeholders — see above)
semanticLayerDndAdhocMetricsControl,
semanticLayerDndAdhocMetricControl,
semanticLayerDndAdhocMetricControl2,
semanticLayerDndGroupByControl,
semanticLayerDndColumnsControl,
};
/**
* Get the appropriate control based on datasource capabilities
*/
export function getSemanticLayerControl(
controlName: string,
datasource?: any,
): any {
// Check if datasource supports semantic layer verification
const supportsSemanticLayer =
datasource &&
'database' in datasource &&
datasource.database?.engine_information?.supports_dynamic_columns;
if (supportsSemanticLayer) {
switch (controlName) {
case 'dndAdhocMetricsControl':
return semanticLayerDndAdhocMetricsControl;
case 'dndAdhocMetricControl':
return semanticLayerDndAdhocMetricControl;
case 'dndAdhocMetricControl2':
return semanticLayerDndAdhocMetricControl2;
case 'dndGroupByControl':
return semanticLayerDndGroupByControl;
case 'dndColumnsControl':
return semanticLayerDndColumnsControl;
default:
break;
}
}
// Return original control for non-semantic layer datasources
switch (controlName) {
case 'dndAdhocMetricsControl':
return dndAdhocMetricsControl;
case 'dndAdhocMetricControl':
return dndAdhocMetricControl;
case 'dndAdhocMetricControl2':
return dndAdhocMetricControl2;
case 'dndGroupByControl':
return dndGroupByControl;
case 'dndColumnsControl':
return dndColumnsControl;
default:
return null;
}
}

View File

@@ -244,10 +244,12 @@ const config: ControlPanelConfig = {
controlState: ControlState,
) => {
const { controls } = state;
// Get the enhanced mapStateToProps from the DND control (includes semantic layer verification)
const originalMapStateToProps =
sharedControls?.groupby?.mapStateToProps;
const newState =
originalMapStateToProps?.(state, controlState) ?? {};
// Add table-specific validation while preserving semantic layer enhancements
newState.externalValidationErrors = validateAggControlValues(
controls,
[
@@ -300,26 +302,40 @@ const config: ControlPanelConfig = {
visibility: isAggMode,
resetOnHide: false,
mapStateToProps: (
{ controls, datasource, form_data }: ControlPanelState,
state: ControlPanelState,
controlState: ControlState,
) => ({
columns: datasource?.columns[0]?.hasOwnProperty('filterable')
) => {
const { controls, datasource, form_data } = state;
// Get the enhanced mapStateToProps from the DND control (includes semantic layer verification)
const originalMapStateToProps =
sharedControls?.metrics?.mapStateToProps;
const newState =
originalMapStateToProps?.(state, controlState) ?? {};
// Add table-specific props while preserving semantic layer enhancements
newState.columns = datasource?.columns[0]?.hasOwnProperty(
'filterable',
)
? (datasource as Dataset)?.columns?.filter(
(c: ColumnMeta) => c.filterable,
)
: datasource?.columns,
savedMetrics: defineSavedMetrics(datasource),
// current active adhoc metrics
selectedMetrics:
: datasource?.columns;
newState.savedMetrics = defineSavedMetrics(datasource);
newState.selectedMetrics =
form_data.metrics ||
(form_data.metric ? [form_data.metric] : []),
datasource,
externalValidationErrors: validateAggControlValues(controls, [
controls.groupby?.value,
controls.percent_metrics?.value,
controlState.value,
]),
}),
(form_data.metric ? [form_data.metric] : []);
newState.datasource = datasource;
newState.externalValidationErrors = validateAggControlValues(
controls,
[
controls.groupby?.value,
controls.percent_metrics?.value,
controlState.value,
],
);
return newState;
},
rerender: ['groupby', 'percent_metrics'],
},
},

View File

@@ -73,7 +73,12 @@ import { DatabaseSelector } from '../DatabaseSelector';
import CollectionTable from './CollectionTable';
import Fieldset from './Fieldset';
import Field from './Field';
import { fetchSyncedColumns, updateColumns } from './utils';
import {
fetchSyncedColumns,
fetchSyncedMetrics,
updateColumns,
updateMetrics,
} from './utils';
const extensionsRegistry = getExtensionsRegistry();
@@ -654,6 +659,7 @@ class DatasourceEditor extends PureComponent {
col => !!col.expression,
),
metadataLoading: false,
metricsLoading: false,
activeTabKey: TABS_KEYS.SOURCE,
datasourceType: props.datasource.sql
? DATASOURCE_TYPES.virtual.key
@@ -667,6 +673,7 @@ class DatasourceEditor extends PureComponent {
this.tableChangeAndSyncMetadata =
this.tableChangeAndSyncMetadata.bind(this);
this.syncMetadata = this.syncMetadata.bind(this);
this.syncMetrics = this.syncMetrics.bind(this);
this.setColumns = this.setColumns.bind(this);
this.validateAndChange = this.validateAndChange.bind(this);
this.handleTabSelect = this.handleTabSelect.bind(this);
@@ -844,6 +851,32 @@ class DatasourceEditor extends PureComponent {
}
}
async syncMetrics() {
const { datasource } = this.state;
this.setState({ metricsLoading: true });
try {
const newMetrics = await fetchSyncedMetrics(datasource);
const metricChanges = updateMetrics(
datasource.metrics,
newMetrics,
this.props.addSuccessToast,
);
this.onDatasourceChange({
...datasource,
metrics: metricChanges.finalMetrics,
});
this.props.addSuccessToast(t('Metrics have been synced'));
this.setState({ metricsLoading: false });
} catch (error) {
const { error: clientError, statusText } =
await getClientErrorObject(error);
this.props.addDangerToast(
clientError || statusText || t('An error has occurred'),
);
this.setState({ metricsLoading: false });
}
}
findDuplicates(arr, accessor) {
const seen = {};
const dups = [];
@@ -1702,7 +1735,29 @@ class DatasourceEditor extends PureComponent {
title={t('Metrics')}
/>
),
children: this.renderMetricCollection(),
children: (
<div>
{this.state.datasource.database?.backend === 'metricflow' && (
<ColumnButtonWrapper>
<StyledButtonWrapper>
<Button
buttonSize="small"
buttonStyle="tertiary"
onClick={this.syncMetrics}
className="sync-metrics-from-source"
disabled={this.state.isEditMode}
loading={this.state.metricsLoading}
>
<Icons.DatabaseOutlined iconSize="m" />
{t('Sync metrics from source')}
</Button>
</StyledButtonWrapper>
</ColumnButtonWrapper>
)}
{this.renderMetricCollection()}
{this.state.metricsLoading && <Loading />}
</div>
),
},
{
key: TABS_KEYS.COLUMNS,

View File

@@ -132,6 +132,81 @@ export function updateColumns(prevCols, newCols, addSuccessToast) {
return columnChanges;
}
/**
 * Diff the datasource's current metrics against metrics fetched from the
 * source of truth and compute the merged list, raising success toasts for
 * added/updated and deleted metrics.
 *
 * Bug fix: the previous implementation mutated the objects inside
 * `prevMetrics` via Object.assign; merged entries are now fresh copies so
 * the caller's data is left untouched.
 *
 * @param {Array} prevMetrics metrics currently stored on the datasource
 *   (objects with metric_name, expression, verbose_name, description, ...)
 * @param {Array} newMetrics metrics fetched from the source
 * @param {Function} addSuccessToast toast callback used to report changes
 * @returns {{finalMetrics: Array, newOrUpdatedMetrics: Array, deletedMetrics: Array}}
 */
export function updateMetrics(prevMetrics, newMetrics, addSuccessToast) {
  const sourceMetricNames = newMetrics.map(metric => metric.metric_name);
  // Index existing metrics by name for O(1) lookup.
  const currentMetrics = prevMetrics.reduce((agg, metric) => {
    // eslint-disable-next-line no-param-reassign
    agg[metric.metric_name] = metric;
    return agg;
  }, {});
  // A metric counts as new/updated when absent locally or when any of the
  // tracked fields differ from the local copy.
  const newOrUpdatedMetrics = newMetrics.filter(metric => {
    const currentMetric = currentMetrics[metric.metric_name];
    if (!currentMetric) {
      // New metric
      return true;
    }
    return (
      metric.expression !== currentMetric.expression ||
      metric.verbose_name !== currentMetric.verbose_name ||
      metric.description !== currentMetric.description
    );
  });
  // Local metrics that no longer exist at the source.
  const deletedMetrics = prevMetrics.filter(
    metric => !sourceMetricNames.includes(metric.metric_name),
  );
  // Surviving metrics keep their local extra fields but take the source's
  // values; build fresh objects instead of mutating the caller's metrics.
  const survivors = prevMetrics
    .filter(metric => sourceMetricNames.includes(metric.metric_name))
    .map(metric => {
      const sourceMetric = newMetrics.find(
        m => m.metric_name === metric.metric_name,
      );
      return sourceMetric ? { ...metric, ...sourceMetric } : { ...metric };
    });
  // Brand-new metrics (present at the source, absent locally).
  const brandNewMetrics = newOrUpdatedMetrics.filter(
    metric => !currentMetrics[metric.metric_name],
  );
  const finalMetrics = [...survivors, ...brandNewMetrics];
  if (newOrUpdatedMetrics.length > 0) {
    addSuccessToast(
      tn(
        'Metric %s was added',
        'Metrics %s were added',
        newOrUpdatedMetrics.length,
        newOrUpdatedMetrics.map(metric => metric.metric_name).join(', '),
      ),
    );
  }
  if (deletedMetrics.length > 0) {
    addSuccessToast(
      tn(
        'Metric %s was deleted',
        'Metrics %s were deleted',
        deletedMetrics.length,
        deletedMetrics.map(metric => metric.metric_name).join(', '),
      ),
    );
  }
  return {
    finalMetrics,
    newOrUpdatedMetrics,
    deletedMetrics,
  };
}
export async function fetchSyncedColumns(datasource) {
const params = {
datasource_type: datasource.type || datasource.datasource_type,
@@ -155,3 +230,9 @@ export async function fetchSyncedColumns(datasource) {
const { json } = await SupersetClient.get({ endpoint });
return json;
}
// Ask the backend to sync metrics for the given dataset and return the
// resulting metric list from the API response.
export async function fetchSyncedMetrics(datasource) {
  const response = await SupersetClient.put({
    endpoint: `/api/v1/dataset/${datasource.id}/sync_metrics`,
  });
  return response.json.result;
}

View File

@@ -29,8 +29,8 @@ import { Icons } from '@superset-ui/core/components/Icons';
import { DatasourcePanelDndItem } from '../types';
const DatasourceItemContainer = styled.div`
${({ theme }) => css`
const DatasourceItemContainer = styled.div<{ isDisabled?: boolean }>`
${({ theme, isDisabled }) => css`
display: flex;
align-items: center;
justify-content: space-between;
@@ -40,12 +40,16 @@ const DatasourceItemContainer = styled.div`
// hack to make the drag preview image corners rounded
transform: translate(0, 0);
color: ${theme.colorText};
color: ${isDisabled ? theme.colorTextSecondary : theme.colorText};
background-color: ${theme.colorBgLayout};
border-radius: 4px;
opacity: ${isDisabled ? 0.5 : 1};
cursor: ${isDisabled ? 'not-allowed' : 'grab'};
&:hover {
background-color: ${theme.colorPrimaryBgHover};
background-color: ${isDisabled
? theme.colorBgLayout
: theme.colorPrimaryBgHover};
}
> div {
@@ -58,6 +62,7 @@ const DatasourceItemContainer = styled.div`
interface DatasourcePanelDragOptionProps extends DatasourcePanelDndItem {
labelRef?: RefObject<any>;
showTooltip?: boolean;
isDisabled?: boolean;
}
type MetricOption = Omit<Metric, 'id'> & {
@@ -67,7 +72,7 @@ type MetricOption = Omit<Metric, 'id'> & {
export default function DatasourcePanelDragOption(
props: DatasourcePanelDragOptionProps,
) {
const { labelRef, showTooltip, type, value } = props;
const { labelRef, showTooltip, type, value, isDisabled } = props;
const [{ isDragging }, drag] = useDrag({
item: {
value: props.value,
@@ -76,6 +81,7 @@ export default function DatasourcePanelDragOption(
collect: monitor => ({
isDragging: monitor.isDragging(),
}),
canDrag: !isDisabled,
});
const optionProps = {
@@ -85,13 +91,17 @@ export default function DatasourcePanelDragOption(
};
return (
<DatasourceItemContainer data-test="DatasourcePanelDragOption" ref={drag}>
<DatasourceItemContainer
data-test="DatasourcePanelDragOption"
ref={isDisabled ? undefined : drag}
isDisabled={isDisabled}
>
{type === DndItemType.Column ? (
<StyledColumnOption column={value as ColumnMeta} {...optionProps} />
) : (
<StyledMetricOption metric={value as MetricOption} {...optionProps} />
)}
<Icons.Drag />
{isDisabled ? <Icons.LockOutlined /> : <Icons.Drag />}
</DatasourceItemContainer>
);
}

View File

@@ -244,6 +244,7 @@ const DatasourcePanelItem = ({
? DndItemType.Column
: DndItemType.Metric
}
isDisabled={item.item.isDisabled}
/>
</LabelWrapper>
)}

View File

@@ -62,10 +62,12 @@ export type DatasourceFolder = {
export type MetricItem = Metric & {
type: 'metric';
isDisabled?: boolean;
};
export type ColumnItem = DatasourcePanelColumn & {
type: 'column';
isDisabled?: boolean;
};
export type FolderItem = MetricItem | ColumnItem;

View File

@@ -26,7 +26,7 @@ import {
useRef,
useState,
} from 'react';
import { useSelector } from 'react-redux';
import { useSelector, useStore } from 'react-redux';
import {
AdhocColumn,
isAdhocColumn,
@@ -44,11 +44,16 @@ import {
Select,
SQLEditor,
EmptyState,
Tooltip,
} from '@superset-ui/core/components';
import sqlKeywords from 'src/SqlLab/utils/sqlKeywords';
import { getColumnKeywords } from 'src/explore/controlUtils/getColumnKeywords';
import { StyledColumnOption } from 'src/explore/components/optionRenderers';
import {
collectQueryFields,
callValidationAPI,
} from 'src/explore/components/controls/SemanticLayerVerification';
import {
POPOVER_INITIAL_HEIGHT,
POPOVER_INITIAL_WIDTH,
@@ -119,6 +124,34 @@ const ColumnSelectPopover = ({
const datasourceType = useSelector<ExplorePageState, string | undefined>(
state => state.explore.datasource.type,
);
const datasource = useSelector<ExplorePageState, any>(
state => state.explore.datasource,
);
const formData = useSelector<ExplorePageState, any>(
state => state.explore.form_data,
);
const store = useStore();
// Check if this is a semantic layer dataset
const isSemanticLayer = useMemo(() => {
if (!datasource || !('database' in datasource) || !datasource.database) {
return false;
}
return Boolean(
datasource.database.engine_information?.supports_dynamic_columns,
);
}, [datasource]);
// For semantic layers, disable Saved and Custom SQL tabs
const effectiveDisabledTabs = useMemo(() => {
const tabs = new Set(disabledTabs);
if (isSemanticLayer) {
tabs.add('saved');
tabs.add('sqlExpression');
}
return tabs;
}, [disabledTabs, isSemanticLayer]);
const [initialLabel] = useState(label);
const [initialAdhocColumn, initialCalculatedColumn, initialSimpleColumn] =
getInitialColumnValues(editedColumn);
@@ -133,6 +166,8 @@ const ColumnSelectPopover = ({
ColumnMeta | undefined
>(initialSimpleColumn);
const [selectedTab, setSelectedTab] = useState<string | null>(null);
const [validDimensions, setValidDimensions] = useState<string[] | null>(null);
const previousFormDataRef = useRef<string>('');
const [resizeButton, width, height] = useResizeButton(
POPOVER_INITIAL_WIDTH,
@@ -141,21 +176,31 @@ const ColumnSelectPopover = ({
const sqlEditorRef = useRef(null);
const [calculatedColumns, simpleColumns] = useMemo(
() =>
columns?.reduce(
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
if (column.expression) {
acc[0].push(column);
} else {
acc[1].push(column);
}
return acc;
},
[[], []],
),
[columns],
);
const [calculatedColumns, simpleColumns] = useMemo(() => {
// Use columns from Redux datasource state (which includes disabled states) instead of props
const columnsToUse = datasource?.columns || columns || [];
const [calculated, simple] = columnsToUse.reduce(
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
if (column.expression) {
acc[0].push(column);
} else {
acc[1].push(column);
}
return acc;
},
[[], []],
) || [[], []];
// For semantic layer datasets, filter simple columns to show only valid dimensions
// Use the isDisabled state set by the main verification system instead of separate API calls
if (isSemanticLayer) {
const filteredSimple = simple.filter(column => !column.isDisabled);
return [calculated, filteredSimple];
}
return [calculated, simple];
}, [datasource?.columns, columns, isSemanticLayer]);
const onSqlExpressionChange = useCallback(
sqlExpression => {
@@ -196,17 +241,151 @@ const ColumnSelectPopover = ({
[setLabel, simpleColumns],
);
const defaultActiveTabKey = initialAdhocColumn
? 'sqlExpression'
: selectedCalculatedColumn
? 'saved'
: 'simple';
const defaultActiveTabKey = useMemo(() => {
// For semantic layer datasets, always default to Simple tab
if (isSemanticLayer) {
return TABS_KEYS.SIMPLE;
}
// Original logic for non-semantic layer datasets
return initialAdhocColumn
? TABS_KEYS.SQL_EXPRESSION
: selectedCalculatedColumn
? TABS_KEYS.SAVED
: TABS_KEYS.SIMPLE;
}, [isSemanticLayer, initialAdhocColumn, selectedCalculatedColumn]);
useEffect(() => {
getCurrentTab(defaultActiveTabKey);
setSelectedTab(defaultActiveTabKey);
}, [defaultActiveTabKey, getCurrentTab, setSelectedTab]);
// Fetch valid dimensions for semantic layer datasets
// Only trigger when actually needed (tab is Simple or modal opens after delay)
useEffect(() => {
  // Column-modal validation API calls were permanently disabled here behind a
  // `false &&` guard: the semantic layer verification wrappers
  // (withAsyncVerification) now own disabled-state computation. The dead
  // branch has been removed; all this effect needs to do is clear any stale
  // result whenever the semantic-layer flag, selected tab, or datasource
  // changes.
  setValidDimensions(null);
}, [isSemanticLayer, selectedTab, datasource, store]);
// Also trigger when form data changes (for subsequent updates)
useEffect(() => {
if (isSemanticLayer && validDimensions !== null && formData && datasource) {
const currentFormDataString = JSON.stringify(formData);
// Only make API call if form data actually changed and we already have loaded once
if (currentFormDataString !== previousFormDataRef.current) {
previousFormDataRef.current = currentFormDataString;
const fetchValidDimensions = async () => {
try {
await new Promise(resolve => setTimeout(resolve, 50));
const currentState = store.getState() as ExplorePageState;
const currentFormData = currentState.explore.form_data;
const queryFields = collectQueryFields(currentFormData);
const validationResult = await callValidationAPI(
datasource,
queryFields.dimensions,
queryFields.metrics,
);
if (validationResult) {
setValidDimensions(validationResult.dimensions);
}
} catch (error) {
console.warn('Failed to fetch valid dimensions:', error);
} finally {
// Cleanup
}
};
setTimeout(() => {
fetchValidDimensions();
}, 50);
}
}
}, [isSemanticLayer, formData, datasource, store, validDimensions]);
useEffect(() => {
/* if the adhoc column is not set (because it was never edited) but the
* tab is selected and the label has changed, then we need to set the
@@ -318,8 +497,19 @@ const ColumnSelectPopover = ({
items={[
{
key: TABS_KEYS.SAVED,
label: t('Saved'),
disabled: disabledTabs.has('saved'),
label:
isSemanticLayer && effectiveDisabledTabs.has('saved') ? (
<Tooltip
title={t(
'Saved expressions are not supported for semantic layer datasets',
)}
>
{t('Saved')}
</Tooltip>
) : (
t('Saved')
),
disabled: effectiveDisabledTabs.has('saved'),
children: (
<>
{calculatedColumns.length > 0 ? (
@@ -404,7 +594,7 @@ const ColumnSelectPopover = ({
{
key: TABS_KEYS.SIMPLE,
label: t('Simple'),
disabled: disabledTabs.has('simple'),
disabled: effectiveDisabledTabs.has('simple'),
children: (
<>
{isTemporal && simpleColumns.length === 0 ? (
@@ -455,8 +645,19 @@ const ColumnSelectPopover = ({
},
{
key: TABS_KEYS.SQL_EXPRESSION,
label: t('Custom SQL'),
disabled: disabledTabs.has('sqlExpression'),
label:
isSemanticLayer && effectiveDisabledTabs.has('sqlExpression') ? (
<Tooltip
title={t(
'Custom SQL expressions are not supported for semantic layer datasets',
)}
>
{t('Custom SQL')}
</Tooltip>
) : (
t('Custom SQL')
),
disabled: effectiveDisabledTabs.has('sqlExpression'),
children: (
<>
<SQLEditor

View File

@@ -19,6 +19,7 @@
/* eslint-disable camelcase */
import { PureComponent } from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import {
isDefined,
t,
@@ -68,6 +69,8 @@ const propTypes = {
datasource: PropTypes.object,
isNewMetric: PropTypes.bool,
isLabelModified: PropTypes.bool,
// Props from Redux
reduxDatasource: PropTypes.object,
};
const defaultProps = {
@@ -90,7 +93,7 @@ const StyledSelect = styled(Select)`
export const SAVED_TAB_KEY = 'SAVED';
export default class AdhocMetricEditPopover extends PureComponent {
class AdhocMetricEditPopover extends PureComponent {
// "Saved" is a default tab unless there are no saved metrics for dataset
defaultActiveTabKey = this.getDefaultTab();
@@ -149,6 +152,19 @@ export default class AdhocMetricEditPopover extends PureComponent {
getDefaultTab() {
const { adhocMetric, savedMetric, savedMetricsOptions, isNewMetric } =
this.props;
// For semantic layer datasets, always default to Saved tab if available
if (this.isSemanticLayer()) {
if (
Array.isArray(savedMetricsOptions) &&
savedMetricsOptions.length > 0
) {
return SAVED_TAB_KEY;
}
// If no saved metrics available, still return SAVED_TAB_KEY to show empty state
return SAVED_TAB_KEY;
}
if (isDefined(adhocMetric.column) || isDefined(adhocMetric.sqlExpression)) {
return adhocMetric.expressionType;
}
@@ -162,6 +178,16 @@ export default class AdhocMetricEditPopover extends PureComponent {
return adhocMetric.expressionType;
}
isSemanticLayer() {
const { datasource } = this.props;
if (!datasource || !('database' in datasource) || !datasource.database) {
return false;
}
return Boolean(
datasource.database.engine_information?.supports_dynamic_columns,
);
}
onSave() {
const { adhocMetric, savedMetric } = this.state;
@@ -306,11 +332,29 @@ export default class AdhocMetricEditPopover extends PureComponent {
datasource,
isNewMetric,
isLabelModified,
reduxDatasource,
...popoverProps
} = this.props;
const { adhocMetric, savedMetric } = this.state;
const keywords = sqlKeywords.concat(getColumnKeywords(columns));
// For semantic layer datasets, filter saved metrics to show only valid ones
// Use the isDisabled state set by the main verification system instead of all metrics
let filteredSavedMetricsOptions = savedMetricsOptions;
if (this.isSemanticLayer() && reduxDatasource?.metrics) {
// Create a set of metric names that are NOT disabled in Redux state
const validMetricNames = new Set(
reduxDatasource.metrics
.filter(metric => !metric.isDisabled)
.map(metric => metric.metric_name),
);
// Filter savedMetricsOptions to only include valid metrics
filteredSavedMetricsOptions = ensureIsArray(savedMetricsOptions).filter(
metric => validMetricNames.has(metric.metric_name),
);
}
const columnValue =
(adhocMetric.column && adhocMetric.column.column_name) ||
adhocMetric.inferSqlExpressionColumn();
@@ -336,7 +380,10 @@ export default class AdhocMetricEditPopover extends PureComponent {
const savedSelectProps = {
ariaLabel: t('Select saved metrics'),
placeholder: t('%s saved metric(s)', savedMetricsOptions?.length ?? 0),
placeholder: t(
'%s saved metric(s)',
filteredSavedMetricsOptions?.length ?? 0,
),
value: savedMetric?.metric_name,
onChange: this.onSavedMetricChange,
allowClear: true,
@@ -381,10 +428,10 @@ export default class AdhocMetricEditPopover extends PureComponent {
key: SAVED_TAB_KEY,
label: t('Saved'),
children:
ensureIsArray(savedMetricsOptions).length > 0 ? (
ensureIsArray(filteredSavedMetricsOptions).length > 0 ? (
<FormItem label={t('Saved metric')}>
<StyledSelect
options={ensureIsArray(savedMetricsOptions).map(
options={ensureIsArray(filteredSavedMetricsOptions).map(
savedMetric => ({
value: savedMetric.metric_name,
label: this.renderMetricOption(savedMetric),
@@ -428,18 +475,25 @@ export default class AdhocMetricEditPopover extends PureComponent {
},
{
key: EXPRESSION_TYPES.SIMPLE,
label: extra.disallow_adhoc_metrics ? (
<Tooltip
title={t(
'Simple ad-hoc metrics are not enabled for this dataset',
)}
>
{t('Simple')}
</Tooltip>
) : (
t('Simple')
),
disabled: extra.disallow_adhoc_metrics,
label:
extra.disallow_adhoc_metrics || this.isSemanticLayer() ? (
<Tooltip
title={
this.isSemanticLayer()
? t(
'Simple ad-hoc metrics are not supported for semantic layer datasets',
)
: t(
'Simple ad-hoc metrics are not enabled for this dataset',
)
}
>
{t('Simple')}
</Tooltip>
) : (
t('Simple')
),
disabled: extra.disallow_adhoc_metrics || this.isSemanticLayer(),
children: (
<>
<FormItem label={t('column')}>
@@ -467,18 +521,25 @@ export default class AdhocMetricEditPopover extends PureComponent {
},
{
key: EXPRESSION_TYPES.SQL,
label: extra.disallow_adhoc_metrics ? (
<Tooltip
title={t(
'Custom SQL ad-hoc metrics are not enabled for this dataset',
)}
>
{t('Custom SQL')}
</Tooltip>
) : (
t('Custom SQL')
),
disabled: extra.disallow_adhoc_metrics,
label:
extra.disallow_adhoc_metrics || this.isSemanticLayer() ? (
<Tooltip
title={
this.isSemanticLayer()
? t(
'Custom SQL ad-hoc metrics are not supported for semantic layer datasets',
)
: t(
'Custom SQL ad-hoc metrics are not enabled for this dataset',
)
}
>
{t('Custom SQL')}
</Tooltip>
) : (
t('Custom SQL')
),
disabled: extra.disallow_adhoc_metrics || this.isSemanticLayer(),
children: (
<SQLEditor
data-test="sql-editor"
@@ -536,3 +597,10 @@ export default class AdhocMetricEditPopover extends PureComponent {
}
AdhocMetricEditPopover.propTypes = propTypes;
AdhocMetricEditPopover.defaultProps = defaultProps;
// Expose the Redux-managed datasource so the popover can honour the
// isDisabled flags computed by the semantic layer verification system.
function mapStateToProps(state) {
  return {
    reduxDatasource: state.explore?.datasource,
  };
}

export default connect(mapStateToProps)(AdhocMetricEditPopover);

View File

@@ -0,0 +1,225 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
dndAdhocMetricsControl,
dndAdhocMetricControl,
dndAdhocMetricControl2,
dndGroupByControl,
dndColumnsControl,
Dataset,
} from '@superset-ui/chart-controls';
import withAsyncVerification from './withAsyncVerification';
import {
createMetricsVerification,
createColumnsVerification,
createSemanticLayerOnChange,
SEMANTIC_LAYER_CONTROL_FIELDS,
} from './SemanticLayerVerification';
/**
 * Whether semantic layer verification applies to a datasource: true only
 * when its database advertises dynamic-column support.
 */
function needsSemanticLayerVerification(datasource: Dataset): boolean {
  const database =
    datasource && 'database' in datasource
      ? (datasource.database as any)
      : undefined;
  return Boolean(database?.engine_information?.supports_dynamic_columns);
}
/**
 * Build a control config enhanced with semantic layer verification.
 *
 * Wraps the base control's renderer in withAsyncVerification and extends its
 * mapStateToProps so the wrapper receives `needAsyncVerification` (computed
 * from the datasource) and the current `form_data`. The five exported
 * controls below were previously five near-identical copies of this
 * structure.
 *
 * @param baseConfig the original drag-and-drop control definition
 * @param baseControl name of the underlying control component
 * @param verify async verification function for this control's values
 * @param controlName form-data field this control writes to
 */
function createVerifiedControl(
  baseConfig: any,
  baseControl: string,
  verify: any,
  controlName: string,
) {
  return {
    ...baseConfig,
    type: withAsyncVerification({
      baseControl,
      verify,
      onChange: createSemanticLayerOnChange(
        controlName,
        SEMANTIC_LAYER_CONTROL_FIELDS,
      ),
      showLoadingState: true,
    }),
    mapStateToProps: (state: any, controlState: any) => {
      // Preserve whatever props the base control already derives from state.
      const originalProps = baseConfig.mapStateToProps
        ? baseConfig.mapStateToProps(state, controlState)
        : {};
      return {
        ...originalProps,
        needAsyncVerification: needsSemanticLayerVerification(state.datasource),
        form_data: state.form_data,
      };
    },
  };
}

/** Enhanced metrics control with semantic layer verification. */
export const semanticLayerDndAdhocMetricsControl = createVerifiedControl(
  dndAdhocMetricsControl,
  'DndMetricSelect',
  createMetricsVerification(),
  'metrics',
);

/** Enhanced single metric control with semantic layer verification. */
export const semanticLayerDndAdhocMetricControl = createVerifiedControl(
  dndAdhocMetricControl,
  'DndMetricSelect',
  createMetricsVerification(),
  'metric',
);

/** Enhanced secondary metric control with semantic layer verification. */
export const semanticLayerDndAdhocMetricControl2 = createVerifiedControl(
  dndAdhocMetricControl2,
  'DndMetricSelect',
  createMetricsVerification(),
  'metric_2',
);

/** Enhanced group by control with semantic layer verification. */
export const semanticLayerDndGroupByControl = createVerifiedControl(
  dndGroupByControl,
  'DndColumnSelect',
  createColumnsVerification(),
  'groupby',
);

/** Enhanced columns control with semantic layer verification. */
export const semanticLayerDndColumnsControl = createVerifiedControl(
  dndColumnsControl,
  'DndColumnSelect',
  createColumnsVerification(),
  'columns',
);
/**
 * Wrap an enhanced control as a control-override function: the returned
 * function merges the enhanced control's config over the original control
 * config (enhanced keys win), so semantic-layer datasources get the
 * verification-aware control while the rest of the config is preserved.
 */
function createSemanticLayerControlOverride(enhancedControl: any) {
  return (originalConfig: any) => {
    const merged = {
      ...originalConfig,
      ...enhancedControl,
    };
    return merged;
  };
}
/**
 * Control overrides mapping
 *
 * Maps each verification-relevant form-data field to an override function
 * (see createSemanticLayerControlOverride). Several fields deliberately
 * share the same enhanced control definition: percent_metrics reuses the
 * multi-metric control, timeseries_limit_metric reuses the single-metric
 * control, and series_columns reuses the columns control. Each entry gets
 * its own override function instance.
 */
export const semanticLayerControlOverrides = {
  metrics: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricsControl,
  ),
  metric: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricControl,
  ),
  metric_2: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricControl2,
  ),
  percent_metrics: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricsControl,
  ),
  timeseries_limit_metric: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricControl,
  ),
  groupby: createSemanticLayerControlOverride(semanticLayerDndGroupByControl),
  columns: createSemanticLayerControlOverride(semanticLayerDndColumnsControl),
  series_columns: createSemanticLayerControlOverride(
    semanticLayerDndColumnsControl,
  ),
};

View File

@@ -0,0 +1,658 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { SupersetClient, JsonValue } from '@superset-ui/core';
import { Dataset } from '@superset-ui/chart-controls';
import { AsyncVerify, ControlPropsWithExtras } from './withAsyncVerification';
// Form-data fields that contribute dimensions to a query, in extraction
// order. Values may be a single item or an array; both are accepted.
const DIMENSION_FORM_FIELDS = [
  'groupby',
  'columns',
  'all_columns',
  'series_columns',
  'series',
  'entity',
  'x_axis',
];

// Form-data fields that contribute metrics to a query, in extraction order.
const METRIC_FORM_FIELDS = [
  'metrics',
  'metric',
  'metric_2',
  'percent_metrics',
  'timeseries_limit_metric',
  'x',
  'y',
  'size',
  'secondary_metric',
];

/**
 * Extract the dimension and metric names referenced by a chart's form data.
 *
 * Walks the known dimension/metric fields (see the field tables above),
 * accepting either single values or arrays, skipping falsy entries, and
 * normalizing objects to their `column_name`/`metric_name` (falling back to
 * String()). Duplicates are removed while preserving first-seen order.
 *
 * @param formData chart form data (any shape; missing/null is tolerated)
 * @returns de-duplicated dimension and metric name lists
 */
export function collectQueryFields(formData: any): {
  dimensions: string[];
  metrics: string[];
} {
  // Be robust to a missing form data object.
  if (!formData) {
    return { dimensions: [], metrics: [] };
  }

  // Gather raw values from every relevant field; falsy fields are skipped
  // (matching the original truthiness checks) and single values are treated
  // as one-element arrays.
  const gather = (fields: string[]): any[] =>
    fields.flatMap(field => {
      const value = formData[field];
      if (!value) {
        return [];
      }
      return Array.isArray(value) ? value : [value];
    });

  // Normalize an entry to a plain name string.
  const toName = (item: any, nameKey: 'column_name' | 'metric_name'): string =>
    typeof item === 'string' ? item : item?.[nameKey] || String(item);

  const dimensions = gather(DIMENSION_FORM_FIELDS)
    .filter(item => item != null)
    .map(item => toName(item, 'column_name'));
  const metrics = gather(METRIC_FORM_FIELDS)
    .filter(item => item != null)
    .map(item => toName(item, 'metric_name'));

  return {
    dimensions: [...new Set(dimensions)], // remove duplicates, keep order
    metrics: [...new Set(metrics)],
  };
}
/**
 * Whether semantic layer verification applies to the given datasource:
 * true only when its database advertises dynamic-column support.
 */
function supportsSemanticLayerVerification(datasource: Dataset): boolean {
  const database =
    datasource && 'database' in datasource
      ? (datasource.database as any)
      : undefined;
  return Boolean(database?.engine_information?.supports_dynamic_columns);
}
// In-flight request cache keyed by the JSON-serialized request payload;
// concurrent identical requests share one promise. Entries are removed as
// soon as a request settles, so this dedupes in-flight calls only.
const apiCallCache = new Map<
  string,
  Promise<{ dimensions: string[]; metrics: string[] } | null>
>();
// At most one pending request per `${datasourceId}_${controlName}` key.
const pendingRequests = new Map<string, Promise<any>>();
// Timestamp of the last request per control key, used for 50ms debouncing.
const lastRequestTime = new Map<string, number>();
// Control keys that already ran their one-time initial (empty-selection)
// verification for fresh charts; prevents repeating it.
const initialVerificationDone = new Set<string>();
/**
 * Translate an API validation result into updated control props.
 *
 * Filters `savedMetrics` down to those the API reported valid, and rebuilds
 * the datasource with per-item `isDisabled` flags on its metrics and columns
 * so the left-hand dataset panel greys out invalid entries. As a side
 * effect, dispatches `syncDatasourceMetadata` (when available) to push the
 * flagged datasource into the Redux store.
 *
 * @param validationResult valid dimension/metric names from the API
 * @param savedMetrics saved-metric options shown in the metric popover
 * @param props control props; supplies `datasource` and Redux `actions`
 * @param controlName originating control — currently unused; kept so the
 *   signature matches the other verification helpers
 * @returns the filtered saved metrics and the flagged datasource
 */
function createVerificationResult(
  validationResult: { dimensions: string[]; metrics: string[] },
  savedMetrics: any[],
  props: ControlPropsWithExtras,
  controlName?: string,
) {
  const { datasource, actions } = props;
  // Filter saved metrics to only include valid ones.
  const validMetricNames = new Set(validationResult.metrics);
  const filteredSavedMetrics = savedMetrics.filter((metric: any) =>
    // entries may be full metric objects or bare name strings
    validMetricNames.has(metric.metric_name || metric),
  );
  // Mark datasource metrics and columns as disabled if invalid (left panel).
  const dataset = datasource as Dataset;
  let updatedDatasourceMetrics = dataset.metrics;
  let updatedDatasourceColumns = dataset.columns;
  // Restrict the API's valid names to those that actually exist on the
  // datasource, so unknown names can't accidentally enable anything.
  const originalDimensionNames = new Set(
    dataset.columns?.map((col: any) => col.column_name) || [],
  );
  const originalMetricNames = new Set(
    dataset.metrics?.map((metric: any) => metric.metric_name) || [],
  );
  const filteredValidMetricNames = new Set(
    validationResult.metrics.filter(metric => originalMetricNames.has(metric)),
  );
  const filteredValidDimensionNames = new Set(
    validationResult.dimensions.filter(dim => originalDimensionNames.has(dim)),
  );
  if (dataset.metrics) {
    updatedDatasourceMetrics = dataset.metrics.map((metric: any) => ({
      ...metric,
      isDisabled: !filteredValidMetricNames.has(metric.metric_name || metric),
    }));
  }
  // Also update columns using the same validation result.
  if (dataset.columns) {
    updatedDatasourceColumns = dataset.columns.map((column: any) => ({
      ...column,
      isDisabled: !filteredValidDimensionNames.has(
        column.column_name || column,
      ),
    }));
  }
  // Create the updated datasource for the left panel.
  const updatedDatasource = {
    ...dataset,
    metrics: updatedDatasourceMetrics,
    columns: updatedDatasourceColumns,
  };
  // Side effect: update the Redux store's datasource so the left panel
  // reflects the new disabled states.
  if (actions && typeof actions.syncDatasourceMetadata === 'function') {
    actions.syncDatasourceMetadata(updatedDatasource);
  }
  return {
    savedMetrics: filteredSavedMetrics,
    datasource: updatedDatasource,
  };
}
/**
 * Call the validation API to find which metrics and dimensions remain valid
 * given the current selection.
 *
 * Requests are deduplicated while in flight: identical payloads share one
 * promise (keyed by the serialized payload), each (datasource, control) pair
 * has at most one pending request, and per-control calls are debounced by
 * 50ms (a too-soon call retries once after the debounce window).
 *
 * @param datasource dataset being validated (must have an id and database id)
 * @param selectedDimensions currently selected dimension names (not mutated)
 * @param selectedMetrics currently selected metric names (not mutated)
 * @param controlName originating control, used for per-control dedup keys
 * @returns valid metric/dimension names, or null when the datasource is
 *   incomplete or the request fails
 */
export async function callValidationAPI(
  datasource: Dataset,
  selectedDimensions: string[],
  selectedMetrics: string[],
  controlName?: string,
): Promise<{ dimensions: string[]; metrics: string[] } | null> {
  const databaseId = (datasource.database as any)?.id;
  if (!datasource?.id || !databaseId) {
    return null;
  }
  // Copy before sorting: Array#sort is in-place, and the previous
  // implementation mutated the caller's arrays here.
  const sortedDimensions = [...selectedDimensions].sort();
  const sortedMetrics = [...selectedMetrics].sort();
  // Cache key is order-insensitive thanks to the sorted copies.
  const cacheKey = JSON.stringify({
    datasource_id: datasource.id,
    dimensions: sortedDimensions,
    metrics: sortedMetrics,
  });
  const controlKey = `${datasource.id}_${controlName || 'unknown'}`;
  const now = Date.now();
  // Reuse an identical in-flight request.
  if (apiCallCache.has(cacheKey)) {
    return apiCallCache.get(cacheKey)!;
  }
  // Reuse a pending request for this specific control.
  if (pendingRequests.has(controlKey)) {
    return pendingRequests.get(controlKey)!;
  }
  // Debounce per control: if called again within 50ms, retry after the
  // debounce window (the retry will hit the caches above if appropriate).
  const lastTime = lastRequestTime.get(controlKey) || 0;
  if (now - lastTime < 50) {
    return new Promise(resolve => {
      setTimeout(() => {
        resolve(
          callValidationAPI(
            datasource,
            selectedDimensions,
            selectedMetrics,
            controlName,
          ),
        );
      }, 50);
    });
  }
  lastRequestTime.set(controlKey, now);
  try {
    const apiPromise = SupersetClient.post({
      endpoint: `/api/v1/database/${databaseId}/valid_metrics_and_dimensions/`,
      jsonPayload: {
        datasource_id: datasource.id,
        dimensions: sortedDimensions,
        metrics: sortedMetrics,
      },
    }).then(
      response => response.json as { dimensions: string[]; metrics: string[] },
    );
    // Register the in-flight promise for both dedup mechanisms.
    apiCallCache.set(cacheKey, apiPromise);
    pendingRequests.set(controlKey, apiPromise);
    return await apiPromise;
  } catch (error) {
    console.warn('Failed to fetch valid metrics and dimensions:', error);
    return null;
  } finally {
    // Dedup entries only live while the request is in flight.
    apiCallCache.delete(cacheKey);
    pendingRequests.delete(controlKey);
  }
}
/**
 * Create the async verification function for metric controls.
 *
 * The returned function is a no-op (returns null) for non-semantic-layer
 * datasources. Otherwise it overlays the control's latest value onto the
 * form data, collects the selected dimensions/metrics, and either re-enables
 * everything (when nothing is selected) or asks the validation API which
 * options remain valid. Verified props (filtered saved metrics plus a
 * datasource with isDisabled flags) are returned; the flagged datasource is
 * also synced to Redux for the left panel.
 *
 * @param controlName form-data field this control writes to; used to inject
 *   the control's in-flight value into the form data before collection
 */
export function createMetricsVerification(controlName?: string): AsyncVerify {
  return async (props: ControlPropsWithExtras) => {
    const { datasource, form_data, savedMetrics = [], value } = props;
    // Only verify for semantic layer datasources.
    if (!supportsSemanticLayerVerification(datasource as Dataset)) {
      return null;
    }
    // Overlay this control's current value: the store may not have
    // propagated it to form_data yet.
    const syntheticFormData = { ...form_data };
    if (controlName) {
      syntheticFormData[controlName] = value;
    }
    const queryFields = collectQueryFields(syntheticFormData);
    // With nothing selected, every option is valid: re-enable everything and
    // push the reset datasource to the store for the left panel.
    if (
      queryFields.dimensions.length === 0 &&
      queryFields.metrics.length === 0
    ) {
      const dataset = datasource as Dataset;
      const updatedDatasource = {
        ...dataset,
        metrics: dataset.metrics?.map((metric: any) => ({
          ...metric,
          isDisabled: false,
        })),
        columns: dataset.columns?.map((column: any) => ({
          ...column,
          isDisabled: false,
        })),
      };
      if (
        props.actions &&
        typeof props.actions.syncDatasourceMetadata === 'function'
      ) {
        props.actions.syncDatasourceMetadata(updatedDatasource);
      }
      return {
        savedMetrics,
        datasource: updatedDatasource,
      };
    }
    const validationResult = await callValidationAPI(
      datasource as Dataset,
      queryFields.dimensions,
      queryFields.metrics,
      controlName,
    );
    if (!validationResult) {
      return null;
    }
    return createVerificationResult(
      validationResult,
      savedMetrics,
      props,
      controlName,
    );
  };
}
/**
 * Create the async verification function for dimension (column) controls.
 *
 * The returned function is a no-op (returns null) for non-semantic-layer
 * datasources. For fresh charts it can run a one-time "initial" verification
 * with an empty selection (guarded by `triggerInitialVerification` and the
 * module-level `initialVerificationDone` set). Otherwise it overlays the
 * control's latest value on the form data, collects the selection, and
 * either re-enables everything (nothing selected) or asks the validation API
 * which options remain valid, flagging both the control's options and the
 * datasource (for the left panel) with isDisabled.
 *
 * @param controlName form-data field this control writes to
 */
export function createColumnsVerification(controlName?: string): AsyncVerify {
  return async (props: ControlPropsWithExtras) => {
    const { datasource, form_data, options = [], actions, value } = props;
    // Only verify for semantic layer datasources.
    if (!supportsSemanticLayerVerification(datasource as Dataset)) {
      return null;
    }
    // One-time initial verification for fresh charts: seed disabled states
    // from an empty-selection validation.
    const { triggerInitialVerification } = props as any;
    const datasourceControlKey = `${datasource?.id}_${controlName}`;
    if (
      triggerInitialVerification &&
      !initialVerificationDone.has(datasourceControlKey)
    ) {
      initialVerificationDone.add(datasourceControlKey);
      const initialResult = await callValidationAPI(
        datasource as Dataset,
        [],
        [],
        controlName,
      );
      if (initialResult) {
        // Flag this control's options from the initial result.
        const validDimensionNames = new Set(initialResult.dimensions);
        const updatedOptions = options.map((option: any) => ({
          ...option,
          isDisabled: !validDimensionNames.has(option.column_name || option),
        }));
        // Also update the left panel's disabled states.
        const verificationResult = createVerificationResult(
          initialResult,
          [],
          props,
          controlName,
        );
        return {
          options: updatedOptions,
          datasource: verificationResult.datasource,
        };
      }
    }
    // Overlay this control's current value: the store may not have
    // propagated it to form_data yet.
    const syntheticFormData = { ...form_data };
    if (controlName) {
      syntheticFormData[controlName] = value;
    }
    const queryFields = collectQueryFields(syntheticFormData);
    // With nothing selected, every option is valid: re-enable everything and
    // push the reset datasource to the store for the left panel.
    if (
      queryFields.dimensions.length === 0 &&
      queryFields.metrics.length === 0
    ) {
      const dataset = datasource as Dataset;
      const updatedOptions = options.map((option: any) => ({
        ...option,
        isDisabled: false,
      }));
      const updatedDatasource = {
        ...dataset,
        metrics: dataset.metrics?.map((metric: any) => ({
          ...metric,
          isDisabled: false,
        })),
        columns: dataset.columns?.map((column: any) => ({
          ...column,
          isDisabled: false,
        })),
      };
      if (actions && typeof actions.syncDatasourceMetadata === 'function') {
        actions.syncDatasourceMetadata(updatedDatasource);
      }
      return {
        options: updatedOptions,
        datasource: updatedDatasource,
      };
    }
    const validationResult = await callValidationAPI(
      datasource as Dataset,
      queryFields.dimensions,
      queryFields.metrics,
      controlName,
    );
    if (!validationResult) {
      return null;
    }
    // Flag this control's dimension options from the result.
    const validDimensionNames = new Set(validationResult.dimensions);
    const updatedOptions = options.map((option: any) => ({
      ...option,
      isDisabled: !validDimensionNames.has(option.column_name || option),
    }));
    // Shared helper handles the datasource flagging and Redux sync
    // (savedMetrics are not used for columns verification).
    const verificationResult = createVerificationResult(
      validationResult,
      [],
      props,
      controlName,
    );
    return {
      options: updatedOptions,
      datasource: verificationResult.datasource,
    };
  };
}
/**
 * Build an onChange handler that nudges sibling controls to re-run their
 * verification after this control's value changes.
 *
 * The work is deferred with setTimeout(..., 0) so the Redux state has
 * propagated the new value first; otherwise sibling controls would verify
 * against stale form_data.
 *
 * @param controlName the field whose control triggered the change (skipped)
 * @param affectedControls form-data fields whose controls should re-verify
 */
export function createSemanticLayerOnChange(
  controlName: string,
  affectedControls: string[],
) {
  return (value: JsonValue, props: ControlPropsWithExtras) => {
    const { actions, form_data } = props;
    setTimeout(() => {
      affectedControls
        .filter(
          field =>
            field !== controlName && Boolean(form_data && form_data[field]),
        )
        .forEach(field => {
          // Re-setting the existing value forces that control's
          // verification to run again.
          actions.setControlValue(field, form_data[field], []);
        });
    }, 0);
  };
}
/**
 * Control fields whose values feed the semantic layer validation query.
 * When one of these changes, sibling controls listed here are nudged to
 * re-verify (see createSemanticLayerOnChange); the same names are read by
 * collectQueryFields when building the validation request.
 */
export const SEMANTIC_LAYER_CONTROL_FIELDS = [
  // Metric controls
  'metrics',
  'metric',
  'metric_2',
  'percent_metrics',
  'timeseries_limit_metric',
  'x',
  'y',
  'size',
  'secondary_metric',
  // Dimension controls
  'groupby',
  'columns',
  'all_columns',
  'series_columns',
  'series',
  'entity',
  'x_axis',
];

View File

@@ -54,6 +54,10 @@ export type FullControlProps = ControlPropsWithExtras & {
* An extra flag for triggering async verification. Set it in mapStateToProps.
*/
needAsyncVerification?: boolean;
/**
* Whether to skip useEffect verification and only use onChange verification.
*/
skipEffectVerification?: boolean;
/**
* Whether to show loading state when verification is still loading.
*/
@@ -136,6 +140,7 @@ export default function withAsyncVerification({
hovered,
onChange: basicOnChange,
needAsyncVerification = false,
skipEffectVerification = false,
isLoading: initialIsLoading = false,
showLoadingState = defaultShowLoadingState,
verify = defaultVerify,
@@ -145,6 +150,7 @@ export default function withAsyncVerification({
const [verifiedProps, setVerifiedProps] = useState({});
const [isLoading, setIsLoading] = useState<boolean>(initialIsLoading);
const { addWarningToast } = restProps.actions;
const verificationTriggeredByChange = useRef(false);
// memoize `restProps`, so that verification only triggers when material
// props are actually updated.
@@ -153,19 +159,6 @@ export default function withAsyncVerification({
otherProps = otherPropsRef.current = restProps;
}
const handleChange = useCallback(
(value: JsonValue) => {
// the default onChange handler, triggers the `setControlValue` action
if (basicOnChange) {
basicOnChange(value);
}
if (onChange) {
onChange(value, { ...otherProps, ...verifiedProps });
}
},
[basicOnChange, otherProps, verifiedProps],
);
const verifyProps = useEffectEvent(
(verifyFunc: AsyncVerify, props: typeof otherProps) => {
if (showLoadingState) {
@@ -202,11 +195,49 @@ export default function withAsyncVerification({
},
);
const handleChange = useCallback(
(value: JsonValue) => {
// the default onChange handler, triggers the `setControlValue` action
if (basicOnChange) {
basicOnChange(value);
}
if (onChange) {
onChange(value, { ...otherProps, ...verifiedProps });
}
// Trigger verification with the new value if verification is enabled
if (needAsyncVerification && verify) {
verificationTriggeredByChange.current = true;
const propsWithNewValue = { ...otherProps, ...verifiedProps, value };
verifyProps(verify, propsWithNewValue);
}
},
[
basicOnChange,
otherProps,
verifiedProps,
needAsyncVerification,
verify,
verifyProps,
],
);
useEffect(() => {
if (needAsyncVerification && verify) {
if (needAsyncVerification && verify && !skipEffectVerification) {
// Skip verification if it was just triggered by onChange
if (verificationTriggeredByChange.current) {
verificationTriggeredByChange.current = false;
return;
}
verifyProps(verify, otherProps);
}
}, [needAsyncVerification, verify, otherProps, verifyProps]);
}, [
needAsyncVerification,
verify,
otherProps,
verifyProps,
skipEffectVerification,
]);
return (
<ControlComponent

View File

@@ -24,6 +24,7 @@ import {
ClientErrorObject,
} from '@superset-ui/core';
import setupErrorMessages from 'src/setup/setupErrorMessages';
import setupSemanticLayer from 'src/setup/setupSemanticLayer';
// eslint-disable-next-line @typescript-eslint/no-unused-vars
declare global {
@@ -96,4 +97,7 @@ export default function setupApp() {
// set up app wide custom error messages
setupErrorMessages();
// set up semantic layer controls
setupSemanticLayer();
}

View File

@@ -0,0 +1,41 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { setSemanticLayerUtilities } from '@superset-ui/chart-controls';
import withAsyncVerification from 'src/explore/components/controls/withAsyncVerification';
import {
createMetricsVerification,
createColumnsVerification,
createSemanticLayerOnChange,
SEMANTIC_LAYER_CONTROL_FIELDS,
} from 'src/explore/components/controls/SemanticLayerVerification';
/**
 * Wire the semantic layer verification helpers into the chart controls
 * package, so controls can verify metric/dimension combinations at runtime.
 */
export default function setupSemanticLayer() {
  const utilities = {
    withAsyncVerification,
    createMetricsVerification,
    createColumnsVerification,
    createSemanticLayerOnChange,
    SEMANTIC_LAYER_CONTROL_FIELDS,
  };
  setSemanticLayerUtilities(utilities);
}

View File

@@ -174,6 +174,8 @@ MODEL_API_RW_METHOD_PERMISSION_MAP = {
"put_filters": "write",
"put_colors": "write",
"sync_permissions": "write",
"valid_metrics_and_dimensions": "read",
"sync_metrics": "write",
}
EXTRA_FORM_DATA_APPEND_KEYS = {

View File

@@ -23,11 +23,13 @@ from superset.connectors.sqla.models import SqlaTable
from superset.daos.base import BaseDAO
from superset.databases.filters import DatabaseFilter
from superset.databases.ssh_tunnel.models import SSHTunnel
from superset.extensions import db
from superset.db_engine_specs.base import ValidColumnsType
from superset.extensions import cache_manager, db
from superset.models.core import Database, DatabaseUserOAuth2Tokens
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.models.sql_lab import TabState
from superset.sql.parse import Table
from superset.utils.core import DatasourceType
from superset.utils.ssh_tunnel import unmask_password_info
@@ -166,6 +168,65 @@ class DatabaseDAO(BaseDAO[Database]):
return ssh_tunnel
@classmethod
def get_valid_metrics_and_dimensions(
    cls,
    database_id: int,
    datasource_id: int,
    dimensions: set[str],
    metrics: set[str],
) -> ValidColumnsType:
    """
    Get valid metrics and dimensions for a datasource using the database engine spec.

    Results are cached for 1 day to improve performance.

    :param database_id: The database ID
    :param datasource_id: The datasource ID
    :param dimensions: Set of selected column names
    :param metrics: Set of selected metric names
    :return: Dictionary with 'dimensions' and 'metrics' keys containing valid sets
    :raises ValueError: If database or datasource not found, or invalid type
    """
    # Normalize the sets into sorted tuples before hitting the memoized
    # helper: set iteration order depends on the per-process string hash
    # seed, so memoizing on the raw sets would produce different cache keys
    # in different workers and defeat the shared cache.
    return cls._get_valid_metrics_and_dimensions(
        database_id,
        datasource_id,
        tuple(sorted(dimensions)),
        tuple(sorted(metrics)),
    )

@classmethod
@cache_manager.data_cache.memoize(timeout=86400)  # 1 day cache
def _get_valid_metrics_and_dimensions(
    cls,
    database_id: int,
    datasource_id: int,
    dimensions: tuple[str, ...],
    metrics: tuple[str, ...],
) -> ValidColumnsType:
    # Cached implementation; keyed on hashable, deterministically-ordered
    # arguments so the cache key is stable across processes.
    database = cls.find_by_id(database_id)
    if not database:
        raise ValueError(f"Database with id {database_id} not found")

    datasource = db.session.query(SqlaTable).get(datasource_id)
    if not datasource:
        raise ValueError(f"Table with id {datasource_id} not found")

    return database.db_engine_spec.get_valid_metrics_and_dimensions(
        database,
        datasource,
        set(dimensions),
        set(metrics),
    )
@classmethod
def get_metrics(
    cls,
    database_id: int,
    table: Table,
) -> list[dict[str, Any]]:
    """
    Fetch table metrics from the source system for semantic layer datasets.

    :param database_id: The database ID
    :param table: The table object with name, schema, and catalog
    :return: List of metrics from the source system
    :raises ValueError: If database not found or doesn't support dynamic metrics
    """
    database = cls.find_by_id(database_id)
    if database is None:
        raise ValueError(f"Database with id {database_id} not found")

    # Dynamic metrics are only available for semantic layer engines.
    if not database.db_engine_spec.supports_dynamic_columns:
        raise ValueError("Database does not support dynamic metrics")

    return database.get_metrics(table)
class SSHTunnelDAO(BaseDAO[SSHTunnel]):
@classmethod

View File

@@ -103,6 +103,7 @@ from superset.databases.schemas import (
UploadPostSchema,
ValidateSQLRequest,
ValidateSQLResponse,
ValidMetricsAndDimensionsRequestSchema,
)
from superset.databases.utils import get_table_metadata
from superset.db_engine_specs import get_available_engine_specs
@@ -164,6 +165,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"available",
"validate_parameters",
"validate_sql",
"valid_metrics_and_dimensions",
"delete_ssh_tunnel",
"schemas_access_for_file_upload",
"get_connection",
@@ -2098,3 +2100,70 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
database, database.get_default_catalog(), schemas_allowed, True
)
return self.response(200, schemas=schemas_allowed_processed)
@expose("/<int:pk>/valid_metrics_and_dimensions/", methods=("POST",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
    action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
    f".valid_metrics_and_dimensions",
    log_to_statsd=False,
)
@requires_json
def valid_metrics_and_dimensions(self, pk: int) -> FlaskResponse:
    """Get valid metrics and dimensions for a datasource.
    ---
    post:
      summary: Get valid metrics and dimensions for a datasource
      parameters:
      - in: path
        schema:
          type: integer
        name: pk
        description: The database ID
      requestBody:
        description: Valid metrics and dimensions request
        required: true
        content:
          application/json:
            schema:
              $ref: "#/components/schemas/ValidMetricsAndDimensionsRequestSchema"
      responses:
        200:
          description: Valid metrics and dimensions
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/ValidMetricsAndDimensionsResponseSchema"
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        500:
          $ref: '#/components/responses/500'
    """
    request_schema = ValidMetricsAndDimensionsRequestSchema()
    try:
        item = request_schema.load(request.json)
    except ValidationError as error:
        return self.response_400(message=error.messages)

    try:
        result = DatabaseDAO.get_valid_metrics_and_dimensions(
            pk,
            item["datasource_id"],
            set(item["dimensions"]),
            set(item["metrics"]),
        )
    except ValueError:
        # The DAO raises ValueError when the database or table does not
        # exist; surface that as the documented 404 instead of a 500.
        return self.response_404()

    # Sort for a deterministic response payload (the DAO returns sets).
    return self.response(
        200,
        dimensions=sorted(result["dimensions"]),
        metrics=sorted(result["metrics"]),
    )

View File

@@ -707,7 +707,9 @@ class TableMetadataResponseSchema(Schema):
TableMetadataPrimaryKeyResponseSchema,
metadata={"description": "Primary keys metadata"},
)
selectStar = fields.String(metadata={"description": "SQL select star"}) # noqa: N815
selectStar = fields.String(
metadata={"description": "SQL select star"}
) # noqa: N815
class TableExtraMetadataResponseSchema(Schema):
@@ -1341,3 +1343,31 @@ class QualifiedTableSchema(Schema):
load_default=None,
metadata={"description": "The table catalog"},
)
class ValidMetricsAndDimensionsRequestSchema(Schema):
    """
    Request payload for the ``valid_metrics_and_dimensions`` endpoint:
    the target datasource plus the currently selected dimensions/metrics.
    """

    datasource_id = fields.Integer(
        required=True,
        metadata={"description": "The datasource ID"},
    )
    dimensions = fields.List(
        fields.String(),
        required=True,
        metadata={"description": "List of selected dimension names"},
    )
    metrics = fields.List(
        fields.String(),
        required=True,
        metadata={"description": "List of selected metric names"},
    )
class ValidMetricsAndDimensionsResponseSchema(Schema):
    """
    Response payload for the ``valid_metrics_and_dimensions`` endpoint:
    the dimensions/metrics that remain valid given the current selection.
    """

    dimensions = fields.List(
        fields.String(),
        metadata={"description": "List of valid dimension names"},
    )
    metrics = fields.List(
        fields.String(),
        metadata={"description": "List of valid metric names"},
    )

View File

@@ -34,6 +34,7 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
from jinja2.exceptions import TemplateSyntaxError
from marshmallow import ValidationError
from sqlalchemy.exc import NoResultFound, NoSuchTableError
from superset import event_logger, is_feature_enabled
from superset.commands.dataset.create import CreateDatasetCommand
@@ -58,6 +59,7 @@ from superset.commands.importers.exceptions import NoValidFilesFoundError
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.connectors.sqla.models import SqlaTable
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.daos.database import DatabaseDAO
from superset.daos.dataset import DatasetDAO
from superset.databases.filters import DatabaseFilter
from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter
@@ -75,6 +77,7 @@ from superset.datasets.schemas import (
)
from superset.exceptions import SupersetTemplateException
from superset.jinja_context import BaseTemplateProcessor, get_template_processor
from superset.sql.parse import Table
from superset.utils import json
from superset.utils.core import parse_boolean_string
from superset.views.base import DatasourceFilter
@@ -110,6 +113,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"duplicate",
"get_or_create_dataset",
"warm_up_cache",
"sync_metrics",
}
list_columns = [
"id",
@@ -215,6 +219,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"columns.type_generic",
"database.backend",
"database.allow_multi_catalog",
"database.engine_information",
"columns.advanced_data_type",
"is_managed_externally",
"uid",
@@ -1174,6 +1179,71 @@ class DatasetRestApi(BaseSupersetModelRestApi):
return self.response_400(message=str(ex))
return self.response(200, **response)
@expose("/<pk>/sync_metrics", methods=("PUT",))
@protect()
@safe
@statsd_metrics
@event_logger.log_this_with_context(
    action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.sync_metrics",
    log_to_statsd=False,
)
def sync_metrics(self, pk: int) -> Response:
    """Sync table metrics from the source system for semantic layer datasets.
    ---
    put:
      summary: Sync table metrics from the source system for semantic layer datasets
      parameters:
      - in: path
        name: pk
        schema:
          type: integer
        description: The dataset ID
      responses:
        200:
          description: Metrics from the source system
          content:
            application/json:
              schema:
                type: array
                items:
                  type: object
                  properties:
                    metric_name:
                      type: string
                    expression:
                      type: string
                    description:
                      type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        403:
          $ref: '#/components/responses/403'
        404:
          $ref: '#/components/responses/404'
        500:
          $ref: '#/components/responses/500'
    """
    # Look up the dataset; respond 404 when it does not exist.
    try:
        dataset = DatasetDAO.find_by_id(pk)
        if not dataset:
            return self.response_404()
    except DatasetNotFoundError:
        return self.response_404()

    try:
        # Build a fully-qualified table reference from the dataset.
        table = Table(dataset.table_name, dataset.schema, catalog=dataset.catalog)
        # Fetch metrics from the source system via the database engine spec.
        metrics = DatabaseDAO.get_metrics(dataset.database_id, table)
        return self.response(200, result=metrics)
    except (NoResultFound, NoSuchTableError):
        # The underlying table no longer exists in the source system.
        return self.response_404()
    except ValueError as ex:
        # Raised when the database is missing or does not support dynamic
        # metrics (i.e. it is not a semantic layer database).
        return self.response_400(message=str(ex))
@staticmethod
def render_dataset_fields(
data: dict[str, Any], processor: BaseTemplateProcessor

View File

@@ -85,7 +85,7 @@ from superset.utils.network import is_hostname_valid, is_port_open
from superset.utils.oauth2 import encode_oauth2_state
if TYPE_CHECKING:
from superset.connectors.sqla.models import TableColumn
from superset.connectors.sqla.models import SqlaTable, TableColumn
from superset.databases.schemas import TableMetadataResponse
from superset.models.core import Database
from superset.models.sql_lab import Query
@@ -143,7 +143,9 @@ builtin_time_grains: dict[str | None, str] = {
}
class TimestampExpression(ColumnClause): # pylint: disable=abstract-method, too-many-ancestors
class TimestampExpression(
ColumnClause
): # pylint: disable=abstract-method, too-many-ancestors
def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None:
"""Sqlalchemy class that can be used to render native column elements respecting
engine-specific quoting rules as part of a string-based expression.
@@ -186,6 +188,15 @@ class MetricType(TypedDict, total=False):
extra: str | None
class ValidColumnsType(TypedDict):
    """
    Type for valid columns returned by `get_valid_metrics_and_dimensions`.
    """

    # Column names that can still be selected given the current selection.
    dimensions: set[str]
    # Metric names that can still be selected given the current selection.
    metrics: set[str]
class BaseEngineSpec: # pylint: disable=too-many-public-methods
"""Abstract class for database engine specific configurations
@@ -384,9 +395,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
max_column_name_length: int | None = None
try_remove_schema_from_table_name = True # pylint: disable=invalid-name
run_multiple_statements_as_one = False
custom_errors: dict[
Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]
] = {}
custom_errors: dict[Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]] = (
{}
)
# List of JSON path to fields in `encrypted_extra` that should be masked when the
# database is edited. By default everything is masked.
@@ -436,6 +447,11 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
# the `cancel_query` value in the `extra` field of the `query` object
has_query_id_before_execute = True
# This attribute is used for semantic layers, where only certain combinations of
# metrics and dimensions are valid for given datasource. For traditional databases
# this should be set to false.
supports_dynamic_columns = False
@classmethod
def get_rls_method(cls) -> RLSMethod:
"""
@@ -1501,6 +1517,31 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
}
]
@classmethod
def get_valid_metrics_and_dimensions(
    cls,
    database: Database,
    table: SqlaTable,
    dimensions: set[str],
    metrics: set[str],
) -> ValidColumnsType:
    """
    Given a selection of columns/metrics from a datasource, return related columns.

    This is a method used for semantic layers, where tables can have columns and
    metrics that cannot be computed together. When the user selects a given metric
    it allows the UI to filter the remaining metrics and dimensions so that only
    valid combinations are possible.

    The method should only be called when ``supports_dynamic_columns`` is set to
    true. The default method in the base class ignores the selected columns and
    metrics, and simply returns everything, for reference.
    """
    # Traditional databases place no restriction on combinations, so every
    # column and metric on the table is always valid.
    valid_dimensions = {col.column_name for col in table.columns}
    valid_metrics = {m.metric_name for m in table.metrics}
    return {"dimensions": valid_dimensions, "metrics": valid_metrics}
@classmethod
def where_latest_partition( # pylint: disable=unused-argument
cls,
@@ -2148,6 +2189,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
"supports_file_upload": cls.supports_file_upload,
"disable_ssh_tunneling": cls.disable_ssh_tunneling,
"supports_dynamic_catalog": cls.supports_dynamic_catalog,
"supports_dynamic_columns": cls.supports_dynamic_columns,
"supports_oauth2": cls.supports_oauth2,
}

View File

@@ -0,0 +1,194 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
An interface to dbt's semantic layer, Metric Flow.
"""
from __future__ import annotations
from typing import Any, TYPE_CHECKING, TypedDict
from shillelagh.backends.apsw.dialects.base import get_adapter_for_table_name
from shillelagh.backends.apsw.dialects.metricflow import TABLE_NAME
from sqlalchemy import event
from sqlalchemy.engine import Connection, Engine
from superset.connectors.sqla.models import SqlaTable
from superset.constants import TimeGrain
from superset.db_engine_specs.base import ValidColumnsType
from superset.db_engine_specs.shillelagh import ShillelaghEngineSpec
from superset.extensions import cache_manager
from superset.utils.cache import memoized_func
if TYPE_CHECKING:
from shillelagh.fields import Field
from sqlalchemy.engine.reflection import Inspector
from superset.models.core import Database
from superset.sql.parse import Table
from superset.superset_typing import ResultSetColumnType
@event.listens_for(Engine, "engine_connect")
def receive_engine_connect(conn: Connection, branch: bool) -> None:
    """
    Called when a new DB connection is created.

    This hook adds a cache to the `_build_column_from_dimension` method of the
    Metric Flow adapter, since it's called frequently and can be expensive.
    """
    engine = conn.engine
    # Only act on connections using the Metric Flow dialect.
    if not engine or not engine.name == "metricflow":
        return

    # Imported lazily so the shillelagh adapter is only required when a
    # Metric Flow connection is actually made.
    from shillelagh.adapters.api.dbt_metricflow import DbtMetricFlowAPI

    # Patch the adapter class at most once per process.
    if getattr(DbtMetricFlowAPI, "_patched", False):
        return

    original_method = DbtMetricFlowAPI._build_column_from_dimension

    # NOTE(review): the cache key only includes the dimension name, not the
    # dbt host/environment — two databases with a same-named dimension would
    # share a cache entry. Confirm this is acceptable.
    @memoized_func(
        key="metricflow:dimension:{name}",
        cache=cache_manager.data_cache,
    )
    def cached_build_column_from_dimension(
        self: DbtMetricFlowAPI,
        name: str,
        *args: Any,
        **kwargs: Any,
    ) -> Field:
        # NOTE(review): extra *args/**kwargs are accepted but not forwarded to
        # the original method — confirm its signature only takes `name`.
        return original_method(self, name)

    DbtMetricFlowAPI._build_column_from_dimension = cached_build_column_from_dimension
    DbtMetricFlowAPI._patched = True
# Warning returned instead of a data preview, since the "metrics" table is
# virtual. The original message was missing the separator between the
# "Advanced" and "SQL Lab" menu names ('"Advanced""SQL Lab"').
SELECT_STAR_MESSAGE = (
    'The dbt semantic layer does not support data preview, since the "metrics" table '
    "is a virtual table that is not materialized. An administrator should configure "
    'the database in Apache Superset so that the "Disable SQL Lab data preview '
    'queries" option under "Advanced" -> "SQL Lab" is enabled.'
)
class MetricType(TypedDict, total=False):
    """
    Type for metrics returned by `get_metrics`.

    NOTE(review): this appears to mirror ``MetricType`` in
    ``superset.db_engine_specs.base``; consider importing it instead of
    duplicating the definition — confirm the two stay in sync.
    """

    metric_name: str
    expression: str
    verbose_name: str | None
    metric_type: str | None
    description: str | None
    d3format: str | None
    warning_text: str | None
    extra: str | None
class DbtMetricFlowEngineSpec(ShillelaghEngineSpec):
    """
    Engine spec for the dbt semantic layer (Metric Flow).
    """

    engine = "metricflow"
    engine_name = "dbt Metric Flow"
    sqlalchemy_uri_placeholder = (
        "metricflow://[ab123.us1.dbt.com]/<environment_id>"
        "?service_token=<service_token>"
    )

    # Only certain metric/dimension combinations are valid, so the UI must
    # filter choices via `get_valid_metrics_and_dimensions`.
    supports_dynamic_columns = True

    # Metric Flow exposes time grains as `<column>__<grain>` pseudo-columns.
    _time_grain_expressions = {
        TimeGrain.DAY: "{col}__day",
        TimeGrain.WEEK: "{col}__week",
        TimeGrain.MONTH: "{col}__month",
        TimeGrain.QUARTER: "{col}__quarter",
        TimeGrain.YEAR: "{col}__year",
    }

    @classmethod
    def select_star(cls, *args: Any, **kwargs: Any) -> str:
        """
        Return a query explaining why data preview is unavailable.

        The "metrics" table is virtual and cannot be previewed, so instead of
        a real ``SELECT *`` we return a single-row warning message.
        """
        # Double any single quotes so the message is a valid SQL string literal.
        message = SELECT_STAR_MESSAGE.replace("'", "''")
        return f"SELECT '{message}' AS warning"

    @classmethod
    def get_columns(
        cls,
        inspector: Inspector,
        table: Table,
        options: dict[str, Any] | None = None,
    ) -> list[ResultSetColumnType]:
        """
        Return the table's dimensions.

        The inspector reports metrics as computed columns; those are excluded
        here and exposed separately via `get_metrics`.
        """
        columns: list[ResultSetColumnType] = []
        for column in inspector.get_columns(table.table, table.schema):
            # Columns with a "computed" payload are metrics, not dimensions.
            if "computed" in column:
                continue
            column["column_name"] = column["name"]
            columns.append(column)
        return columns

    @classmethod
    def get_metrics(
        cls,
        database: Database,
        inspector: Inspector,
        table: Table,
    ) -> list[MetricType]:
        """
        Get all metrics, i.e. the computed columns reported by the inspector.
        """
        return [
            {
                "metric_name": column["name"],
                "expression": column["computed"]["sqltext"],
                "description": column["comment"],
            }
            for column in inspector.get_columns(table.table, table.schema)
            if "computed" in column
        ]

    @classmethod
    def get_valid_metrics_and_dimensions(
        cls,
        database: Database,
        table: SqlaTable,
        dimensions: set[str],
        metrics: set[str],
    ) -> ValidColumnsType:
        """
        Get valid metrics and dimensions.

        Given a datasource, and sets of selected metrics and dimensions, return the
        sets of valid metrics and dimensions that can further be selected.
        """
        with database.get_sqla_engine() as engine:
            # Use a context manager so the connection is closed when done;
            # the previous implementation leaked it.
            with engine.connect() as connection:
                adapter = get_adapter_for_table_name(connection, TABLE_NAME)
                return {
                    # Metrics compatible with the selected dimensions, and
                    # dimensions compatible with the selected metrics.
                    "metrics": adapter._get_metrics_for_dimensions(dimensions),
                    "dimensions": adapter._get_dimensions_for_metrics(metrics),
                }

View File

@@ -112,7 +112,7 @@ class SupersetAPSWDialect(APSWDialect):
"superset": {
"prefix": None,
"allowed_dbs": self.allowed_dbs,
}
},
},
"safe": True,
"isolation_level": self.isolation_level,

View File

@@ -85,6 +85,7 @@ SQLGLOT_DIALECTS = {
# "kustosql": ???
# "kylin": ???
"mariadb": Dialects.MYSQL,
"metricflow": Dialects.SQLITE,
"motherduck": Dialects.DUCKDB,
"mssql": Dialects.TSQL,
"mysql": Dialects.MYSQL,

View File

@@ -189,6 +189,7 @@ class Datasource(BaseSupersetView):
raise DatasetNotFoundError() from ex
return self.json_response(external_metadata)
@expose("/samples", methods=("POST",))
@has_access_api
@api