mirror of
https://github.com/apache/superset.git
synced 2026-05-03 06:54:19 +00:00
Compare commits
32 Commits
docs/testi
...
canva-demo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
66cb9cb40c | ||
|
|
4e9ea4b17a | ||
|
|
30deae5a86 | ||
|
|
cbda9b7e2f | ||
|
|
92aa057787 | ||
|
|
983227da39 | ||
|
|
e3f1ff71af | ||
|
|
6a3ee1f44b | ||
|
|
37d2ab7afc | ||
|
|
428ac7a370 | ||
|
|
daee150811 | ||
|
|
1166193af7 | ||
|
|
a2c8856592 | ||
|
|
17c1fff32e | ||
|
|
8432ee47d6 | ||
|
|
a8ccce5762 | ||
|
|
78ce22d751 | ||
|
|
e51b352cff | ||
|
|
ff87aa155a | ||
|
|
1bdfb7db5f | ||
|
|
37b3b7e03a | ||
|
|
1d82e85a55 | ||
|
|
3430d69972 | ||
|
|
81cf3ca024 | ||
|
|
b17ea1c875 | ||
|
|
25b06dbedb | ||
|
|
b27d6dc9b6 | ||
|
|
de13b1cf44 | ||
|
|
83c8c4d7e5 | ||
|
|
7dea14a0c6 | ||
|
|
8b5bd0f58f | ||
|
|
214f0fa5a5 |
6
setup.py
6
setup.py
@@ -30,7 +30,9 @@ with open(PACKAGE_JSON) as package_file:
|
|||||||
|
|
||||||
def get_git_sha() -> str:
|
def get_git_sha() -> str:
|
||||||
try:
|
try:
|
||||||
output = subprocess.check_output(["git", "rev-parse", "HEAD"]) # noqa: S603, S607
|
output = subprocess.check_output(
|
||||||
|
["git", "rev-parse", "HEAD"]
|
||||||
|
) # noqa: S603, S607
|
||||||
return output.decode().strip()
|
return output.decode().strip()
|
||||||
except Exception: # pylint: disable=broad-except
|
except Exception: # pylint: disable=broad-except
|
||||||
return ""
|
return ""
|
||||||
@@ -67,7 +69,7 @@ setup(
|
|||||||
"superset = superset.extensions.metadb:SupersetAPSWDialect",
|
"superset = superset.extensions.metadb:SupersetAPSWDialect",
|
||||||
],
|
],
|
||||||
"shillelagh.adapter": [
|
"shillelagh.adapter": [
|
||||||
"superset=superset.extensions.metadb:SupersetShillelaghAdapter"
|
"superset = superset.extensions.metadb:SupersetShillelaghAdapter",
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
download_url="https://www.apache.org/dist/superset/" + version_string,
|
download_url="https://www.apache.org/dist/superset/" + version_string,
|
||||||
|
|||||||
@@ -47,6 +47,149 @@ type Control = {
|
|||||||
default?: unknown;
|
default?: unknown;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Semantic layer verification functions - will be set from main app
|
||||||
|
let withAsyncVerification: any = null;
|
||||||
|
let createMetricsVerification: any = null;
|
||||||
|
let createColumnsVerification: any = null;
|
||||||
|
let createSemanticLayerOnChange: any = null;
|
||||||
|
let SEMANTIC_LAYER_CONTROL_FIELDS: any = null;
|
||||||
|
|
||||||
|
// Notification system for when utilities are set
|
||||||
|
const enhancedControls: Array<{
|
||||||
|
controlName: string;
|
||||||
|
invalidateCache: () => void;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
// Export function to set semantic layer utilities from main app
|
||||||
|
export function setSemanticLayerUtilities(utilities: {
|
||||||
|
withAsyncVerification: any;
|
||||||
|
createMetricsVerification: any;
|
||||||
|
createColumnsVerification: any;
|
||||||
|
createSemanticLayerOnChange: any;
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS: any;
|
||||||
|
}) {
|
||||||
|
({
|
||||||
|
withAsyncVerification,
|
||||||
|
createMetricsVerification,
|
||||||
|
createColumnsVerification,
|
||||||
|
createSemanticLayerOnChange,
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
} = utilities);
|
||||||
|
|
||||||
|
// Notify all enhanced controls that utilities are now available
|
||||||
|
enhancedControls.forEach(control => {
|
||||||
|
control.invalidateCache();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a datasource supports semantic layer verification
|
||||||
|
*/
|
||||||
|
function needsSemanticLayerVerification(datasource: Dataset): boolean {
|
||||||
|
if (!datasource || !('database' in datasource) || !datasource.database) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const database = datasource.database as any;
|
||||||
|
return Boolean(database.engine_information?.supports_dynamic_columns);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhance a control with semantic layer verification if available
|
||||||
|
* This creates a lazy-enhanced control that checks for utilities at runtime
|
||||||
|
*/
|
||||||
|
function enhanceControlWithSemanticLayer(
|
||||||
|
baseControl: any,
|
||||||
|
controlName: string,
|
||||||
|
verificationType: 'metrics' | 'columns',
|
||||||
|
) {
|
||||||
|
// Cache for the enhanced control type
|
||||||
|
let cachedEnhancedType: any = null;
|
||||||
|
let utilitiesWereAvailable = false;
|
||||||
|
|
||||||
|
// Register with notification system
|
||||||
|
enhancedControls.push({
|
||||||
|
controlName,
|
||||||
|
invalidateCache: () => {
|
||||||
|
cachedEnhancedType = null;
|
||||||
|
utilitiesWereAvailable = false;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Return a control that will be enhanced at runtime if utilities are available
|
||||||
|
return {
|
||||||
|
...baseControl,
|
||||||
|
// Override the type to use a function that checks for enhancement at runtime
|
||||||
|
get type() {
|
||||||
|
// Check if utilities became available since last call
|
||||||
|
const utilitiesAvailableNow = !!withAsyncVerification;
|
||||||
|
|
||||||
|
if (utilitiesAvailableNow) {
|
||||||
|
// If utilities just became available or we haven't cached yet, create enhanced control
|
||||||
|
if (!utilitiesWereAvailable || !cachedEnhancedType) {
|
||||||
|
const verificationFn =
|
||||||
|
verificationType === 'metrics'
|
||||||
|
? createMetricsVerification(controlName)
|
||||||
|
: createColumnsVerification(controlName);
|
||||||
|
|
||||||
|
cachedEnhancedType = withAsyncVerification({
|
||||||
|
baseControl: baseControl.type,
|
||||||
|
verify: verificationFn,
|
||||||
|
onChange: createSemanticLayerOnChange(
|
||||||
|
controlName,
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
),
|
||||||
|
showLoadingState: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
utilitiesWereAvailable = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return cachedEnhancedType;
|
||||||
|
}
|
||||||
|
|
||||||
|
utilitiesWereAvailable = false;
|
||||||
|
return baseControl.type;
|
||||||
|
},
|
||||||
|
mapStateToProps: (state: any, controlState: any) => {
|
||||||
|
// Call the original mapStateToProps if it exists
|
||||||
|
const originalProps = baseControl.mapStateToProps
|
||||||
|
? baseControl.mapStateToProps(state, controlState)
|
||||||
|
: {};
|
||||||
|
|
||||||
|
// Only add semantic layer props if utilities are available
|
||||||
|
if (withAsyncVerification) {
|
||||||
|
const needsVerification = needsSemanticLayerVerification(
|
||||||
|
state.datasource,
|
||||||
|
);
|
||||||
|
|
||||||
|
// Check if there's existing data that needs verification
|
||||||
|
const hasExistingData =
|
||||||
|
controlState?.value &&
|
||||||
|
((Array.isArray(controlState.value) &&
|
||||||
|
controlState.value.length > 0) ||
|
||||||
|
(!Array.isArray(controlState.value) &&
|
||||||
|
controlState.value !== null &&
|
||||||
|
controlState.value !== undefined));
|
||||||
|
|
||||||
|
return {
|
||||||
|
...originalProps,
|
||||||
|
needAsyncVerification: needsVerification,
|
||||||
|
// Only enable initial verification if there's existing data (like saved charts)
|
||||||
|
// For new charts, rely only on onChange to prevent duplicate requests
|
||||||
|
skipEffectVerification: !hasExistingData,
|
||||||
|
form_data: state.form_data,
|
||||||
|
datasource: state.datasource, // Pass datasource to verification function
|
||||||
|
// Add a flag to indicate this is a fresh chart that needs initial verification
|
||||||
|
triggerInitialVerification: needsVerification && !hasExistingData,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return originalProps;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Note: Previous to the commit that introduced this comment, the shared controls module
|
* Note: Previous to the commit that introduced this comment, the shared controls module
|
||||||
* would check feature flags at module execution time and expose a different control
|
* would check feature flags at module execution time and expose a different control
|
||||||
@@ -70,7 +213,7 @@ function filterOptions(
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export const dndGroupByControl: SharedControlConfig<
|
const baseDndGroupByControl: SharedControlConfig<
|
||||||
'DndColumnSelect' | 'SelectControl',
|
'DndColumnSelect' | 'SelectControl',
|
||||||
ColumnMeta
|
ColumnMeta
|
||||||
> = {
|
> = {
|
||||||
@@ -123,14 +266,26 @@ export const dndGroupByControl: SharedControlConfig<
|
|||||||
commaChoosesOption: false,
|
commaChoosesOption: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndColumnsControl: typeof dndGroupByControl = {
|
export const dndGroupByControl = enhanceControlWithSemanticLayer(
|
||||||
...dndGroupByControl,
|
baseDndGroupByControl,
|
||||||
|
'groupby',
|
||||||
|
'columns',
|
||||||
|
);
|
||||||
|
|
||||||
|
const baseDndColumnsControl: typeof baseDndGroupByControl = {
|
||||||
|
...baseDndGroupByControl,
|
||||||
label: t('Columns'),
|
label: t('Columns'),
|
||||||
description: t('Add dataset columns here to group the pivot table columns.'),
|
description: t('Add dataset columns here to group the pivot table columns.'),
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndSeriesControl: typeof dndGroupByControl = {
|
export const dndColumnsControl = enhanceControlWithSemanticLayer(
|
||||||
...dndGroupByControl,
|
baseDndColumnsControl,
|
||||||
|
'columns',
|
||||||
|
'columns',
|
||||||
|
);
|
||||||
|
|
||||||
|
const baseDndSeriesControl: typeof baseDndGroupByControl = {
|
||||||
|
...baseDndGroupByControl,
|
||||||
label: t('Dimension'),
|
label: t('Dimension'),
|
||||||
multi: false,
|
multi: false,
|
||||||
default: null,
|
default: null,
|
||||||
@@ -140,8 +295,14 @@ export const dndSeriesControl: typeof dndGroupByControl = {
|
|||||||
),
|
),
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndEntityControl: typeof dndGroupByControl = {
|
export const dndSeriesControl = enhanceControlWithSemanticLayer(
|
||||||
...dndGroupByControl,
|
baseDndSeriesControl,
|
||||||
|
'series',
|
||||||
|
'columns',
|
||||||
|
);
|
||||||
|
|
||||||
|
const baseDndEntityControl: typeof baseDndGroupByControl = {
|
||||||
|
...baseDndGroupByControl,
|
||||||
label: t('Entity'),
|
label: t('Entity'),
|
||||||
default: null,
|
default: null,
|
||||||
multi: false,
|
multi: false,
|
||||||
@@ -149,6 +310,12 @@ export const dndEntityControl: typeof dndGroupByControl = {
|
|||||||
description: t('This defines the element to be plotted on the chart'),
|
description: t('This defines the element to be plotted on the chart'),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const dndEntityControl = enhanceControlWithSemanticLayer(
|
||||||
|
baseDndEntityControl,
|
||||||
|
'entity',
|
||||||
|
'columns',
|
||||||
|
);
|
||||||
|
|
||||||
export const dndAdhocFilterControl: SharedControlConfig<
|
export const dndAdhocFilterControl: SharedControlConfig<
|
||||||
'DndFilterSelect' | 'AdhocFilterControl'
|
'DndFilterSelect' | 'AdhocFilterControl'
|
||||||
> = {
|
> = {
|
||||||
@@ -170,7 +337,7 @@ export const dndAdhocFilterControl: SharedControlConfig<
|
|||||||
...datePickerInAdhocFilterMixin,
|
...datePickerInAdhocFilterMixin,
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndAdhocMetricsControl: SharedControlConfig<
|
const baseDndAdhocMetricsControl: SharedControlConfig<
|
||||||
'DndMetricSelect' | 'MetricsControl'
|
'DndMetricSelect' | 'MetricsControl'
|
||||||
> = {
|
> = {
|
||||||
type: 'DndMetricSelect',
|
type: 'DndMetricSelect',
|
||||||
@@ -190,8 +357,14 @@ export const dndAdhocMetricsControl: SharedControlConfig<
|
|||||||
),
|
),
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndAdhocMetricControl: typeof dndAdhocMetricsControl = {
|
export const dndAdhocMetricsControl = enhanceControlWithSemanticLayer(
|
||||||
...dndAdhocMetricsControl,
|
baseDndAdhocMetricsControl,
|
||||||
|
'metrics',
|
||||||
|
'metrics',
|
||||||
|
);
|
||||||
|
|
||||||
|
const baseDndAdhocMetricControl: typeof baseDndAdhocMetricsControl = {
|
||||||
|
...baseDndAdhocMetricsControl,
|
||||||
multi: false,
|
multi: false,
|
||||||
label: t('Metric'),
|
label: t('Metric'),
|
||||||
description: t(
|
description: t(
|
||||||
@@ -201,6 +374,12 @@ export const dndAdhocMetricControl: typeof dndAdhocMetricsControl = {
|
|||||||
),
|
),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const dndAdhocMetricControl = enhanceControlWithSemanticLayer(
|
||||||
|
baseDndAdhocMetricControl,
|
||||||
|
'metric',
|
||||||
|
'metrics',
|
||||||
|
);
|
||||||
|
|
||||||
export const dndTooltipColumnsControl: typeof dndColumnsControl = {
|
export const dndTooltipColumnsControl: typeof dndColumnsControl = {
|
||||||
...dndColumnsControl,
|
...dndColumnsControl,
|
||||||
label: t('Tooltip (columns)'),
|
label: t('Tooltip (columns)'),
|
||||||
@@ -214,13 +393,19 @@ export const dndTooltipMetricsControl: typeof dndAdhocMetricsControl = {
|
|||||||
validators: [],
|
validators: [],
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndAdhocMetricControl2: typeof dndAdhocMetricControl = {
|
const baseDndAdhocMetricControl2: typeof baseDndAdhocMetricControl = {
|
||||||
...dndAdhocMetricControl,
|
...baseDndAdhocMetricControl,
|
||||||
label: t('Right Axis Metric'),
|
label: t('Right Axis Metric'),
|
||||||
clearable: true,
|
clearable: true,
|
||||||
description: t('Select a metric to display on the right axis'),
|
description: t('Select a metric to display on the right axis'),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const dndAdhocMetricControl2 = enhanceControlWithSemanticLayer(
|
||||||
|
baseDndAdhocMetricControl2,
|
||||||
|
'metric_2',
|
||||||
|
'metrics',
|
||||||
|
);
|
||||||
|
|
||||||
export const dndSortByControl: SharedControlConfig<
|
export const dndSortByControl: SharedControlConfig<
|
||||||
'DndMetricSelect' | 'MetricsControl'
|
'DndMetricSelect' | 'MetricsControl'
|
||||||
> = {
|
> = {
|
||||||
@@ -240,15 +425,21 @@ export const dndSortByControl: SharedControlConfig<
|
|||||||
}),
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndSizeControl: typeof dndAdhocMetricControl = {
|
const baseDndSizeControl: typeof baseDndAdhocMetricControl = {
|
||||||
...dndAdhocMetricControl,
|
...baseDndAdhocMetricControl,
|
||||||
label: t('Bubble Size'),
|
label: t('Bubble Size'),
|
||||||
description: t('Metric used to calculate bubble size'),
|
description: t('Metric used to calculate bubble size'),
|
||||||
default: null,
|
default: null,
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndXControl: typeof dndAdhocMetricControl = {
|
export const dndSizeControl = enhanceControlWithSemanticLayer(
|
||||||
...dndAdhocMetricControl,
|
baseDndSizeControl,
|
||||||
|
'size',
|
||||||
|
'metrics',
|
||||||
|
);
|
||||||
|
|
||||||
|
const baseDndXControl: typeof baseDndAdhocMetricControl = {
|
||||||
|
...baseDndAdhocMetricControl,
|
||||||
label: t('X Axis'),
|
label: t('X Axis'),
|
||||||
description: t(
|
description: t(
|
||||||
"The dataset column/metric that returns the values on your chart's x-axis.",
|
"The dataset column/metric that returns the values on your chart's x-axis.",
|
||||||
@@ -256,8 +447,14 @@ export const dndXControl: typeof dndAdhocMetricControl = {
|
|||||||
default: null,
|
default: null,
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndYControl: typeof dndAdhocMetricControl = {
|
export const dndXControl = enhanceControlWithSemanticLayer(
|
||||||
...dndAdhocMetricControl,
|
baseDndXControl,
|
||||||
|
'x',
|
||||||
|
'metrics',
|
||||||
|
);
|
||||||
|
|
||||||
|
const baseDndYControl: typeof baseDndAdhocMetricControl = {
|
||||||
|
...baseDndAdhocMetricControl,
|
||||||
label: t('Y Axis'),
|
label: t('Y Axis'),
|
||||||
description: t(
|
description: t(
|
||||||
"The dataset column/metric that returns the values on your chart's y-axis.",
|
"The dataset column/metric that returns the values on your chart's y-axis.",
|
||||||
@@ -265,14 +462,26 @@ export const dndYControl: typeof dndAdhocMetricControl = {
|
|||||||
default: null,
|
default: null,
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndSecondaryMetricControl: typeof dndAdhocMetricControl = {
|
export const dndYControl = enhanceControlWithSemanticLayer(
|
||||||
...dndAdhocMetricControl,
|
baseDndYControl,
|
||||||
|
'y',
|
||||||
|
'metrics',
|
||||||
|
);
|
||||||
|
|
||||||
|
const baseDndSecondaryMetricControl: typeof baseDndAdhocMetricControl = {
|
||||||
|
...baseDndAdhocMetricControl,
|
||||||
label: t('Color Metric'),
|
label: t('Color Metric'),
|
||||||
default: null,
|
default: null,
|
||||||
validators: [],
|
validators: [],
|
||||||
description: t('A metric to use for color'),
|
description: t('A metric to use for color'),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const dndSecondaryMetricControl = enhanceControlWithSemanticLayer(
|
||||||
|
baseDndSecondaryMetricControl,
|
||||||
|
'secondary_metric',
|
||||||
|
'metrics',
|
||||||
|
);
|
||||||
|
|
||||||
export const dndGranularitySqlaControl: typeof dndSeriesControl = {
|
export const dndGranularitySqlaControl: typeof dndSeriesControl = {
|
||||||
...dndSeriesControl,
|
...dndSeriesControl,
|
||||||
...temporalColumnMixin,
|
...temporalColumnMixin,
|
||||||
@@ -293,7 +502,13 @@ export const dndGranularitySqlaControl: typeof dndSeriesControl = {
|
|||||||
valueKey: 'column_name',
|
valueKey: 'column_name',
|
||||||
};
|
};
|
||||||
|
|
||||||
export const dndXAxisControl: typeof dndGroupByControl = {
|
const baseDndXAxisControl: typeof baseDndGroupByControl = {
|
||||||
...dndGroupByControl,
|
...baseDndGroupByControl,
|
||||||
...xAxisMixin,
|
...xAxisMixin,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const dndXAxisControl = enhanceControlWithSemanticLayer(
|
||||||
|
baseDndXAxisControl,
|
||||||
|
'x_axis',
|
||||||
|
'columns',
|
||||||
|
);
|
||||||
|
|||||||
@@ -0,0 +1,99 @@
|
|||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
dndAdhocMetricsControl,
|
||||||
|
dndAdhocMetricControl,
|
||||||
|
dndAdhocMetricControl2,
|
||||||
|
dndGroupByControl,
|
||||||
|
dndColumnsControl,
|
||||||
|
} from './dndControls';
|
||||||
|
// Placeholder for semantic layer controls - these would be imported from the main app
|
||||||
|
const semanticLayerDndAdhocMetricsControl = null;
|
||||||
|
const semanticLayerDndAdhocMetricControl = null;
|
||||||
|
const semanticLayerDndAdhocMetricControl2 = null;
|
||||||
|
const semanticLayerDndGroupByControl = null;
|
||||||
|
const semanticLayerDndColumnsControl = null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced shared controls that include semantic layer verification
|
||||||
|
* when using compatible datasources.
|
||||||
|
*/
|
||||||
|
export const enhancedSharedControls = {
|
||||||
|
// Original controls
|
||||||
|
dndAdhocMetricsControl,
|
||||||
|
dndAdhocMetricControl,
|
||||||
|
dndAdhocMetricControl2,
|
||||||
|
dndGroupByControl,
|
||||||
|
dndColumnsControl,
|
||||||
|
|
||||||
|
// Enhanced controls with semantic layer verification
|
||||||
|
semanticLayerDndAdhocMetricsControl,
|
||||||
|
semanticLayerDndAdhocMetricControl,
|
||||||
|
semanticLayerDndAdhocMetricControl2,
|
||||||
|
semanticLayerDndGroupByControl,
|
||||||
|
semanticLayerDndColumnsControl,
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the appropriate control based on datasource capabilities
|
||||||
|
*/
|
||||||
|
export function getSemanticLayerControl(
|
||||||
|
controlName: string,
|
||||||
|
datasource?: any,
|
||||||
|
): any {
|
||||||
|
// Check if datasource supports semantic layer verification
|
||||||
|
const supportsSemanticLayer =
|
||||||
|
datasource &&
|
||||||
|
'database' in datasource &&
|
||||||
|
datasource.database?.engine_information?.supports_dynamic_columns;
|
||||||
|
|
||||||
|
if (supportsSemanticLayer) {
|
||||||
|
switch (controlName) {
|
||||||
|
case 'dndAdhocMetricsControl':
|
||||||
|
return semanticLayerDndAdhocMetricsControl;
|
||||||
|
case 'dndAdhocMetricControl':
|
||||||
|
return semanticLayerDndAdhocMetricControl;
|
||||||
|
case 'dndAdhocMetricControl2':
|
||||||
|
return semanticLayerDndAdhocMetricControl2;
|
||||||
|
case 'dndGroupByControl':
|
||||||
|
return semanticLayerDndGroupByControl;
|
||||||
|
case 'dndColumnsControl':
|
||||||
|
return semanticLayerDndColumnsControl;
|
||||||
|
default:
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return original control for non-semantic layer datasources
|
||||||
|
switch (controlName) {
|
||||||
|
case 'dndAdhocMetricsControl':
|
||||||
|
return dndAdhocMetricsControl;
|
||||||
|
case 'dndAdhocMetricControl':
|
||||||
|
return dndAdhocMetricControl;
|
||||||
|
case 'dndAdhocMetricControl2':
|
||||||
|
return dndAdhocMetricControl2;
|
||||||
|
case 'dndGroupByControl':
|
||||||
|
return dndGroupByControl;
|
||||||
|
case 'dndColumnsControl':
|
||||||
|
return dndColumnsControl;
|
||||||
|
default:
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -244,10 +244,12 @@ const config: ControlPanelConfig = {
|
|||||||
controlState: ControlState,
|
controlState: ControlState,
|
||||||
) => {
|
) => {
|
||||||
const { controls } = state;
|
const { controls } = state;
|
||||||
|
// Get the enhanced mapStateToProps from the DND control (includes semantic layer verification)
|
||||||
const originalMapStateToProps =
|
const originalMapStateToProps =
|
||||||
sharedControls?.groupby?.mapStateToProps;
|
sharedControls?.groupby?.mapStateToProps;
|
||||||
const newState =
|
const newState =
|
||||||
originalMapStateToProps?.(state, controlState) ?? {};
|
originalMapStateToProps?.(state, controlState) ?? {};
|
||||||
|
// Add table-specific validation while preserving semantic layer enhancements
|
||||||
newState.externalValidationErrors = validateAggControlValues(
|
newState.externalValidationErrors = validateAggControlValues(
|
||||||
controls,
|
controls,
|
||||||
[
|
[
|
||||||
@@ -300,26 +302,40 @@ const config: ControlPanelConfig = {
|
|||||||
visibility: isAggMode,
|
visibility: isAggMode,
|
||||||
resetOnHide: false,
|
resetOnHide: false,
|
||||||
mapStateToProps: (
|
mapStateToProps: (
|
||||||
{ controls, datasource, form_data }: ControlPanelState,
|
state: ControlPanelState,
|
||||||
controlState: ControlState,
|
controlState: ControlState,
|
||||||
) => ({
|
) => {
|
||||||
columns: datasource?.columns[0]?.hasOwnProperty('filterable')
|
const { controls, datasource, form_data } = state;
|
||||||
|
// Get the enhanced mapStateToProps from the DND control (includes semantic layer verification)
|
||||||
|
const originalMapStateToProps =
|
||||||
|
sharedControls?.metrics?.mapStateToProps;
|
||||||
|
const newState =
|
||||||
|
originalMapStateToProps?.(state, controlState) ?? {};
|
||||||
|
|
||||||
|
// Add table-specific props while preserving semantic layer enhancements
|
||||||
|
newState.columns = datasource?.columns[0]?.hasOwnProperty(
|
||||||
|
'filterable',
|
||||||
|
)
|
||||||
? (datasource as Dataset)?.columns?.filter(
|
? (datasource as Dataset)?.columns?.filter(
|
||||||
(c: ColumnMeta) => c.filterable,
|
(c: ColumnMeta) => c.filterable,
|
||||||
)
|
)
|
||||||
: datasource?.columns,
|
: datasource?.columns;
|
||||||
savedMetrics: defineSavedMetrics(datasource),
|
newState.savedMetrics = defineSavedMetrics(datasource);
|
||||||
// current active adhoc metrics
|
newState.selectedMetrics =
|
||||||
selectedMetrics:
|
|
||||||
form_data.metrics ||
|
form_data.metrics ||
|
||||||
(form_data.metric ? [form_data.metric] : []),
|
(form_data.metric ? [form_data.metric] : []);
|
||||||
datasource,
|
newState.datasource = datasource;
|
||||||
externalValidationErrors: validateAggControlValues(controls, [
|
newState.externalValidationErrors = validateAggControlValues(
|
||||||
|
controls,
|
||||||
|
[
|
||||||
controls.groupby?.value,
|
controls.groupby?.value,
|
||||||
controls.percent_metrics?.value,
|
controls.percent_metrics?.value,
|
||||||
controlState.value,
|
controlState.value,
|
||||||
]),
|
],
|
||||||
}),
|
);
|
||||||
|
|
||||||
|
return newState;
|
||||||
|
},
|
||||||
rerender: ['groupby', 'percent_metrics'],
|
rerender: ['groupby', 'percent_metrics'],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -73,7 +73,12 @@ import { DatabaseSelector } from '../DatabaseSelector';
|
|||||||
import CollectionTable from './CollectionTable';
|
import CollectionTable from './CollectionTable';
|
||||||
import Fieldset from './Fieldset';
|
import Fieldset from './Fieldset';
|
||||||
import Field from './Field';
|
import Field from './Field';
|
||||||
import { fetchSyncedColumns, updateColumns } from './utils';
|
import {
|
||||||
|
fetchSyncedColumns,
|
||||||
|
fetchSyncedMetrics,
|
||||||
|
updateColumns,
|
||||||
|
updateMetrics,
|
||||||
|
} from './utils';
|
||||||
|
|
||||||
const extensionsRegistry = getExtensionsRegistry();
|
const extensionsRegistry = getExtensionsRegistry();
|
||||||
|
|
||||||
@@ -654,6 +659,7 @@ class DatasourceEditor extends PureComponent {
|
|||||||
col => !!col.expression,
|
col => !!col.expression,
|
||||||
),
|
),
|
||||||
metadataLoading: false,
|
metadataLoading: false,
|
||||||
|
metricsLoading: false,
|
||||||
activeTabKey: TABS_KEYS.SOURCE,
|
activeTabKey: TABS_KEYS.SOURCE,
|
||||||
datasourceType: props.datasource.sql
|
datasourceType: props.datasource.sql
|
||||||
? DATASOURCE_TYPES.virtual.key
|
? DATASOURCE_TYPES.virtual.key
|
||||||
@@ -667,6 +673,7 @@ class DatasourceEditor extends PureComponent {
|
|||||||
this.tableChangeAndSyncMetadata =
|
this.tableChangeAndSyncMetadata =
|
||||||
this.tableChangeAndSyncMetadata.bind(this);
|
this.tableChangeAndSyncMetadata.bind(this);
|
||||||
this.syncMetadata = this.syncMetadata.bind(this);
|
this.syncMetadata = this.syncMetadata.bind(this);
|
||||||
|
this.syncMetrics = this.syncMetrics.bind(this);
|
||||||
this.setColumns = this.setColumns.bind(this);
|
this.setColumns = this.setColumns.bind(this);
|
||||||
this.validateAndChange = this.validateAndChange.bind(this);
|
this.validateAndChange = this.validateAndChange.bind(this);
|
||||||
this.handleTabSelect = this.handleTabSelect.bind(this);
|
this.handleTabSelect = this.handleTabSelect.bind(this);
|
||||||
@@ -844,6 +851,32 @@ class DatasourceEditor extends PureComponent {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async syncMetrics() {
|
||||||
|
const { datasource } = this.state;
|
||||||
|
this.setState({ metricsLoading: true });
|
||||||
|
try {
|
||||||
|
const newMetrics = await fetchSyncedMetrics(datasource);
|
||||||
|
const metricChanges = updateMetrics(
|
||||||
|
datasource.metrics,
|
||||||
|
newMetrics,
|
||||||
|
this.props.addSuccessToast,
|
||||||
|
);
|
||||||
|
this.onDatasourceChange({
|
||||||
|
...datasource,
|
||||||
|
metrics: metricChanges.finalMetrics,
|
||||||
|
});
|
||||||
|
this.props.addSuccessToast(t('Metrics have been synced'));
|
||||||
|
this.setState({ metricsLoading: false });
|
||||||
|
} catch (error) {
|
||||||
|
const { error: clientError, statusText } =
|
||||||
|
await getClientErrorObject(error);
|
||||||
|
this.props.addDangerToast(
|
||||||
|
clientError || statusText || t('An error has occurred'),
|
||||||
|
);
|
||||||
|
this.setState({ metricsLoading: false });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
findDuplicates(arr, accessor) {
|
findDuplicates(arr, accessor) {
|
||||||
const seen = {};
|
const seen = {};
|
||||||
const dups = [];
|
const dups = [];
|
||||||
@@ -1702,7 +1735,29 @@ class DatasourceEditor extends PureComponent {
|
|||||||
title={t('Metrics')}
|
title={t('Metrics')}
|
||||||
/>
|
/>
|
||||||
),
|
),
|
||||||
children: this.renderMetricCollection(),
|
children: (
|
||||||
|
<div>
|
||||||
|
{this.state.datasource.database?.backend === 'metricflow' && (
|
||||||
|
<ColumnButtonWrapper>
|
||||||
|
<StyledButtonWrapper>
|
||||||
|
<Button
|
||||||
|
buttonSize="small"
|
||||||
|
buttonStyle="tertiary"
|
||||||
|
onClick={this.syncMetrics}
|
||||||
|
className="sync-metrics-from-source"
|
||||||
|
disabled={this.state.isEditMode}
|
||||||
|
loading={this.state.metricsLoading}
|
||||||
|
>
|
||||||
|
<Icons.DatabaseOutlined iconSize="m" />
|
||||||
|
{t('Sync metrics from source')}
|
||||||
|
</Button>
|
||||||
|
</StyledButtonWrapper>
|
||||||
|
</ColumnButtonWrapper>
|
||||||
|
)}
|
||||||
|
{this.renderMetricCollection()}
|
||||||
|
{this.state.metricsLoading && <Loading />}
|
||||||
|
</div>
|
||||||
|
),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: TABS_KEYS.COLUMNS,
|
key: TABS_KEYS.COLUMNS,
|
||||||
|
|||||||
@@ -132,6 +132,81 @@ export function updateColumns(prevCols, newCols, addSuccessToast) {
|
|||||||
return columnChanges;
|
return columnChanges;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function updateMetrics(prevMetrics, newMetrics, addSuccessToast) {
|
||||||
|
// metrics: Array<{metric_name: string; expression: string; verbose_name?: string; ...}>
|
||||||
|
const sourceMetricNames = newMetrics.map(metric => metric.metric_name);
|
||||||
|
const currentMetrics = prevMetrics.reduce((agg, metric) => {
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
agg[metric.metric_name] = metric;
|
||||||
|
return agg;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
const newOrUpdatedMetrics = newMetrics.filter(metric => {
|
||||||
|
const currentMetric = currentMetrics[metric.metric_name];
|
||||||
|
if (!currentMetric) {
|
||||||
|
// New metric
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
// Check if metric has been updated
|
||||||
|
return (
|
||||||
|
metric.expression !== currentMetric.expression ||
|
||||||
|
metric.verbose_name !== currentMetric.verbose_name ||
|
||||||
|
metric.description !== currentMetric.description
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
const deletedMetrics = prevMetrics.filter(
|
||||||
|
metric => !sourceMetricNames.includes(metric.metric_name),
|
||||||
|
);
|
||||||
|
|
||||||
|
const finalMetrics = [
|
||||||
|
...prevMetrics.filter(metric =>
|
||||||
|
sourceMetricNames.includes(metric.metric_name),
|
||||||
|
),
|
||||||
|
...newOrUpdatedMetrics.filter(
|
||||||
|
metric => !currentMetrics[metric.metric_name],
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
// Update existing metrics with new data
|
||||||
|
finalMetrics.forEach(metric => {
|
||||||
|
const sourceMetric = newMetrics.find(
|
||||||
|
m => m.metric_name === metric.metric_name,
|
||||||
|
);
|
||||||
|
if (sourceMetric) {
|
||||||
|
Object.assign(metric, sourceMetric);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (newOrUpdatedMetrics.length > 0) {
|
||||||
|
addSuccessToast(
|
||||||
|
tn(
|
||||||
|
'Metric %s was added',
|
||||||
|
'Metrics %s were added',
|
||||||
|
newOrUpdatedMetrics.length,
|
||||||
|
newOrUpdatedMetrics.map(metric => metric.metric_name).join(', '),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (deletedMetrics.length > 0) {
|
||||||
|
addSuccessToast(
|
||||||
|
tn(
|
||||||
|
'Metric %s was deleted',
|
||||||
|
'Metrics %s were deleted',
|
||||||
|
deletedMetrics.length,
|
||||||
|
deletedMetrics.map(metric => metric.metric_name).join(', '),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
finalMetrics,
|
||||||
|
newOrUpdatedMetrics,
|
||||||
|
deletedMetrics,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
export async function fetchSyncedColumns(datasource) {
|
export async function fetchSyncedColumns(datasource) {
|
||||||
const params = {
|
const params = {
|
||||||
datasource_type: datasource.type || datasource.datasource_type,
|
datasource_type: datasource.type || datasource.datasource_type,
|
||||||
@@ -155,3 +230,9 @@ export async function fetchSyncedColumns(datasource) {
|
|||||||
const { json } = await SupersetClient.get({ endpoint });
|
const { json } = await SupersetClient.get({ endpoint });
|
||||||
return json;
|
return json;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function fetchSyncedMetrics(datasource) {
|
||||||
|
const endpoint = `/api/v1/dataset/${datasource.id}/sync_metrics`;
|
||||||
|
const { json } = await SupersetClient.put({ endpoint });
|
||||||
|
return json.result;
|
||||||
|
}
|
||||||
|
|||||||
@@ -29,8 +29,8 @@ import { Icons } from '@superset-ui/core/components/Icons';
|
|||||||
|
|
||||||
import { DatasourcePanelDndItem } from '../types';
|
import { DatasourcePanelDndItem } from '../types';
|
||||||
|
|
||||||
const DatasourceItemContainer = styled.div`
|
const DatasourceItemContainer = styled.div<{ isDisabled?: boolean }>`
|
||||||
${({ theme }) => css`
|
${({ theme, isDisabled }) => css`
|
||||||
display: flex;
|
display: flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
justify-content: space-between;
|
justify-content: space-between;
|
||||||
@@ -40,12 +40,16 @@ const DatasourceItemContainer = styled.div`
|
|||||||
|
|
||||||
// hack to make the drag preview image corners rounded
|
// hack to make the drag preview image corners rounded
|
||||||
transform: translate(0, 0);
|
transform: translate(0, 0);
|
||||||
color: ${theme.colorText};
|
color: ${isDisabled ? theme.colorTextSecondary : theme.colorText};
|
||||||
background-color: ${theme.colorBgLayout};
|
background-color: ${theme.colorBgLayout};
|
||||||
border-radius: 4px;
|
border-radius: 4px;
|
||||||
|
opacity: ${isDisabled ? 0.5 : 1};
|
||||||
|
cursor: ${isDisabled ? 'not-allowed' : 'grab'};
|
||||||
|
|
||||||
&:hover {
|
&:hover {
|
||||||
background-color: ${theme.colorPrimaryBgHover};
|
background-color: ${isDisabled
|
||||||
|
? theme.colorBgLayout
|
||||||
|
: theme.colorPrimaryBgHover};
|
||||||
}
|
}
|
||||||
|
|
||||||
> div {
|
> div {
|
||||||
@@ -58,6 +62,7 @@ const DatasourceItemContainer = styled.div`
|
|||||||
interface DatasourcePanelDragOptionProps extends DatasourcePanelDndItem {
|
interface DatasourcePanelDragOptionProps extends DatasourcePanelDndItem {
|
||||||
labelRef?: RefObject<any>;
|
labelRef?: RefObject<any>;
|
||||||
showTooltip?: boolean;
|
showTooltip?: boolean;
|
||||||
|
isDisabled?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
type MetricOption = Omit<Metric, 'id'> & {
|
type MetricOption = Omit<Metric, 'id'> & {
|
||||||
@@ -67,7 +72,7 @@ type MetricOption = Omit<Metric, 'id'> & {
|
|||||||
export default function DatasourcePanelDragOption(
|
export default function DatasourcePanelDragOption(
|
||||||
props: DatasourcePanelDragOptionProps,
|
props: DatasourcePanelDragOptionProps,
|
||||||
) {
|
) {
|
||||||
const { labelRef, showTooltip, type, value } = props;
|
const { labelRef, showTooltip, type, value, isDisabled } = props;
|
||||||
const [{ isDragging }, drag] = useDrag({
|
const [{ isDragging }, drag] = useDrag({
|
||||||
item: {
|
item: {
|
||||||
value: props.value,
|
value: props.value,
|
||||||
@@ -76,6 +81,7 @@ export default function DatasourcePanelDragOption(
|
|||||||
collect: monitor => ({
|
collect: monitor => ({
|
||||||
isDragging: monitor.isDragging(),
|
isDragging: monitor.isDragging(),
|
||||||
}),
|
}),
|
||||||
|
canDrag: !isDisabled,
|
||||||
});
|
});
|
||||||
|
|
||||||
const optionProps = {
|
const optionProps = {
|
||||||
@@ -85,13 +91,17 @@ export default function DatasourcePanelDragOption(
|
|||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<DatasourceItemContainer data-test="DatasourcePanelDragOption" ref={drag}>
|
<DatasourceItemContainer
|
||||||
|
data-test="DatasourcePanelDragOption"
|
||||||
|
ref={isDisabled ? undefined : drag}
|
||||||
|
isDisabled={isDisabled}
|
||||||
|
>
|
||||||
{type === DndItemType.Column ? (
|
{type === DndItemType.Column ? (
|
||||||
<StyledColumnOption column={value as ColumnMeta} {...optionProps} />
|
<StyledColumnOption column={value as ColumnMeta} {...optionProps} />
|
||||||
) : (
|
) : (
|
||||||
<StyledMetricOption metric={value as MetricOption} {...optionProps} />
|
<StyledMetricOption metric={value as MetricOption} {...optionProps} />
|
||||||
)}
|
)}
|
||||||
<Icons.Drag />
|
{isDisabled ? <Icons.LockOutlined /> : <Icons.Drag />}
|
||||||
</DatasourceItemContainer>
|
</DatasourceItemContainer>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -244,6 +244,7 @@ const DatasourcePanelItem = ({
|
|||||||
? DndItemType.Column
|
? DndItemType.Column
|
||||||
: DndItemType.Metric
|
: DndItemType.Metric
|
||||||
}
|
}
|
||||||
|
isDisabled={item.item.isDisabled}
|
||||||
/>
|
/>
|
||||||
</LabelWrapper>
|
</LabelWrapper>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -62,10 +62,12 @@ export type DatasourceFolder = {
|
|||||||
|
|
||||||
export type MetricItem = Metric & {
|
export type MetricItem = Metric & {
|
||||||
type: 'metric';
|
type: 'metric';
|
||||||
|
isDisabled?: boolean;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type ColumnItem = DatasourcePanelColumn & {
|
export type ColumnItem = DatasourcePanelColumn & {
|
||||||
type: 'column';
|
type: 'column';
|
||||||
|
isDisabled?: boolean;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type FolderItem = MetricItem | ColumnItem;
|
export type FolderItem = MetricItem | ColumnItem;
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ import {
|
|||||||
useRef,
|
useRef,
|
||||||
useState,
|
useState,
|
||||||
} from 'react';
|
} from 'react';
|
||||||
import { useSelector } from 'react-redux';
|
import { useSelector, useStore } from 'react-redux';
|
||||||
import {
|
import {
|
||||||
AdhocColumn,
|
AdhocColumn,
|
||||||
isAdhocColumn,
|
isAdhocColumn,
|
||||||
@@ -44,11 +44,16 @@ import {
|
|||||||
Select,
|
Select,
|
||||||
SQLEditor,
|
SQLEditor,
|
||||||
EmptyState,
|
EmptyState,
|
||||||
|
Tooltip,
|
||||||
} from '@superset-ui/core/components';
|
} from '@superset-ui/core/components';
|
||||||
|
|
||||||
import sqlKeywords from 'src/SqlLab/utils/sqlKeywords';
|
import sqlKeywords from 'src/SqlLab/utils/sqlKeywords';
|
||||||
import { getColumnKeywords } from 'src/explore/controlUtils/getColumnKeywords';
|
import { getColumnKeywords } from 'src/explore/controlUtils/getColumnKeywords';
|
||||||
import { StyledColumnOption } from 'src/explore/components/optionRenderers';
|
import { StyledColumnOption } from 'src/explore/components/optionRenderers';
|
||||||
|
import {
|
||||||
|
collectQueryFields,
|
||||||
|
callValidationAPI,
|
||||||
|
} from 'src/explore/components/controls/SemanticLayerVerification';
|
||||||
import {
|
import {
|
||||||
POPOVER_INITIAL_HEIGHT,
|
POPOVER_INITIAL_HEIGHT,
|
||||||
POPOVER_INITIAL_WIDTH,
|
POPOVER_INITIAL_WIDTH,
|
||||||
@@ -119,6 +124,34 @@ const ColumnSelectPopover = ({
|
|||||||
const datasourceType = useSelector<ExplorePageState, string | undefined>(
|
const datasourceType = useSelector<ExplorePageState, string | undefined>(
|
||||||
state => state.explore.datasource.type,
|
state => state.explore.datasource.type,
|
||||||
);
|
);
|
||||||
|
const datasource = useSelector<ExplorePageState, any>(
|
||||||
|
state => state.explore.datasource,
|
||||||
|
);
|
||||||
|
const formData = useSelector<ExplorePageState, any>(
|
||||||
|
state => state.explore.form_data,
|
||||||
|
);
|
||||||
|
const store = useStore();
|
||||||
|
|
||||||
|
// Check if this is a semantic layer dataset
|
||||||
|
const isSemanticLayer = useMemo(() => {
|
||||||
|
if (!datasource || !('database' in datasource) || !datasource.database) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return Boolean(
|
||||||
|
datasource.database.engine_information?.supports_dynamic_columns,
|
||||||
|
);
|
||||||
|
}, [datasource]);
|
||||||
|
|
||||||
|
// For semantic layers, disable Saved and Custom SQL tabs
|
||||||
|
const effectiveDisabledTabs = useMemo(() => {
|
||||||
|
const tabs = new Set(disabledTabs);
|
||||||
|
if (isSemanticLayer) {
|
||||||
|
tabs.add('saved');
|
||||||
|
tabs.add('sqlExpression');
|
||||||
|
}
|
||||||
|
return tabs;
|
||||||
|
}, [disabledTabs, isSemanticLayer]);
|
||||||
|
|
||||||
const [initialLabel] = useState(label);
|
const [initialLabel] = useState(label);
|
||||||
const [initialAdhocColumn, initialCalculatedColumn, initialSimpleColumn] =
|
const [initialAdhocColumn, initialCalculatedColumn, initialSimpleColumn] =
|
||||||
getInitialColumnValues(editedColumn);
|
getInitialColumnValues(editedColumn);
|
||||||
@@ -133,6 +166,8 @@ const ColumnSelectPopover = ({
|
|||||||
ColumnMeta | undefined
|
ColumnMeta | undefined
|
||||||
>(initialSimpleColumn);
|
>(initialSimpleColumn);
|
||||||
const [selectedTab, setSelectedTab] = useState<string | null>(null);
|
const [selectedTab, setSelectedTab] = useState<string | null>(null);
|
||||||
|
const [validDimensions, setValidDimensions] = useState<string[] | null>(null);
|
||||||
|
const previousFormDataRef = useRef<string>('');
|
||||||
|
|
||||||
const [resizeButton, width, height] = useResizeButton(
|
const [resizeButton, width, height] = useResizeButton(
|
||||||
POPOVER_INITIAL_WIDTH,
|
POPOVER_INITIAL_WIDTH,
|
||||||
@@ -141,9 +176,11 @@ const ColumnSelectPopover = ({
|
|||||||
|
|
||||||
const sqlEditorRef = useRef(null);
|
const sqlEditorRef = useRef(null);
|
||||||
|
|
||||||
const [calculatedColumns, simpleColumns] = useMemo(
|
const [calculatedColumns, simpleColumns] = useMemo(() => {
|
||||||
() =>
|
// Use columns from Redux datasource state (which includes disabled states) instead of props
|
||||||
columns?.reduce(
|
const columnsToUse = datasource?.columns || columns || [];
|
||||||
|
|
||||||
|
const [calculated, simple] = columnsToUse.reduce(
|
||||||
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
|
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
|
||||||
if (column.expression) {
|
if (column.expression) {
|
||||||
acc[0].push(column);
|
acc[0].push(column);
|
||||||
@@ -153,9 +190,17 @@ const ColumnSelectPopover = ({
|
|||||||
return acc;
|
return acc;
|
||||||
},
|
},
|
||||||
[[], []],
|
[[], []],
|
||||||
),
|
) || [[], []];
|
||||||
[columns],
|
|
||||||
);
|
// For semantic layer datasets, filter simple columns to show only valid dimensions
|
||||||
|
// Use the isDisabled state set by the main verification system instead of separate API calls
|
||||||
|
if (isSemanticLayer) {
|
||||||
|
const filteredSimple = simple.filter(column => !column.isDisabled);
|
||||||
|
return [calculated, filteredSimple];
|
||||||
|
}
|
||||||
|
|
||||||
|
return [calculated, simple];
|
||||||
|
}, [datasource?.columns, columns, isSemanticLayer]);
|
||||||
|
|
||||||
const onSqlExpressionChange = useCallback(
|
const onSqlExpressionChange = useCallback(
|
||||||
sqlExpression => {
|
sqlExpression => {
|
||||||
@@ -196,17 +241,151 @@ const ColumnSelectPopover = ({
|
|||||||
[setLabel, simpleColumns],
|
[setLabel, simpleColumns],
|
||||||
);
|
);
|
||||||
|
|
||||||
const defaultActiveTabKey = initialAdhocColumn
|
const defaultActiveTabKey = useMemo(() => {
|
||||||
? 'sqlExpression'
|
// For semantic layer datasets, always default to Simple tab
|
||||||
|
if (isSemanticLayer) {
|
||||||
|
return TABS_KEYS.SIMPLE;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Original logic for non-semantic layer datasets
|
||||||
|
return initialAdhocColumn
|
||||||
|
? TABS_KEYS.SQL_EXPRESSION
|
||||||
: selectedCalculatedColumn
|
: selectedCalculatedColumn
|
||||||
? 'saved'
|
? TABS_KEYS.SAVED
|
||||||
: 'simple';
|
: TABS_KEYS.SIMPLE;
|
||||||
|
}, [isSemanticLayer, initialAdhocColumn, selectedCalculatedColumn]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
getCurrentTab(defaultActiveTabKey);
|
getCurrentTab(defaultActiveTabKey);
|
||||||
setSelectedTab(defaultActiveTabKey);
|
setSelectedTab(defaultActiveTabKey);
|
||||||
}, [defaultActiveTabKey, getCurrentTab, setSelectedTab]);
|
}, [defaultActiveTabKey, getCurrentTab, setSelectedTab]);
|
||||||
|
|
||||||
|
// Fetch valid dimensions for semantic layer datasets
|
||||||
|
// Only trigger when actually needed (tab is Simple or modal opens after delay)
|
||||||
|
useEffect(() => {
|
||||||
|
// Disable column modal API calls - semantic layer verification handles disabled states automatically
|
||||||
|
if (
|
||||||
|
false &&
|
||||||
|
isSemanticLayer &&
|
||||||
|
formData &&
|
||||||
|
datasource &&
|
||||||
|
(selectedTab === TABS_KEYS.SIMPLE || selectedTab === null)
|
||||||
|
) {
|
||||||
|
const fetchValidDimensions = async () => {
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Use the same 50ms delay that fixed the main verification timing issue
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Get the most current form data from store
|
||||||
|
const currentState = store.getState() as ExplorePageState;
|
||||||
|
let currentFormData = currentState.explore.form_data;
|
||||||
|
|
||||||
|
// If we're in a table and don't have metrics/dimensions, try to get from controls state
|
||||||
|
if (
|
||||||
|
(!currentFormData.metrics &&
|
||||||
|
!currentFormData.groupby &&
|
||||||
|
!currentFormData.all_columns) ||
|
||||||
|
(Array.isArray(currentFormData.metrics) &&
|
||||||
|
currentFormData.metrics.length === 0 &&
|
||||||
|
Array.isArray(currentFormData.groupby) &&
|
||||||
|
currentFormData.groupby.length === 0)
|
||||||
|
) {
|
||||||
|
// Try to get from the controls state instead
|
||||||
|
const controlsState = (currentState as any).explore?.controls;
|
||||||
|
if (controlsState) {
|
||||||
|
const enhancedFormData = { ...currentFormData };
|
||||||
|
|
||||||
|
// Get metrics from controls
|
||||||
|
if (controlsState.metrics?.value) {
|
||||||
|
enhancedFormData.metrics = controlsState.metrics.value;
|
||||||
|
}
|
||||||
|
if (controlsState.percent_metrics?.value) {
|
||||||
|
enhancedFormData.percent_metrics =
|
||||||
|
controlsState.percent_metrics.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get dimensions from controls
|
||||||
|
if (controlsState.groupby?.value) {
|
||||||
|
enhancedFormData.groupby = controlsState.groupby.value;
|
||||||
|
}
|
||||||
|
if (controlsState.all_columns?.value) {
|
||||||
|
enhancedFormData.all_columns = controlsState.all_columns.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
currentFormData = enhancedFormData;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const queryFields = collectQueryFields(currentFormData);
|
||||||
|
const validationResult = await callValidationAPI(
|
||||||
|
datasource,
|
||||||
|
queryFields.dimensions,
|
||||||
|
queryFields.metrics,
|
||||||
|
);
|
||||||
|
if (validationResult) {
|
||||||
|
setValidDimensions(validationResult.dimensions);
|
||||||
|
} else {
|
||||||
|
setValidDimensions(null);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to fetch valid dimensions:', error);
|
||||||
|
setValidDimensions(null);
|
||||||
|
} finally {
|
||||||
|
// Cleanup
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Trigger API call after a delay to ensure state is current
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
fetchValidDimensions();
|
||||||
|
}, 100);
|
||||||
|
|
||||||
|
return () => clearTimeout(timeoutId);
|
||||||
|
}
|
||||||
|
setValidDimensions(null);
|
||||||
|
return undefined;
|
||||||
|
}, [isSemanticLayer, selectedTab, datasource, store]);
|
||||||
|
|
||||||
|
// Also trigger when form data changes (for subsequent updates)
|
||||||
|
useEffect(() => {
|
||||||
|
if (isSemanticLayer && validDimensions !== null && formData && datasource) {
|
||||||
|
const currentFormDataString = JSON.stringify(formData);
|
||||||
|
|
||||||
|
// Only make API call if form data actually changed and we already have loaded once
|
||||||
|
if (currentFormDataString !== previousFormDataRef.current) {
|
||||||
|
previousFormDataRef.current = currentFormDataString;
|
||||||
|
|
||||||
|
const fetchValidDimensions = async () => {
|
||||||
|
try {
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
const currentState = store.getState() as ExplorePageState;
|
||||||
|
const currentFormData = currentState.explore.form_data;
|
||||||
|
|
||||||
|
const queryFields = collectQueryFields(currentFormData);
|
||||||
|
const validationResult = await callValidationAPI(
|
||||||
|
datasource,
|
||||||
|
queryFields.dimensions,
|
||||||
|
queryFields.metrics,
|
||||||
|
);
|
||||||
|
if (validationResult) {
|
||||||
|
setValidDimensions(validationResult.dimensions);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.warn('Failed to fetch valid dimensions:', error);
|
||||||
|
} finally {
|
||||||
|
// Cleanup
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
setTimeout(() => {
|
||||||
|
fetchValidDimensions();
|
||||||
|
}, 50);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, [isSemanticLayer, formData, datasource, store, validDimensions]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
/* if the adhoc column is not set (because it was never edited) but the
|
/* if the adhoc column is not set (because it was never edited) but the
|
||||||
* tab is selected and the label has changed, then we need to set the
|
* tab is selected and the label has changed, then we need to set the
|
||||||
@@ -318,8 +497,19 @@ const ColumnSelectPopover = ({
|
|||||||
items={[
|
items={[
|
||||||
{
|
{
|
||||||
key: TABS_KEYS.SAVED,
|
key: TABS_KEYS.SAVED,
|
||||||
label: t('Saved'),
|
label:
|
||||||
disabled: disabledTabs.has('saved'),
|
isSemanticLayer && effectiveDisabledTabs.has('saved') ? (
|
||||||
|
<Tooltip
|
||||||
|
title={t(
|
||||||
|
'Saved expressions are not supported for semantic layer datasets',
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{t('Saved')}
|
||||||
|
</Tooltip>
|
||||||
|
) : (
|
||||||
|
t('Saved')
|
||||||
|
),
|
||||||
|
disabled: effectiveDisabledTabs.has('saved'),
|
||||||
children: (
|
children: (
|
||||||
<>
|
<>
|
||||||
{calculatedColumns.length > 0 ? (
|
{calculatedColumns.length > 0 ? (
|
||||||
@@ -404,7 +594,7 @@ const ColumnSelectPopover = ({
|
|||||||
{
|
{
|
||||||
key: TABS_KEYS.SIMPLE,
|
key: TABS_KEYS.SIMPLE,
|
||||||
label: t('Simple'),
|
label: t('Simple'),
|
||||||
disabled: disabledTabs.has('simple'),
|
disabled: effectiveDisabledTabs.has('simple'),
|
||||||
children: (
|
children: (
|
||||||
<>
|
<>
|
||||||
{isTemporal && simpleColumns.length === 0 ? (
|
{isTemporal && simpleColumns.length === 0 ? (
|
||||||
@@ -455,8 +645,19 @@ const ColumnSelectPopover = ({
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: TABS_KEYS.SQL_EXPRESSION,
|
key: TABS_KEYS.SQL_EXPRESSION,
|
||||||
label: t('Custom SQL'),
|
label:
|
||||||
disabled: disabledTabs.has('sqlExpression'),
|
isSemanticLayer && effectiveDisabledTabs.has('sqlExpression') ? (
|
||||||
|
<Tooltip
|
||||||
|
title={t(
|
||||||
|
'Custom SQL expressions are not supported for semantic layer datasets',
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
{t('Custom SQL')}
|
||||||
|
</Tooltip>
|
||||||
|
) : (
|
||||||
|
t('Custom SQL')
|
||||||
|
),
|
||||||
|
disabled: effectiveDisabledTabs.has('sqlExpression'),
|
||||||
children: (
|
children: (
|
||||||
<>
|
<>
|
||||||
<SQLEditor
|
<SQLEditor
|
||||||
|
|||||||
@@ -19,6 +19,7 @@
|
|||||||
/* eslint-disable camelcase */
|
/* eslint-disable camelcase */
|
||||||
import { PureComponent } from 'react';
|
import { PureComponent } from 'react';
|
||||||
import PropTypes from 'prop-types';
|
import PropTypes from 'prop-types';
|
||||||
|
import { connect } from 'react-redux';
|
||||||
import {
|
import {
|
||||||
isDefined,
|
isDefined,
|
||||||
t,
|
t,
|
||||||
@@ -68,6 +69,8 @@ const propTypes = {
|
|||||||
datasource: PropTypes.object,
|
datasource: PropTypes.object,
|
||||||
isNewMetric: PropTypes.bool,
|
isNewMetric: PropTypes.bool,
|
||||||
isLabelModified: PropTypes.bool,
|
isLabelModified: PropTypes.bool,
|
||||||
|
// Props from Redux
|
||||||
|
reduxDatasource: PropTypes.object,
|
||||||
};
|
};
|
||||||
|
|
||||||
const defaultProps = {
|
const defaultProps = {
|
||||||
@@ -90,7 +93,7 @@ const StyledSelect = styled(Select)`
|
|||||||
|
|
||||||
export const SAVED_TAB_KEY = 'SAVED';
|
export const SAVED_TAB_KEY = 'SAVED';
|
||||||
|
|
||||||
export default class AdhocMetricEditPopover extends PureComponent {
|
class AdhocMetricEditPopover extends PureComponent {
|
||||||
// "Saved" is a default tab unless there are no saved metrics for dataset
|
// "Saved" is a default tab unless there are no saved metrics for dataset
|
||||||
defaultActiveTabKey = this.getDefaultTab();
|
defaultActiveTabKey = this.getDefaultTab();
|
||||||
|
|
||||||
@@ -149,6 +152,19 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
getDefaultTab() {
|
getDefaultTab() {
|
||||||
const { adhocMetric, savedMetric, savedMetricsOptions, isNewMetric } =
|
const { adhocMetric, savedMetric, savedMetricsOptions, isNewMetric } =
|
||||||
this.props;
|
this.props;
|
||||||
|
|
||||||
|
// For semantic layer datasets, always default to Saved tab if available
|
||||||
|
if (this.isSemanticLayer()) {
|
||||||
|
if (
|
||||||
|
Array.isArray(savedMetricsOptions) &&
|
||||||
|
savedMetricsOptions.length > 0
|
||||||
|
) {
|
||||||
|
return SAVED_TAB_KEY;
|
||||||
|
}
|
||||||
|
// If no saved metrics available, still return SAVED_TAB_KEY to show empty state
|
||||||
|
return SAVED_TAB_KEY;
|
||||||
|
}
|
||||||
|
|
||||||
if (isDefined(adhocMetric.column) || isDefined(adhocMetric.sqlExpression)) {
|
if (isDefined(adhocMetric.column) || isDefined(adhocMetric.sqlExpression)) {
|
||||||
return adhocMetric.expressionType;
|
return adhocMetric.expressionType;
|
||||||
}
|
}
|
||||||
@@ -162,6 +178,16 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
return adhocMetric.expressionType;
|
return adhocMetric.expressionType;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
isSemanticLayer() {
|
||||||
|
const { datasource } = this.props;
|
||||||
|
if (!datasource || !('database' in datasource) || !datasource.database) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return Boolean(
|
||||||
|
datasource.database.engine_information?.supports_dynamic_columns,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
onSave() {
|
onSave() {
|
||||||
const { adhocMetric, savedMetric } = this.state;
|
const { adhocMetric, savedMetric } = this.state;
|
||||||
|
|
||||||
@@ -306,11 +332,29 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
datasource,
|
datasource,
|
||||||
isNewMetric,
|
isNewMetric,
|
||||||
isLabelModified,
|
isLabelModified,
|
||||||
|
reduxDatasource,
|
||||||
...popoverProps
|
...popoverProps
|
||||||
} = this.props;
|
} = this.props;
|
||||||
const { adhocMetric, savedMetric } = this.state;
|
const { adhocMetric, savedMetric } = this.state;
|
||||||
const keywords = sqlKeywords.concat(getColumnKeywords(columns));
|
const keywords = sqlKeywords.concat(getColumnKeywords(columns));
|
||||||
|
|
||||||
|
// For semantic layer datasets, filter saved metrics to show only valid ones
|
||||||
|
// Use the isDisabled state set by the main verification system instead of all metrics
|
||||||
|
let filteredSavedMetricsOptions = savedMetricsOptions;
|
||||||
|
if (this.isSemanticLayer() && reduxDatasource?.metrics) {
|
||||||
|
// Create a set of metric names that are NOT disabled in Redux state
|
||||||
|
const validMetricNames = new Set(
|
||||||
|
reduxDatasource.metrics
|
||||||
|
.filter(metric => !metric.isDisabled)
|
||||||
|
.map(metric => metric.metric_name),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Filter savedMetricsOptions to only include valid metrics
|
||||||
|
filteredSavedMetricsOptions = ensureIsArray(savedMetricsOptions).filter(
|
||||||
|
metric => validMetricNames.has(metric.metric_name),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
const columnValue =
|
const columnValue =
|
||||||
(adhocMetric.column && adhocMetric.column.column_name) ||
|
(adhocMetric.column && adhocMetric.column.column_name) ||
|
||||||
adhocMetric.inferSqlExpressionColumn();
|
adhocMetric.inferSqlExpressionColumn();
|
||||||
@@ -336,7 +380,10 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
|
|
||||||
const savedSelectProps = {
|
const savedSelectProps = {
|
||||||
ariaLabel: t('Select saved metrics'),
|
ariaLabel: t('Select saved metrics'),
|
||||||
placeholder: t('%s saved metric(s)', savedMetricsOptions?.length ?? 0),
|
placeholder: t(
|
||||||
|
'%s saved metric(s)',
|
||||||
|
filteredSavedMetricsOptions?.length ?? 0,
|
||||||
|
),
|
||||||
value: savedMetric?.metric_name,
|
value: savedMetric?.metric_name,
|
||||||
onChange: this.onSavedMetricChange,
|
onChange: this.onSavedMetricChange,
|
||||||
allowClear: true,
|
allowClear: true,
|
||||||
@@ -381,10 +428,10 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
key: SAVED_TAB_KEY,
|
key: SAVED_TAB_KEY,
|
||||||
label: t('Saved'),
|
label: t('Saved'),
|
||||||
children:
|
children:
|
||||||
ensureIsArray(savedMetricsOptions).length > 0 ? (
|
ensureIsArray(filteredSavedMetricsOptions).length > 0 ? (
|
||||||
<FormItem label={t('Saved metric')}>
|
<FormItem label={t('Saved metric')}>
|
||||||
<StyledSelect
|
<StyledSelect
|
||||||
options={ensureIsArray(savedMetricsOptions).map(
|
options={ensureIsArray(filteredSavedMetricsOptions).map(
|
||||||
savedMetric => ({
|
savedMetric => ({
|
||||||
value: savedMetric.metric_name,
|
value: savedMetric.metric_name,
|
||||||
label: this.renderMetricOption(savedMetric),
|
label: this.renderMetricOption(savedMetric),
|
||||||
@@ -428,18 +475,25 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: EXPRESSION_TYPES.SIMPLE,
|
key: EXPRESSION_TYPES.SIMPLE,
|
||||||
label: extra.disallow_adhoc_metrics ? (
|
label:
|
||||||
|
extra.disallow_adhoc_metrics || this.isSemanticLayer() ? (
|
||||||
<Tooltip
|
<Tooltip
|
||||||
title={t(
|
title={
|
||||||
|
this.isSemanticLayer()
|
||||||
|
? t(
|
||||||
|
'Simple ad-hoc metrics are not supported for semantic layer datasets',
|
||||||
|
)
|
||||||
|
: t(
|
||||||
'Simple ad-hoc metrics are not enabled for this dataset',
|
'Simple ad-hoc metrics are not enabled for this dataset',
|
||||||
)}
|
)
|
||||||
|
}
|
||||||
>
|
>
|
||||||
{t('Simple')}
|
{t('Simple')}
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
) : (
|
) : (
|
||||||
t('Simple')
|
t('Simple')
|
||||||
),
|
),
|
||||||
disabled: extra.disallow_adhoc_metrics,
|
disabled: extra.disallow_adhoc_metrics || this.isSemanticLayer(),
|
||||||
children: (
|
children: (
|
||||||
<>
|
<>
|
||||||
<FormItem label={t('column')}>
|
<FormItem label={t('column')}>
|
||||||
@@ -467,18 +521,25 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: EXPRESSION_TYPES.SQL,
|
key: EXPRESSION_TYPES.SQL,
|
||||||
label: extra.disallow_adhoc_metrics ? (
|
label:
|
||||||
|
extra.disallow_adhoc_metrics || this.isSemanticLayer() ? (
|
||||||
<Tooltip
|
<Tooltip
|
||||||
title={t(
|
title={
|
||||||
|
this.isSemanticLayer()
|
||||||
|
? t(
|
||||||
|
'Custom SQL ad-hoc metrics are not supported for semantic layer datasets',
|
||||||
|
)
|
||||||
|
: t(
|
||||||
'Custom SQL ad-hoc metrics are not enabled for this dataset',
|
'Custom SQL ad-hoc metrics are not enabled for this dataset',
|
||||||
)}
|
)
|
||||||
|
}
|
||||||
>
|
>
|
||||||
{t('Custom SQL')}
|
{t('Custom SQL')}
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
) : (
|
) : (
|
||||||
t('Custom SQL')
|
t('Custom SQL')
|
||||||
),
|
),
|
||||||
disabled: extra.disallow_adhoc_metrics,
|
disabled: extra.disallow_adhoc_metrics || this.isSemanticLayer(),
|
||||||
children: (
|
children: (
|
||||||
<SQLEditor
|
<SQLEditor
|
||||||
data-test="sql-editor"
|
data-test="sql-editor"
|
||||||
@@ -536,3 +597,10 @@ export default class AdhocMetricEditPopover extends PureComponent {
|
|||||||
}
|
}
|
||||||
AdhocMetricEditPopover.propTypes = propTypes;
|
AdhocMetricEditPopover.propTypes = propTypes;
|
||||||
AdhocMetricEditPopover.defaultProps = defaultProps;
|
AdhocMetricEditPopover.defaultProps = defaultProps;
|
||||||
|
|
||||||
|
// Map Redux state to props to get access to datasource with disabled states
|
||||||
|
const mapStateToProps = state => ({
|
||||||
|
reduxDatasource: state.explore?.datasource,
|
||||||
|
});
|
||||||
|
|
||||||
|
export default connect(mapStateToProps)(AdhocMetricEditPopover);
|
||||||
|
|||||||
@@ -0,0 +1,225 @@
|
|||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
dndAdhocMetricsControl,
|
||||||
|
dndAdhocMetricControl,
|
||||||
|
dndAdhocMetricControl2,
|
||||||
|
dndGroupByControl,
|
||||||
|
dndColumnsControl,
|
||||||
|
Dataset,
|
||||||
|
} from '@superset-ui/chart-controls';
|
||||||
|
import withAsyncVerification from './withAsyncVerification';
|
||||||
|
import {
|
||||||
|
createMetricsVerification,
|
||||||
|
createColumnsVerification,
|
||||||
|
createSemanticLayerOnChange,
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
} from './SemanticLayerVerification';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a datasource supports semantic layer verification
|
||||||
|
*/
|
||||||
|
function needsSemanticLayerVerification(datasource: Dataset): boolean {
|
||||||
|
if (!datasource || !('database' in datasource) || !datasource.database) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const database = datasource.database as any;
|
||||||
|
return Boolean(database.engine_information?.supports_dynamic_columns);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced metrics control with semantic layer verification
|
||||||
|
*/
|
||||||
|
export const semanticLayerDndAdhocMetricsControl = {
|
||||||
|
...dndAdhocMetricsControl,
|
||||||
|
type: withAsyncVerification({
|
||||||
|
baseControl: 'DndMetricSelect',
|
||||||
|
verify: createMetricsVerification(),
|
||||||
|
onChange: createSemanticLayerOnChange(
|
||||||
|
'metrics',
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
),
|
||||||
|
showLoadingState: true,
|
||||||
|
}),
|
||||||
|
mapStateToProps: (state: any, controlState: any) => {
|
||||||
|
// Call the original mapStateToProps if it exists
|
||||||
|
const originalProps = dndAdhocMetricsControl.mapStateToProps
|
||||||
|
? dndAdhocMetricsControl.mapStateToProps(state, controlState)
|
||||||
|
: {};
|
||||||
|
|
||||||
|
return {
|
||||||
|
...originalProps,
|
||||||
|
needAsyncVerification: needsSemanticLayerVerification(state.datasource),
|
||||||
|
form_data: state.form_data,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced single metric control with semantic layer verification
|
||||||
|
*/
|
||||||
|
export const semanticLayerDndAdhocMetricControl = {
|
||||||
|
...dndAdhocMetricControl,
|
||||||
|
type: withAsyncVerification({
|
||||||
|
baseControl: 'DndMetricSelect',
|
||||||
|
verify: createMetricsVerification(),
|
||||||
|
onChange: createSemanticLayerOnChange(
|
||||||
|
'metric',
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
),
|
||||||
|
showLoadingState: true,
|
||||||
|
}),
|
||||||
|
mapStateToProps: (state: any, controlState: any) => {
|
||||||
|
// Call the original mapStateToProps if it exists
|
||||||
|
const originalProps = dndAdhocMetricControl.mapStateToProps
|
||||||
|
? dndAdhocMetricControl.mapStateToProps(state, controlState)
|
||||||
|
: {};
|
||||||
|
|
||||||
|
return {
|
||||||
|
...originalProps,
|
||||||
|
needAsyncVerification: needsSemanticLayerVerification(state.datasource),
|
||||||
|
form_data: state.form_data,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced secondary metric control with semantic layer verification
|
||||||
|
*/
|
||||||
|
export const semanticLayerDndAdhocMetricControl2 = {
|
||||||
|
...dndAdhocMetricControl2,
|
||||||
|
type: withAsyncVerification({
|
||||||
|
baseControl: 'DndMetricSelect',
|
||||||
|
verify: createMetricsVerification(),
|
||||||
|
onChange: createSemanticLayerOnChange(
|
||||||
|
'metric_2',
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
),
|
||||||
|
showLoadingState: true,
|
||||||
|
}),
|
||||||
|
mapStateToProps: (state: any, controlState: any) => {
|
||||||
|
// Call the original mapStateToProps if it exists
|
||||||
|
const originalProps = dndAdhocMetricControl2.mapStateToProps
|
||||||
|
? dndAdhocMetricControl2.mapStateToProps(state, controlState)
|
||||||
|
: {};
|
||||||
|
|
||||||
|
return {
|
||||||
|
...originalProps,
|
||||||
|
needAsyncVerification: needsSemanticLayerVerification(state.datasource),
|
||||||
|
form_data: state.form_data,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced group by control with semantic layer verification
|
||||||
|
*/
|
||||||
|
export const semanticLayerDndGroupByControl = {
|
||||||
|
...dndGroupByControl,
|
||||||
|
type: withAsyncVerification({
|
||||||
|
baseControl: 'DndColumnSelect',
|
||||||
|
verify: createColumnsVerification(),
|
||||||
|
onChange: createSemanticLayerOnChange(
|
||||||
|
'groupby',
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
),
|
||||||
|
showLoadingState: true,
|
||||||
|
}),
|
||||||
|
mapStateToProps: (state: any, controlState: any) => {
|
||||||
|
// Call the original mapStateToProps if it exists
|
||||||
|
const originalProps = dndGroupByControl.mapStateToProps
|
||||||
|
? dndGroupByControl.mapStateToProps(state, controlState)
|
||||||
|
: {};
|
||||||
|
|
||||||
|
return {
|
||||||
|
...originalProps,
|
||||||
|
needAsyncVerification: needsSemanticLayerVerification(state.datasource),
|
||||||
|
form_data: state.form_data,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enhanced columns control with semantic layer verification
|
||||||
|
*/
|
||||||
|
export const semanticLayerDndColumnsControl = {
|
||||||
|
...dndColumnsControl,
|
||||||
|
type: withAsyncVerification({
|
||||||
|
baseControl: 'DndColumnSelect',
|
||||||
|
verify: createColumnsVerification(),
|
||||||
|
onChange: createSemanticLayerOnChange(
|
||||||
|
'columns',
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
),
|
||||||
|
showLoadingState: true,
|
||||||
|
}),
|
||||||
|
mapStateToProps: (state: any, controlState: any) => {
|
||||||
|
// Call the original mapStateToProps if it exists
|
||||||
|
const originalProps = dndColumnsControl.mapStateToProps
|
||||||
|
? dndColumnsControl.mapStateToProps(state, controlState)
|
||||||
|
: {};
|
||||||
|
|
||||||
|
return {
|
||||||
|
...originalProps,
|
||||||
|
needAsyncVerification: needsSemanticLayerVerification(state.datasource),
|
||||||
|
form_data: state.form_data,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create override function for semantic layer controls
|
||||||
|
*/
|
||||||
|
function createSemanticLayerControlOverride(enhancedControl: any) {
|
||||||
|
return (originalConfig: any) =>
|
||||||
|
// For semantic layer datasources, use the enhanced control
|
||||||
|
// For regular datasources, use the original control
|
||||||
|
({
|
||||||
|
...originalConfig,
|
||||||
|
...enhancedControl,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Control overrides mapping
|
||||||
|
*/
|
||||||
|
/**
 * Control name → override factory mapping, consumed at app setup time to
 * swap the stock explore controls for their semantic-layer-verified
 * counterparts. Metric-like controls share the metric overrides; the
 * dimension-like controls share the column/group-by overrides.
 */
export const semanticLayerControlOverrides = {
  metrics: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricsControl,
  ),
  metric: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricControl,
  ),
  metric_2: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricControl2,
  ),
  // percent_metrics reuses the multi-metric control override.
  percent_metrics: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricsControl,
  ),
  // Sort-limit metric behaves like a single metric selector.
  timeseries_limit_metric: createSemanticLayerControlOverride(
    semanticLayerDndAdhocMetricControl,
  ),
  groupby: createSemanticLayerControlOverride(semanticLayerDndGroupByControl),
  columns: createSemanticLayerControlOverride(semanticLayerDndColumnsControl),
  series_columns: createSemanticLayerControlOverride(
    semanticLayerDndColumnsControl,
  ),
};
|
||||||
@@ -0,0 +1,548 @@
|
|||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { SupersetClient, JsonValue } from '@superset-ui/core';
|
||||||
|
import { Dataset } from '@superset-ui/chart-controls';
|
||||||
|
import { AsyncVerify, ControlPropsWithExtras } from './withAsyncVerification';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Utility to extract current form fields from form data
|
||||||
|
*/
|
||||||
|
export function collectQueryFields(formData: any): {
|
||||||
|
dimensions: string[];
|
||||||
|
metrics: string[];
|
||||||
|
} {
|
||||||
|
const dimensions: string[] = [];
|
||||||
|
const metrics: string[] = [];
|
||||||
|
|
||||||
|
// Extract dimensions from various field types
|
||||||
|
if (formData.groupby) {
|
||||||
|
dimensions.push(
|
||||||
|
...(Array.isArray(formData.groupby)
|
||||||
|
? formData.groupby
|
||||||
|
: [formData.groupby]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.columns) {
|
||||||
|
dimensions.push(
|
||||||
|
...(Array.isArray(formData.columns)
|
||||||
|
? formData.columns
|
||||||
|
: [formData.columns]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.all_columns) {
|
||||||
|
dimensions.push(
|
||||||
|
...(Array.isArray(formData.all_columns)
|
||||||
|
? formData.all_columns
|
||||||
|
: [formData.all_columns]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.series_columns) {
|
||||||
|
dimensions.push(
|
||||||
|
...(Array.isArray(formData.series_columns)
|
||||||
|
? formData.series_columns
|
||||||
|
: [formData.series_columns]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.series) {
|
||||||
|
dimensions.push(
|
||||||
|
...(Array.isArray(formData.series) ? formData.series : [formData.series]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.entity) {
|
||||||
|
dimensions.push(
|
||||||
|
...(Array.isArray(formData.entity) ? formData.entity : [formData.entity]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.x_axis) {
|
||||||
|
dimensions.push(
|
||||||
|
...(Array.isArray(formData.x_axis) ? formData.x_axis : [formData.x_axis]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract metrics from various field types
|
||||||
|
if (formData.metrics) {
|
||||||
|
metrics.push(
|
||||||
|
...(Array.isArray(formData.metrics)
|
||||||
|
? formData.metrics
|
||||||
|
: [formData.metrics]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.metric) {
|
||||||
|
metrics.push(formData.metric);
|
||||||
|
}
|
||||||
|
if (formData.metric_2) {
|
||||||
|
metrics.push(formData.metric_2);
|
||||||
|
}
|
||||||
|
if (formData.percent_metrics) {
|
||||||
|
metrics.push(
|
||||||
|
...(Array.isArray(formData.percent_metrics)
|
||||||
|
? formData.percent_metrics
|
||||||
|
: [formData.percent_metrics]),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (formData.timeseries_limit_metric) {
|
||||||
|
metrics.push(formData.timeseries_limit_metric);
|
||||||
|
}
|
||||||
|
if (formData.x) {
|
||||||
|
metrics.push(formData.x);
|
||||||
|
}
|
||||||
|
if (formData.y) {
|
||||||
|
metrics.push(formData.y);
|
||||||
|
}
|
||||||
|
if (formData.size) {
|
||||||
|
metrics.push(formData.size);
|
||||||
|
}
|
||||||
|
if (formData.secondary_metric) {
|
||||||
|
metrics.push(formData.secondary_metric);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter out null/undefined values and convert objects to strings if needed
|
||||||
|
const cleanDimensions = dimensions
|
||||||
|
.filter(dim => dim != null)
|
||||||
|
.map(dim =>
|
||||||
|
typeof dim === 'string' ? dim : (dim as any)?.column_name || String(dim),
|
||||||
|
);
|
||||||
|
|
||||||
|
const cleanMetrics = metrics
|
||||||
|
.filter(metric => metric != null)
|
||||||
|
.map(metric =>
|
||||||
|
typeof metric === 'string'
|
||||||
|
? metric
|
||||||
|
: (metric as any)?.metric_name || String(metric),
|
||||||
|
);
|
||||||
|
|
||||||
|
const result = {
|
||||||
|
dimensions: [...new Set(cleanDimensions)], // Remove duplicates
|
||||||
|
metrics: [...new Set(cleanMetrics)], // Remove duplicates
|
||||||
|
};
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a datasource supports semantic layer verification
|
||||||
|
*/
|
||||||
|
function supportsSemanticLayerVerification(datasource: Dataset): boolean {
|
||||||
|
if (!datasource || !('database' in datasource) || !datasource.database) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const database = datasource.database as any;
|
||||||
|
return Boolean(database.engine_information?.supports_dynamic_columns);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cache for API calls to prevent duplicates.
// Keyed two ways by callValidationAPI: a JSON cache key over the exact
// request parameters, and a looser datasource+selection "signature".
// Entries hold in-flight promises and are removed once the call settles.
const apiCallCache = new Map<
  string,
  Promise<{ dimensions: string[]; metrics: string[] } | null>
>();

// Request debouncing - keyed by datasource + control combination.
// pendingRequests holds the in-flight promise per control; lastRequestTime
// records when each control last fired (used for a short time-based debounce).
const pendingRequests = new Map<string, Promise<any>>();
const lastRequestTime = new Map<string, number>();

// Track which controls have had their initial verification
// (keys are `${datasourceId}_${controlName}`; never cleared at runtime).
const initialVerificationDone = new Set<string>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create verification result from API response
|
||||||
|
*/
|
||||||
|
/**
 * Turn a backend validation response into control props: filters the
 * control's saved metrics down to valid ones, and rebuilds the datasource
 * with per-metric/per-column `isDisabled` flags for the explore left panel.
 *
 * Side effect: dispatches `actions.syncDatasourceMetadata` (when available)
 * so the Redux-held datasource — and thus the left panel — reflects the
 * disabled states.
 *
 * @param validationResult - valid dimension/metric names from the API.
 * @param savedMetrics - the control's current saved-metric options.
 * @param props - control props carrying `datasource` and `actions`.
 * @param controlName - currently unused here; kept for call-site symmetry
 *   with the verification factories — TODO confirm before removing.
 */
function createVerificationResult(
  validationResult: { dimensions: string[]; metrics: string[] },
  savedMetrics: any[],
  props: ControlPropsWithExtras,
  controlName?: string,
) {
  const { datasource, actions } = props;

  // Filter saved metrics to only include valid ones
  const validMetricNames = new Set(validationResult.metrics);
  const filteredSavedMetrics = savedMetrics.filter((metric: any) =>
    // Entries may be adhoc objects (metric_name) or bare strings.
    validMetricNames.has(metric.metric_name || metric),
  );

  // Mark datasource metrics and columns as disabled if invalid (for left panel)
  const dataset = datasource as Dataset;
  let updatedDatasourceMetrics = dataset.metrics;
  let updatedDatasourceColumns = dataset.columns;

  // Filter valid names to only include those that exist in the original
  // datasource, so names the API returns but the dataset lacks are ignored.
  const originalDimensionNames = new Set(
    dataset.columns?.map((col: any) => col.column_name) || [],
  );
  const originalMetricNames = new Set(
    dataset.metrics?.map((metric: any) => metric.metric_name) || [],
  );

  const filteredValidMetricNames = new Set(
    validationResult.metrics.filter(metric => originalMetricNames.has(metric)),
  );
  const filteredValidDimensionNames = new Set(
    validationResult.dimensions.filter(dim => originalDimensionNames.has(dim)),
  );

  if (dataset.metrics) {
    updatedDatasourceMetrics = dataset.metrics.map((metric: any) => ({
      ...metric,
      isDisabled: !filteredValidMetricNames.has(metric.metric_name || metric),
    }));
  }

  // Also update columns using the same validation result
  if (dataset.columns) {
    updatedDatasourceColumns = dataset.columns.map((column: any) => ({
      ...column,
      isDisabled: !filteredValidDimensionNames.has(
        column.column_name || column,
      ),
    }));
  }

  // Create updated datasource for left panel
  const updatedDatasource = {
    ...dataset,
    metrics: updatedDatasourceMetrics,
    columns: updatedDatasourceColumns,
  };

  // Update the Redux store's datasource to affect the left panel
  if (actions && typeof actions.syncDatasourceMetadata === 'function') {
    actions.syncDatasourceMetadata(updatedDatasource);
  }

  return {
    savedMetrics: filteredSavedMetrics,
    datasource: updatedDatasource,
  };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Call the validation API
|
||||||
|
*/
|
||||||
|
export async function callValidationAPI(
|
||||||
|
datasource: Dataset,
|
||||||
|
selectedDimensions: string[],
|
||||||
|
selectedMetrics: string[],
|
||||||
|
controlName?: string,
|
||||||
|
): Promise<{ dimensions: string[]; metrics: string[] } | null> {
|
||||||
|
const databaseId = (datasource.database as any)?.id;
|
||||||
|
if (!datasource?.id || !databaseId) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create cache key based on the request parameters
|
||||||
|
const cacheKey = JSON.stringify({
|
||||||
|
datasource_id: datasource.id,
|
||||||
|
dimensions: selectedDimensions.sort(),
|
||||||
|
metrics: selectedMetrics.sort(),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create a key for this specific control to prevent duplicate requests
|
||||||
|
const controlKey = `${datasource.id}_${controlName || 'unknown'}`;
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
// Check if we already have a pending request for the same parameters
|
||||||
|
if (apiCallCache.has(cacheKey)) {
|
||||||
|
console.log(`[API] Reusing cached request for control: ${controlName}`);
|
||||||
|
return apiCallCache.get(cacheKey)!;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if we have a pending request for this specific control
|
||||||
|
if (pendingRequests.has(controlKey)) {
|
||||||
|
console.log(`[API] Request already pending for control: ${controlName}, waiting...`);
|
||||||
|
return pendingRequests.get(controlKey)!;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Enhanced deduplication: check if we have an identical request in flight
|
||||||
|
const requestSignature = `${datasource.id}_${selectedDimensions.join(',')}_${selectedMetrics.join(',')}`;
|
||||||
|
|
||||||
|
// If we have an identical request already cached, return it
|
||||||
|
if (apiCallCache.has(requestSignature)) {
|
||||||
|
console.log(`[API] Identical request found for control: ${controlName}, reusing...`);
|
||||||
|
return apiCallCache.get(requestSignature)!;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Time-based deduplication: if we just made a request for this control, wait a bit
|
||||||
|
const lastTime = lastRequestTime.get(controlKey) || 0;
|
||||||
|
if (now - lastTime < 50) { // 50ms debounce
|
||||||
|
console.log(`[API] Request too soon for control: ${controlName}, debouncing...`);
|
||||||
|
return new Promise(resolve => {
|
||||||
|
setTimeout(async () => {
|
||||||
|
// Try again after debounce
|
||||||
|
const result = await callValidationAPI(datasource, selectedDimensions, selectedMetrics, controlName);
|
||||||
|
resolve(result);
|
||||||
|
}, 50);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
lastRequestTime.set(controlKey, now);
|
||||||
|
|
||||||
|
try {
|
||||||
|
console.log(`[API] Making request for control: ${controlName}`, {
|
||||||
|
datasource_id: datasource.id,
|
||||||
|
dimensions: selectedDimensions,
|
||||||
|
metrics: selectedMetrics,
|
||||||
|
});
|
||||||
|
|
||||||
|
const apiPromise = SupersetClient.post({
|
||||||
|
endpoint: `/api/v1/database/${databaseId}/valid_metrics_and_dimensions/`,
|
||||||
|
jsonPayload: {
|
||||||
|
datasource_id: datasource.id,
|
||||||
|
dimensions: selectedDimensions,
|
||||||
|
metrics: selectedMetrics,
|
||||||
|
},
|
||||||
|
}).then(
|
||||||
|
response => response.json as { dimensions: string[]; metrics: string[] },
|
||||||
|
);
|
||||||
|
|
||||||
|
// Cache the promise for the exact same parameters
|
||||||
|
apiCallCache.set(cacheKey, apiPromise);
|
||||||
|
|
||||||
|
// Cache by request signature for identical requests
|
||||||
|
apiCallCache.set(requestSignature, apiPromise);
|
||||||
|
|
||||||
|
// Also track this request for this specific control
|
||||||
|
pendingRequests.set(controlKey, apiPromise);
|
||||||
|
|
||||||
|
// Clean up on completion
|
||||||
|
const result = await apiPromise;
|
||||||
|
apiCallCache.delete(cacheKey);
|
||||||
|
apiCallCache.delete(requestSignature);
|
||||||
|
pendingRequests.delete(controlKey);
|
||||||
|
console.log(`[API] Request completed for control: ${controlName}`, result);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
} catch (error) {
|
||||||
|
// Clean up on error
|
||||||
|
apiCallCache.delete(cacheKey);
|
||||||
|
apiCallCache.delete(requestSignature);
|
||||||
|
pendingRequests.delete(controlKey);
|
||||||
|
|
||||||
|
console.warn('Failed to fetch valid metrics and dimensions:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create verification function for metrics controls
|
||||||
|
*/
|
||||||
|
export function createMetricsVerification(controlName?: string): AsyncVerify {
|
||||||
|
return async (props: ControlPropsWithExtras) => {
|
||||||
|
const { datasource, form_data, savedMetrics = [], value } = props;
|
||||||
|
|
||||||
|
// Only verify for semantic layer datasources
|
||||||
|
if (!supportsSemanticLayerVerification(datasource as Dataset)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[MetricsVerification] Triggered for control: ${controlName}`, {
|
||||||
|
datasource: datasource?.id,
|
||||||
|
form_data,
|
||||||
|
value,
|
||||||
|
savedMetrics: savedMetrics.length,
|
||||||
|
stackTrace: new Error().stack?.split('\n').slice(1, 4).join('\n'),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create form data with the current value for this control
|
||||||
|
const syntheticFormData = { ...form_data };
|
||||||
|
if (controlName) {
|
||||||
|
syntheticFormData[controlName] = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract query fields using the complete form data approach
|
||||||
|
const queryFields = collectQueryFields(syntheticFormData);
|
||||||
|
|
||||||
|
console.log(`[MetricsVerification] Query fields:`, queryFields);
|
||||||
|
console.log(`[MetricsVerification] Form data:`, form_data);
|
||||||
|
console.log(`[MetricsVerification] Synthetic form data:`, syntheticFormData);
|
||||||
|
|
||||||
|
const validationResult = await callValidationAPI(
|
||||||
|
datasource as Dataset,
|
||||||
|
queryFields.dimensions,
|
||||||
|
queryFields.metrics,
|
||||||
|
controlName,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!validationResult) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return createVerificationResult(
|
||||||
|
validationResult,
|
||||||
|
savedMetrics,
|
||||||
|
props,
|
||||||
|
controlName,
|
||||||
|
);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create verification function for dimensions controls
|
||||||
|
*/
|
||||||
|
export function createColumnsVerification(controlName?: string): AsyncVerify {
|
||||||
|
return async (props: ControlPropsWithExtras) => {
|
||||||
|
const { datasource, form_data, options = [], actions, value } = props;
|
||||||
|
|
||||||
|
// Only verify for semantic layer datasources
|
||||||
|
if (!supportsSemanticLayerVerification(datasource as Dataset)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle initial verification for fresh charts
|
||||||
|
const triggerInitialVerification = (props as any).triggerInitialVerification;
|
||||||
|
const datasourceControlKey = `${datasource?.id}_${controlName}`;
|
||||||
|
|
||||||
|
if (triggerInitialVerification && !initialVerificationDone.has(datasourceControlKey)) {
|
||||||
|
console.log(`[ColumnsVerification] Triggering initial verification for control: ${controlName}`);
|
||||||
|
initialVerificationDone.add(datasourceControlKey);
|
||||||
|
|
||||||
|
// Trigger initial verification with empty form data
|
||||||
|
const initialResult = await callValidationAPI(
|
||||||
|
datasource as Dataset,
|
||||||
|
[],
|
||||||
|
[],
|
||||||
|
controlName,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (initialResult) {
|
||||||
|
// Mark all options as enabled/disabled based on initial result
|
||||||
|
const validDimensionNames = new Set(initialResult.dimensions);
|
||||||
|
const updatedOptions = options.map((option: any) => ({
|
||||||
|
...option,
|
||||||
|
isDisabled: !validDimensionNames.has(option.column_name || option),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Update left panel disabled states
|
||||||
|
const verificationResult = createVerificationResult(
|
||||||
|
initialResult,
|
||||||
|
[],
|
||||||
|
props,
|
||||||
|
controlName,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
options: updatedOptions,
|
||||||
|
datasource: verificationResult.datasource,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[ColumnsVerification] Triggered for control: ${controlName}`, {
|
||||||
|
datasource: datasource?.id,
|
||||||
|
form_data,
|
||||||
|
value,
|
||||||
|
options: options.length,
|
||||||
|
stackTrace: new Error().stack?.split('\n').slice(1, 6).join('\n'),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create form data with the current value
|
||||||
|
const syntheticFormData = { ...form_data };
|
||||||
|
if (controlName) {
|
||||||
|
syntheticFormData[controlName] = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract query fields using the complete form data approach
|
||||||
|
const queryFields = collectQueryFields(syntheticFormData);
|
||||||
|
|
||||||
|
console.log(`[ColumnsVerification] Query fields:`, queryFields);
|
||||||
|
console.log(`[ColumnsVerification] Form data:`, form_data);
|
||||||
|
console.log(`[ColumnsVerification] Synthetic form data:`, syntheticFormData);
|
||||||
|
|
||||||
|
const validationResult = await callValidationAPI(
|
||||||
|
datasource as Dataset,
|
||||||
|
queryFields.dimensions,
|
||||||
|
queryFields.metrics,
|
||||||
|
controlName,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!validationResult) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mark dimension options as disabled if invalid
|
||||||
|
const validDimensionNames = new Set(validationResult.dimensions);
|
||||||
|
const updatedOptions = options.map((option: any) => ({
|
||||||
|
...option,
|
||||||
|
isDisabled: !validDimensionNames.has(option.column_name || option),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Use createVerificationResult helper for consistent processing
|
||||||
|
const verificationResult = createVerificationResult(
|
||||||
|
validationResult,
|
||||||
|
[], // savedMetrics not used for columns verification
|
||||||
|
props,
|
||||||
|
controlName,
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
options: updatedOptions,
|
||||||
|
datasource: verificationResult.datasource,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create onChange handler that triggers re-rendering of other controls when values change
|
||||||
|
*/
|
||||||
|
export function createSemanticLayerOnChange(
|
||||||
|
controlName: string,
|
||||||
|
affectedControls: string[],
|
||||||
|
) {
|
||||||
|
return (value: JsonValue, props: ControlPropsWithExtras) => {
|
||||||
|
const { actions, form_data } = props;
|
||||||
|
|
||||||
|
// Trigger re-rendering of affected controls by updating their values
|
||||||
|
// This forces the verification to run again
|
||||||
|
affectedControls.forEach(controlField => {
|
||||||
|
if (
|
||||||
|
controlField !== controlName &&
|
||||||
|
form_data &&
|
||||||
|
form_data[controlField]
|
||||||
|
) {
|
||||||
|
actions.setControlValue(controlField, form_data[controlField], []);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get list of control fields that should trigger re-rendering
|
||||||
|
*/
|
||||||
|
/**
 * Form-data fields whose controls participate in semantic-layer
 * verification: when any one changes, the others are re-verified.
 * Must stay in sync with the fields read by collectQueryFields.
 */
export const SEMANTIC_LAYER_CONTROL_FIELDS = [
  // Metric controls
  'metrics',
  'metric',
  'metric_2',
  'percent_metrics',
  'timeseries_limit_metric',
  'x',
  'y',
  'size',
  'secondary_metric',

  // Dimension controls
  'groupby',
  'columns',
  'all_columns',
  'series_columns',
  'series',
  'entity',
  'x_axis',
];
|
||||||
@@ -54,6 +54,10 @@ export type FullControlProps = ControlPropsWithExtras & {
|
|||||||
* An extra flag for triggering async verification. Set it in mapStateToProps.
|
* An extra flag for triggering async verification. Set it in mapStateToProps.
|
||||||
*/
|
*/
|
||||||
needAsyncVerification?: boolean;
|
needAsyncVerification?: boolean;
|
||||||
|
/**
|
||||||
|
* Whether to skip useEffect verification and only use onChange verification.
|
||||||
|
*/
|
||||||
|
skipEffectVerification?: boolean;
|
||||||
/**
|
/**
|
||||||
* Whether to show loading state when verification is still loading.
|
* Whether to show loading state when verification is still loading.
|
||||||
*/
|
*/
|
||||||
@@ -136,6 +140,7 @@ export default function withAsyncVerification({
|
|||||||
hovered,
|
hovered,
|
||||||
onChange: basicOnChange,
|
onChange: basicOnChange,
|
||||||
needAsyncVerification = false,
|
needAsyncVerification = false,
|
||||||
|
skipEffectVerification = false,
|
||||||
isLoading: initialIsLoading = false,
|
isLoading: initialIsLoading = false,
|
||||||
showLoadingState = defaultShowLoadingState,
|
showLoadingState = defaultShowLoadingState,
|
||||||
verify = defaultVerify,
|
verify = defaultVerify,
|
||||||
@@ -145,6 +150,7 @@ export default function withAsyncVerification({
|
|||||||
const [verifiedProps, setVerifiedProps] = useState({});
|
const [verifiedProps, setVerifiedProps] = useState({});
|
||||||
const [isLoading, setIsLoading] = useState<boolean>(initialIsLoading);
|
const [isLoading, setIsLoading] = useState<boolean>(initialIsLoading);
|
||||||
const { addWarningToast } = restProps.actions;
|
const { addWarningToast } = restProps.actions;
|
||||||
|
const verificationTriggeredByChange = useRef(false);
|
||||||
|
|
||||||
// memoize `restProps`, so that verification only triggers when material
|
// memoize `restProps`, so that verification only triggers when material
|
||||||
// props are actually updated.
|
// props are actually updated.
|
||||||
@@ -153,19 +159,6 @@ export default function withAsyncVerification({
|
|||||||
otherProps = otherPropsRef.current = restProps;
|
otherProps = otherPropsRef.current = restProps;
|
||||||
}
|
}
|
||||||
|
|
||||||
const handleChange = useCallback(
|
|
||||||
(value: JsonValue) => {
|
|
||||||
// the default onChange handler, triggers the `setControlValue` action
|
|
||||||
if (basicOnChange) {
|
|
||||||
basicOnChange(value);
|
|
||||||
}
|
|
||||||
if (onChange) {
|
|
||||||
onChange(value, { ...otherProps, ...verifiedProps });
|
|
||||||
}
|
|
||||||
},
|
|
||||||
[basicOnChange, otherProps, verifiedProps],
|
|
||||||
);
|
|
||||||
|
|
||||||
const verifyProps = useEffectEvent(
|
const verifyProps = useEffectEvent(
|
||||||
(verifyFunc: AsyncVerify, props: typeof otherProps) => {
|
(verifyFunc: AsyncVerify, props: typeof otherProps) => {
|
||||||
if (showLoadingState) {
|
if (showLoadingState) {
|
||||||
@@ -202,11 +195,49 @@ export default function withAsyncVerification({
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
const handleChange = useCallback(
|
||||||
|
(value: JsonValue) => {
|
||||||
|
// the default onChange handler, triggers the `setControlValue` action
|
||||||
|
if (basicOnChange) {
|
||||||
|
basicOnChange(value);
|
||||||
|
}
|
||||||
|
if (onChange) {
|
||||||
|
onChange(value, { ...otherProps, ...verifiedProps });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trigger verification with the new value if verification is enabled
|
||||||
if (needAsyncVerification && verify) {
|
if (needAsyncVerification && verify) {
|
||||||
|
verificationTriggeredByChange.current = true;
|
||||||
|
const propsWithNewValue = { ...otherProps, ...verifiedProps, value };
|
||||||
|
verifyProps(verify, propsWithNewValue);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[
|
||||||
|
basicOnChange,
|
||||||
|
otherProps,
|
||||||
|
verifiedProps,
|
||||||
|
needAsyncVerification,
|
||||||
|
verify,
|
||||||
|
verifyProps,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (needAsyncVerification && verify && !skipEffectVerification) {
|
||||||
|
// Skip verification if it was just triggered by onChange
|
||||||
|
if (verificationTriggeredByChange.current) {
|
||||||
|
verificationTriggeredByChange.current = false;
|
||||||
|
return;
|
||||||
|
}
|
||||||
verifyProps(verify, otherProps);
|
verifyProps(verify, otherProps);
|
||||||
}
|
}
|
||||||
}, [needAsyncVerification, verify, otherProps, verifyProps]);
|
}, [
|
||||||
|
needAsyncVerification,
|
||||||
|
verify,
|
||||||
|
otherProps,
|
||||||
|
verifyProps,
|
||||||
|
skipEffectVerification,
|
||||||
|
]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<ControlComponent
|
<ControlComponent
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ import {
|
|||||||
ClientErrorObject,
|
ClientErrorObject,
|
||||||
} from '@superset-ui/core';
|
} from '@superset-ui/core';
|
||||||
import setupErrorMessages from 'src/setup/setupErrorMessages';
|
import setupErrorMessages from 'src/setup/setupErrorMessages';
|
||||||
|
import setupSemanticLayer from 'src/setup/setupSemanticLayer';
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||||
declare global {
|
declare global {
|
||||||
@@ -96,4 +97,7 @@ export default function setupApp() {
|
|||||||
|
|
||||||
// set up app wide custom error messages
|
// set up app wide custom error messages
|
||||||
setupErrorMessages();
|
setupErrorMessages();
|
||||||
|
|
||||||
|
// set up semantic layer controls
|
||||||
|
setupSemanticLayer();
|
||||||
}
|
}
|
||||||
|
|||||||
41
superset-frontend/src/setup/setupSemanticLayer.ts
Normal file
41
superset-frontend/src/setup/setupSemanticLayer.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { setSemanticLayerUtilities } from '@superset-ui/chart-controls';
|
||||||
|
import withAsyncVerification from 'src/explore/components/controls/withAsyncVerification';
|
||||||
|
import {
|
||||||
|
createMetricsVerification,
|
||||||
|
createColumnsVerification,
|
||||||
|
createSemanticLayerOnChange,
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
} from 'src/explore/components/controls/SemanticLayerVerification';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize semantic layer controls by setting up the utilities
|
||||||
|
* in the chart controls package.
|
||||||
|
*/
|
||||||
|
export default function setupSemanticLayer() {
|
||||||
|
setSemanticLayerUtilities({
|
||||||
|
withAsyncVerification,
|
||||||
|
createMetricsVerification,
|
||||||
|
createColumnsVerification,
|
||||||
|
createSemanticLayerOnChange,
|
||||||
|
SEMANTIC_LAYER_CONTROL_FIELDS,
|
||||||
|
});
|
||||||
|
}
|
||||||
@@ -174,6 +174,8 @@ MODEL_API_RW_METHOD_PERMISSION_MAP = {
|
|||||||
"put_filters": "write",
|
"put_filters": "write",
|
||||||
"put_colors": "write",
|
"put_colors": "write",
|
||||||
"sync_permissions": "write",
|
"sync_permissions": "write",
|
||||||
|
"valid_metrics_and_dimensions": "read",
|
||||||
|
"sync_metrics": "write",
|
||||||
}
|
}
|
||||||
|
|
||||||
EXTRA_FORM_DATA_APPEND_KEYS = {
|
EXTRA_FORM_DATA_APPEND_KEYS = {
|
||||||
|
|||||||
@@ -23,11 +23,13 @@ from superset.connectors.sqla.models import SqlaTable
|
|||||||
from superset.daos.base import BaseDAO
|
from superset.daos.base import BaseDAO
|
||||||
from superset.databases.filters import DatabaseFilter
|
from superset.databases.filters import DatabaseFilter
|
||||||
from superset.databases.ssh_tunnel.models import SSHTunnel
|
from superset.databases.ssh_tunnel.models import SSHTunnel
|
||||||
from superset.extensions import db
|
from superset.db_engine_specs.base import ValidColumnsType
|
||||||
|
from superset.extensions import cache_manager, db
|
||||||
from superset.models.core import Database, DatabaseUserOAuth2Tokens
|
from superset.models.core import Database, DatabaseUserOAuth2Tokens
|
||||||
from superset.models.dashboard import Dashboard
|
from superset.models.dashboard import Dashboard
|
||||||
from superset.models.slice import Slice
|
from superset.models.slice import Slice
|
||||||
from superset.models.sql_lab import TabState
|
from superset.models.sql_lab import TabState
|
||||||
|
from superset.sql.parse import Table
|
||||||
from superset.utils.core import DatasourceType
|
from superset.utils.core import DatasourceType
|
||||||
from superset.utils.ssh_tunnel import unmask_password_info
|
from superset.utils.ssh_tunnel import unmask_password_info
|
||||||
|
|
||||||
@@ -166,6 +168,65 @@ class DatabaseDAO(BaseDAO[Database]):
|
|||||||
|
|
||||||
return ssh_tunnel
|
return ssh_tunnel
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@cache_manager.data_cache.memoize(timeout=86400) # 1 day cache
|
||||||
|
def get_valid_metrics_and_dimensions(
|
||||||
|
cls,
|
||||||
|
database_id: int,
|
||||||
|
datasource_id: int,
|
||||||
|
dimensions: set[str],
|
||||||
|
metrics: set[str],
|
||||||
|
) -> ValidColumnsType:
|
||||||
|
"""
|
||||||
|
Get valid metrics and dimensions for a datasource using the database engine spec.
|
||||||
|
Results are cached for 1 day to improve performance.
|
||||||
|
|
||||||
|
:param database_id: The database ID
|
||||||
|
:param datasource_id: The datasource ID
|
||||||
|
:param dimensions: Set of selected column names
|
||||||
|
:param metrics: Set of selected metric names
|
||||||
|
:return: Dictionary with 'dimensions' and 'metrics' keys containing valid sets
|
||||||
|
:raises ValueError: If database or datasource not found, or invalid type
|
||||||
|
"""
|
||||||
|
database = cls.find_by_id(database_id)
|
||||||
|
if not database:
|
||||||
|
raise ValueError(f"Database with id {database_id} not found")
|
||||||
|
|
||||||
|
datasource = db.session.query(SqlaTable).get(datasource_id)
|
||||||
|
if not datasource:
|
||||||
|
raise ValueError(f"Table with id {datasource_id} not found")
|
||||||
|
|
||||||
|
return database.db_engine_spec.get_valid_metrics_and_dimensions(
|
||||||
|
database,
|
||||||
|
datasource,
|
||||||
|
dimensions,
|
||||||
|
metrics,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_metrics(
|
||||||
|
cls,
|
||||||
|
database_id: int,
|
||||||
|
table: Table,
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Get table metrics from the source system for semantic layer datasets.
|
||||||
|
|
||||||
|
:param database_id: The database ID
|
||||||
|
:param table: The table object with name, schema, and catalog
|
||||||
|
:return: List of metrics from the source system
|
||||||
|
:raises ValueError: If database not found or doesn't support dynamic metrics
|
||||||
|
"""
|
||||||
|
database = cls.find_by_id(database_id)
|
||||||
|
if not database:
|
||||||
|
raise ValueError(f"Database with id {database_id} not found")
|
||||||
|
|
||||||
|
# Check if database supports dynamic metrics (semantic layer)
|
||||||
|
if not database.db_engine_spec.supports_dynamic_columns:
|
||||||
|
raise ValueError("Database does not support dynamic metrics")
|
||||||
|
|
||||||
|
return database.get_metrics(table)
|
||||||
|
|
||||||
|
|
||||||
class SSHTunnelDAO(BaseDAO[SSHTunnel]):
|
class SSHTunnelDAO(BaseDAO[SSHTunnel]):
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -75,12 +75,11 @@ class DatasetDAO(BaseDAO[SqlaTable]):
|
|||||||
database: Database,
|
database: Database,
|
||||||
table: Table,
|
table: Table,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
try:
|
with database.get_inspector(
|
||||||
database.get_table(table)
|
catalog=table.catalog,
|
||||||
return True
|
schema=table.schema,
|
||||||
except SQLAlchemyError as ex: # pragma: no cover
|
) as inspector:
|
||||||
logger.warning("Got an error %s validating table: %s", str(ex), table)
|
return database.db_engine_spec.has_table(database, inspector, table)
|
||||||
return False
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def validate_uniqueness(
|
def validate_uniqueness(
|
||||||
|
|||||||
@@ -103,6 +103,7 @@ from superset.databases.schemas import (
|
|||||||
UploadPostSchema,
|
UploadPostSchema,
|
||||||
ValidateSQLRequest,
|
ValidateSQLRequest,
|
||||||
ValidateSQLResponse,
|
ValidateSQLResponse,
|
||||||
|
ValidMetricsAndDimensionsRequestSchema,
|
||||||
)
|
)
|
||||||
from superset.databases.utils import get_table_metadata
|
from superset.databases.utils import get_table_metadata
|
||||||
from superset.db_engine_specs import get_available_engine_specs
|
from superset.db_engine_specs import get_available_engine_specs
|
||||||
@@ -164,6 +165,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
|
|||||||
"available",
|
"available",
|
||||||
"validate_parameters",
|
"validate_parameters",
|
||||||
"validate_sql",
|
"validate_sql",
|
||||||
|
"valid_metrics_and_dimensions",
|
||||||
"delete_ssh_tunnel",
|
"delete_ssh_tunnel",
|
||||||
"schemas_access_for_file_upload",
|
"schemas_access_for_file_upload",
|
||||||
"get_connection",
|
"get_connection",
|
||||||
@@ -2098,3 +2100,70 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
|
|||||||
database, database.get_default_catalog(), schemas_allowed, True
|
database, database.get_default_catalog(), schemas_allowed, True
|
||||||
)
|
)
|
||||||
return self.response(200, schemas=schemas_allowed_processed)
|
return self.response(200, schemas=schemas_allowed_processed)
|
||||||
|
|
||||||
|
@expose("/<int:pk>/valid_metrics_and_dimensions/", methods=("POST",))
|
||||||
|
@protect()
|
||||||
|
@statsd_metrics
|
||||||
|
@event_logger.log_this_with_context(
|
||||||
|
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
|
||||||
|
f".valid_metrics_and_dimensions",
|
||||||
|
log_to_statsd=False,
|
||||||
|
)
|
||||||
|
@requires_json
|
||||||
|
def valid_metrics_and_dimensions(self, pk: int) -> FlaskResponse:
|
||||||
|
"""Get valid metrics and dimensions for a datasource.
|
||||||
|
---
|
||||||
|
post:
|
||||||
|
summary: Get valid metrics and dimensions for a datasource
|
||||||
|
parameters:
|
||||||
|
- in: path
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
name: pk
|
||||||
|
description: The database ID
|
||||||
|
requestBody:
|
||||||
|
description: Valid metrics and dimensions request
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ValidMetricsAndDimensionsRequestSchema"
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Valid metrics and dimensions
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: "#/components/schemas/ValidMetricsAndDimensionsResponseSchema"
|
||||||
|
400:
|
||||||
|
$ref: '#/components/responses/400'
|
||||||
|
401:
|
||||||
|
$ref: '#/components/responses/401'
|
||||||
|
404:
|
||||||
|
$ref: '#/components/responses/404'
|
||||||
|
500:
|
||||||
|
$ref: '#/components/responses/500'
|
||||||
|
"""
|
||||||
|
request_schema = ValidMetricsAndDimensionsRequestSchema()
|
||||||
|
try:
|
||||||
|
item = request_schema.load(request.json)
|
||||||
|
except ValidationError as error:
|
||||||
|
return self.response_400(message=error.messages)
|
||||||
|
|
||||||
|
datasource_id = item["datasource_id"]
|
||||||
|
dimensions = set(item["dimensions"])
|
||||||
|
metrics = set(item["metrics"])
|
||||||
|
|
||||||
|
result = DatabaseDAO.get_valid_metrics_and_dimensions(
|
||||||
|
pk,
|
||||||
|
datasource_id,
|
||||||
|
dimensions,
|
||||||
|
metrics,
|
||||||
|
)
|
||||||
|
|
||||||
|
response_data = {
|
||||||
|
"dimensions": list(result["dimensions"]),
|
||||||
|
"metrics": list(result["metrics"]),
|
||||||
|
}
|
||||||
|
|
||||||
|
return self.response(200, **response_data)
|
||||||
|
|||||||
@@ -707,7 +707,9 @@ class TableMetadataResponseSchema(Schema):
|
|||||||
TableMetadataPrimaryKeyResponseSchema,
|
TableMetadataPrimaryKeyResponseSchema,
|
||||||
metadata={"description": "Primary keys metadata"},
|
metadata={"description": "Primary keys metadata"},
|
||||||
)
|
)
|
||||||
selectStar = fields.String(metadata={"description": "SQL select star"}) # noqa: N815
|
selectStar = fields.String(
|
||||||
|
metadata={"description": "SQL select star"}
|
||||||
|
) # noqa: N815
|
||||||
|
|
||||||
|
|
||||||
class TableExtraMetadataResponseSchema(Schema):
|
class TableExtraMetadataResponseSchema(Schema):
|
||||||
@@ -1341,3 +1343,31 @@ class QualifiedTableSchema(Schema):
|
|||||||
load_default=None,
|
load_default=None,
|
||||||
metadata={"description": "The table catalog"},
|
metadata={"description": "The table catalog"},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ValidMetricsAndDimensionsRequestSchema(Schema):
|
||||||
|
datasource_id = fields.Integer(
|
||||||
|
required=True,
|
||||||
|
metadata={"description": "The datasource ID"},
|
||||||
|
)
|
||||||
|
dimensions = fields.List(
|
||||||
|
fields.String(),
|
||||||
|
required=True,
|
||||||
|
metadata={"description": "List of selected dimension names"},
|
||||||
|
)
|
||||||
|
metrics = fields.List(
|
||||||
|
fields.String(),
|
||||||
|
required=True,
|
||||||
|
metadata={"description": "List of selected metric names"},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ValidMetricsAndDimensionsResponseSchema(Schema):
|
||||||
|
dimensions = fields.List(
|
||||||
|
fields.String(),
|
||||||
|
metadata={"description": "List of valid dimension names"},
|
||||||
|
)
|
||||||
|
metrics = fields.List(
|
||||||
|
fields.String(),
|
||||||
|
metadata={"description": "List of valid metric names"},
|
||||||
|
)
|
||||||
|
|||||||
@@ -34,6 +34,7 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface
|
|||||||
from flask_babel import ngettext
|
from flask_babel import ngettext
|
||||||
from jinja2.exceptions import TemplateSyntaxError
|
from jinja2.exceptions import TemplateSyntaxError
|
||||||
from marshmallow import ValidationError
|
from marshmallow import ValidationError
|
||||||
|
from sqlalchemy.exc import NoResultFound, NoSuchTableError
|
||||||
|
|
||||||
from superset import event_logger, is_feature_enabled
|
from superset import event_logger, is_feature_enabled
|
||||||
from superset.commands.dataset.create import CreateDatasetCommand
|
from superset.commands.dataset.create import CreateDatasetCommand
|
||||||
@@ -58,6 +59,7 @@ from superset.commands.importers.exceptions import NoValidFilesFoundError
|
|||||||
from superset.commands.importers.v1.utils import get_contents_from_bundle
|
from superset.commands.importers.v1.utils import get_contents_from_bundle
|
||||||
from superset.connectors.sqla.models import SqlaTable
|
from superset.connectors.sqla.models import SqlaTable
|
||||||
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
|
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
|
||||||
|
from superset.daos.database import DatabaseDAO
|
||||||
from superset.daos.dataset import DatasetDAO
|
from superset.daos.dataset import DatasetDAO
|
||||||
from superset.databases.filters import DatabaseFilter
|
from superset.databases.filters import DatabaseFilter
|
||||||
from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter
|
from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter
|
||||||
@@ -75,6 +77,7 @@ from superset.datasets.schemas import (
|
|||||||
)
|
)
|
||||||
from superset.exceptions import SupersetTemplateException
|
from superset.exceptions import SupersetTemplateException
|
||||||
from superset.jinja_context import BaseTemplateProcessor, get_template_processor
|
from superset.jinja_context import BaseTemplateProcessor, get_template_processor
|
||||||
|
from superset.sql.parse import Table
|
||||||
from superset.utils import json
|
from superset.utils import json
|
||||||
from superset.utils.core import parse_boolean_string
|
from superset.utils.core import parse_boolean_string
|
||||||
from superset.views.base import DatasourceFilter
|
from superset.views.base import DatasourceFilter
|
||||||
@@ -110,6 +113,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
|
|||||||
"duplicate",
|
"duplicate",
|
||||||
"get_or_create_dataset",
|
"get_or_create_dataset",
|
||||||
"warm_up_cache",
|
"warm_up_cache",
|
||||||
|
"sync_metrics",
|
||||||
}
|
}
|
||||||
list_columns = [
|
list_columns = [
|
||||||
"id",
|
"id",
|
||||||
@@ -215,6 +219,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
|
|||||||
"columns.type_generic",
|
"columns.type_generic",
|
||||||
"database.backend",
|
"database.backend",
|
||||||
"database.allow_multi_catalog",
|
"database.allow_multi_catalog",
|
||||||
|
"database.engine_information",
|
||||||
"columns.advanced_data_type",
|
"columns.advanced_data_type",
|
||||||
"is_managed_externally",
|
"is_managed_externally",
|
||||||
"uid",
|
"uid",
|
||||||
@@ -1174,6 +1179,71 @@ class DatasetRestApi(BaseSupersetModelRestApi):
|
|||||||
return self.response_400(message=str(ex))
|
return self.response_400(message=str(ex))
|
||||||
return self.response(200, **response)
|
return self.response(200, **response)
|
||||||
|
|
||||||
|
@expose("/<pk>/sync_metrics", methods=("PUT",))
|
||||||
|
@protect()
|
||||||
|
@safe
|
||||||
|
@statsd_metrics
|
||||||
|
@event_logger.log_this_with_context(
|
||||||
|
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.sync_metrics",
|
||||||
|
log_to_statsd=False,
|
||||||
|
)
|
||||||
|
def sync_metrics(self, pk: int) -> Response:
|
||||||
|
"""Sync table metrics from the source system for semantic layer datasets.
|
||||||
|
---
|
||||||
|
post:
|
||||||
|
summary: Sync table metrics from the source system for semantic layer datasets
|
||||||
|
parameters:
|
||||||
|
- in: path
|
||||||
|
name: pk
|
||||||
|
schema:
|
||||||
|
type: integer
|
||||||
|
description: The dataset ID
|
||||||
|
responses:
|
||||||
|
200:
|
||||||
|
description: Metrics from the source system
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
metric_name:
|
||||||
|
type: string
|
||||||
|
expression:
|
||||||
|
type: string
|
||||||
|
description:
|
||||||
|
type: string
|
||||||
|
400:
|
||||||
|
$ref: '#/components/responses/400'
|
||||||
|
401:
|
||||||
|
$ref: '#/components/responses/401'
|
||||||
|
403:
|
||||||
|
$ref: '#/components/responses/403'
|
||||||
|
404:
|
||||||
|
$ref: '#/components/responses/404'
|
||||||
|
500:
|
||||||
|
$ref: '#/components/responses/500'
|
||||||
|
"""
|
||||||
|
# Get the dataset
|
||||||
|
try:
|
||||||
|
dataset = DatasetDAO.find_by_id(pk)
|
||||||
|
if not dataset:
|
||||||
|
return self.response_404()
|
||||||
|
except DatasetNotFoundError:
|
||||||
|
return self.response_404()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Create table object from dataset
|
||||||
|
table = Table(dataset.table_name, dataset.schema, catalog=dataset.catalog)
|
||||||
|
# Get metrics from the source system using DatabaseDAO
|
||||||
|
metrics = DatabaseDAO.get_metrics(dataset.database_id, table)
|
||||||
|
return self.response(200, result=metrics)
|
||||||
|
except (NoResultFound, NoSuchTableError):
|
||||||
|
return self.response_404()
|
||||||
|
except ValueError as ex:
|
||||||
|
return self.response_400(message=str(ex))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def render_dataset_fields(
|
def render_dataset_fields(
|
||||||
data: dict[str, Any], processor: BaseTemplateProcessor
|
data: dict[str, Any], processor: BaseTemplateProcessor
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ from typing import (
|
|||||||
cast,
|
cast,
|
||||||
ContextManager,
|
ContextManager,
|
||||||
NamedTuple,
|
NamedTuple,
|
||||||
|
Type,
|
||||||
TYPE_CHECKING,
|
TYPE_CHECKING,
|
||||||
TypedDict,
|
TypedDict,
|
||||||
Union,
|
Union,
|
||||||
@@ -54,7 +55,7 @@ from sqlalchemy.engine.reflection import Inspector
|
|||||||
from sqlalchemy.engine.url import URL
|
from sqlalchemy.engine.url import URL
|
||||||
from sqlalchemy.ext.compiler import compiles
|
from sqlalchemy.ext.compiler import compiles
|
||||||
from sqlalchemy.sql import literal_column, quoted_name, text
|
from sqlalchemy.sql import literal_column, quoted_name, text
|
||||||
from sqlalchemy.sql.expression import ColumnClause, Select, TextClause
|
from sqlalchemy.sql.expression import BinaryExpression, ColumnClause, Select, TextClause
|
||||||
from sqlalchemy.types import TypeEngine
|
from sqlalchemy.types import TypeEngine
|
||||||
|
|
||||||
from superset import db
|
from superset import db
|
||||||
@@ -62,6 +63,10 @@ from superset.constants import QUERY_CANCEL_KEY, TimeGrain as TimeGrainConstants
|
|||||||
from superset.databases.utils import get_table_metadata, make_url_safe
|
from superset.databases.utils import get_table_metadata, make_url_safe
|
||||||
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
|
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
|
||||||
from superset.exceptions import OAuth2Error, OAuth2RedirectError
|
from superset.exceptions import OAuth2Error, OAuth2RedirectError
|
||||||
|
from superset.extensions.semantic_layer import (
|
||||||
|
get_sqla_type_from_dimension_type,
|
||||||
|
SemanticLayer,
|
||||||
|
)
|
||||||
from superset.sql.parse import (
|
from superset.sql.parse import (
|
||||||
BaseSQLStatement,
|
BaseSQLStatement,
|
||||||
LimitMethod,
|
LimitMethod,
|
||||||
@@ -85,7 +90,7 @@ from superset.utils.network import is_hostname_valid, is_port_open
|
|||||||
from superset.utils.oauth2 import encode_oauth2_state
|
from superset.utils.oauth2 import encode_oauth2_state
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from superset.connectors.sqla.models import TableColumn
|
from superset.connectors.sqla.models import SqlaTable, TableColumn
|
||||||
from superset.databases.schemas import TableMetadataResponse
|
from superset.databases.schemas import TableMetadataResponse
|
||||||
from superset.models.core import Database
|
from superset.models.core import Database
|
||||||
from superset.models.sql_lab import Query
|
from superset.models.sql_lab import Query
|
||||||
@@ -106,6 +111,15 @@ logger = logging.getLogger()
|
|||||||
GenericDBException = Exception
|
GenericDBException = Exception
|
||||||
|
|
||||||
|
|
||||||
|
class ValidColumnsType(TypedDict):
|
||||||
|
"""
|
||||||
|
Type for valid columns returned by `get_valid_metrics_and_dimensions`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
dimensions: set[str]
|
||||||
|
metrics: set[str]
|
||||||
|
|
||||||
|
|
||||||
def convert_inspector_columns(cols: list[SQLAColumnType]) -> list[ResultSetColumnType]:
|
def convert_inspector_columns(cols: list[SQLAColumnType]) -> list[ResultSetColumnType]:
|
||||||
result_set_columns: list[ResultSetColumnType] = []
|
result_set_columns: list[ResultSetColumnType] = []
|
||||||
for col in cols:
|
for col in cols:
|
||||||
@@ -143,7 +157,9 @@ builtin_time_grains: dict[str | None, str] = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
class TimestampExpression(ColumnClause): # pylint: disable=abstract-method, too-many-ancestors
|
class TimestampExpression(
|
||||||
|
ColumnClause
|
||||||
|
): # pylint: disable=abstract-method, too-many-ancestors
|
||||||
def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None:
|
def __init__(self, expr: str, col: ColumnClause, **kwargs: Any) -> None:
|
||||||
"""Sqlalchemy class that can be used to render native column elements respecting
|
"""Sqlalchemy class that can be used to render native column elements respecting
|
||||||
engine-specific quoting rules as part of a string-based expression.
|
engine-specific quoting rules as part of a string-based expression.
|
||||||
@@ -214,6 +230,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
"engine+driver://user:password@host:port/dbname[?key=value&key=value...]"
|
"engine+driver://user:password@host:port/dbname[?key=value&key=value...]"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# databases can optionally specify a semantic layer
|
||||||
|
semantic_layer: Type[SemanticLayer] | None = None
|
||||||
|
|
||||||
disable_ssh_tunneling = False
|
disable_ssh_tunneling = False
|
||||||
|
|
||||||
_date_trunc_functions: dict[str, str] = {}
|
_date_trunc_functions: dict[str, str] = {}
|
||||||
@@ -377,6 +396,10 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
disallow_uri_query_params: dict[str, set[str]] = {}
|
disallow_uri_query_params: dict[str, set[str]] = {}
|
||||||
# A Dict of query parameters that will always be used on every connection
|
# A Dict of query parameters that will always be used on every connection
|
||||||
# by driver name
|
# by driver name
|
||||||
|
|
||||||
|
# Whether to use equality operators (= true/false) instead of IS operators
|
||||||
|
# for boolean filters. Some databases like Snowflake don't support IS true/false
|
||||||
|
use_equality_for_boolean_filters = False
|
||||||
enforce_uri_query_params: dict[str, dict[str, Any]] = {}
|
enforce_uri_query_params: dict[str, dict[str, Any]] = {}
|
||||||
|
|
||||||
force_column_alias_quotes = False
|
force_column_alias_quotes = False
|
||||||
@@ -384,9 +407,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
max_column_name_length: int | None = None
|
max_column_name_length: int | None = None
|
||||||
try_remove_schema_from_table_name = True # pylint: disable=invalid-name
|
try_remove_schema_from_table_name = True # pylint: disable=invalid-name
|
||||||
run_multiple_statements_as_one = False
|
run_multiple_statements_as_one = False
|
||||||
custom_errors: dict[
|
custom_errors: dict[Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]] = (
|
||||||
Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]
|
{}
|
||||||
] = {}
|
)
|
||||||
|
|
||||||
# List of JSON path to fields in `encrypted_extra` that should be masked when the
|
# List of JSON path to fields in `encrypted_extra` that should be masked when the
|
||||||
# database is edited. By default everything is masked.
|
# database is edited. By default everything is masked.
|
||||||
@@ -436,6 +459,11 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
# the `cancel_query` value in the `extra` field of the `query` object
|
# the `cancel_query` value in the `extra` field of the `query` object
|
||||||
has_query_id_before_execute = True
|
has_query_id_before_execute = True
|
||||||
|
|
||||||
|
# This attribute is used for semantic layers, where only certain combinations of
|
||||||
|
# metrics and dimensions are valid for given datasource. For traditional databases
|
||||||
|
# this should be set to false.
|
||||||
|
supports_dynamic_columns = False
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_rls_method(cls) -> RLSMethod:
|
def get_rls_method(cls) -> RLSMethod:
|
||||||
"""
|
"""
|
||||||
@@ -1202,6 +1230,78 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
"""
|
"""
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def handle_boolean_filter(
|
||||||
|
cls, sqla_col: Any, op: str, value: bool
|
||||||
|
) -> BinaryExpression:
|
||||||
|
"""
|
||||||
|
Handle boolean filter operations with engine-specific logic.
|
||||||
|
|
||||||
|
By default, uses SQLAlchemy's IS operator (column IS true/false).
|
||||||
|
Engines that don't support IS for boolean values can override
|
||||||
|
use_equality_for_boolean_filters to use equality operators instead.
|
||||||
|
|
||||||
|
:param sqla_col: SQLAlchemy column element
|
||||||
|
:param op: Filter operator (IS_TRUE or IS_FALSE)
|
||||||
|
:param value: Boolean value (True or False)
|
||||||
|
:return: SQLAlchemy expression for the boolean filter
|
||||||
|
"""
|
||||||
|
if cls.use_equality_for_boolean_filters:
|
||||||
|
return sqla_col == value
|
||||||
|
else:
|
||||||
|
return sqla_col.is_(value)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def handle_null_filter(
|
||||||
|
cls,
|
||||||
|
sqla_col: Any,
|
||||||
|
op: utils.FilterOperator,
|
||||||
|
) -> BinaryExpression:
|
||||||
|
"""
|
||||||
|
Handle null/not null filter operations.
|
||||||
|
|
||||||
|
:param sqla_col: SQLAlchemy column element
|
||||||
|
:param op: Filter operator (IS_NULL or IS_NOT_NULL)
|
||||||
|
:return: SQLAlchemy expression for the null filter
|
||||||
|
"""
|
||||||
|
from superset.utils import core as utils
|
||||||
|
|
||||||
|
if op == utils.FilterOperator.IS_NULL:
|
||||||
|
return sqla_col.is_(None)
|
||||||
|
elif op == utils.FilterOperator.IS_NOT_NULL:
|
||||||
|
return sqla_col.isnot(None)
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Invalid null filter operator: {op}")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def handle_comparison_filter(
|
||||||
|
cls, sqla_col: Any, op: utils.FilterOperator, value: Any
|
||||||
|
) -> BinaryExpression:
|
||||||
|
"""
|
||||||
|
Handle comparison filter operations (=, !=, >, <, >=, <=).
|
||||||
|
|
||||||
|
:param sqla_col: SQLAlchemy column element
|
||||||
|
:param op: Filter operator
|
||||||
|
:param value: Filter value
|
||||||
|
:return: SQLAlchemy expression for the comparison filter
|
||||||
|
"""
|
||||||
|
from superset.utils import core as utils
|
||||||
|
|
||||||
|
if op == utils.FilterOperator.EQUALS:
|
||||||
|
return sqla_col == value
|
||||||
|
elif op == utils.FilterOperator.NOT_EQUALS:
|
||||||
|
return sqla_col != value
|
||||||
|
elif op == utils.FilterOperator.GREATER_THAN:
|
||||||
|
return sqla_col > value
|
||||||
|
elif op == utils.FilterOperator.LESS_THAN:
|
||||||
|
return sqla_col < value
|
||||||
|
elif op == utils.FilterOperator.GREATER_THAN_OR_EQUALS:
|
||||||
|
return sqla_col >= value
|
||||||
|
elif op == utils.FilterOperator.LESS_THAN_OR_EQUALS:
|
||||||
|
return sqla_col <= value
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Invalid comparison filter operator: {op}")
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def handle_cursor(cls, cursor: Any, query: Query) -> None:
|
def handle_cursor(cls, cursor: Any, query: Query) -> None:
|
||||||
"""Handle a live cursor between the execute and fetchall calls
|
"""Handle a live cursor between the execute and fetchall calls
|
||||||
@@ -1385,8 +1485,32 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
|
|
||||||
if schema and cls.try_remove_schema_from_table_name:
|
if schema and cls.try_remove_schema_from_table_name:
|
||||||
tables = {re.sub(f"^{schema}\\.", "", table) for table in tables}
|
tables = {re.sub(f"^{schema}\\.", "", table) for table in tables}
|
||||||
|
|
||||||
|
# add semantic views as tables too
|
||||||
|
if cls.semantic_layer:
|
||||||
|
semantic_layer = cls.semantic_layer(inspector.engine)
|
||||||
|
tables.update(
|
||||||
|
semantic_view.name
|
||||||
|
for semantic_view in semantic_layer.get_semantic_views()
|
||||||
|
)
|
||||||
|
|
||||||
return tables
|
return tables
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def has_table(
|
||||||
|
cls,
|
||||||
|
database: Database,
|
||||||
|
inspector: Inspector,
|
||||||
|
table: Table,
|
||||||
|
) -> bool:
|
||||||
|
if cls.semantic_layer:
|
||||||
|
semantic_layer = cls.semantic_layer(inspector.engine)
|
||||||
|
semantic_views = semantic_layer.get_semantic_views()
|
||||||
|
if table.table in {semantic_view.name for semantic_view in semantic_views}:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return inspector.has_table(table.table, table.schema)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_view_names( # pylint: disable=unused-argument
|
def get_view_names( # pylint: disable=unused-argument
|
||||||
cls,
|
cls,
|
||||||
@@ -1460,6 +1584,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
@classmethod
|
@classmethod
|
||||||
def get_columns( # pylint: disable=unused-argument
|
def get_columns( # pylint: disable=unused-argument
|
||||||
cls,
|
cls,
|
||||||
|
database: Database,
|
||||||
inspector: Inspector,
|
inspector: Inspector,
|
||||||
table: Table,
|
table: Table,
|
||||||
options: dict[str, Any] | None = None,
|
options: dict[str, Any] | None = None,
|
||||||
@@ -1467,7 +1592,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
"""
|
"""
|
||||||
Get all columns from a given schema and table.
|
Get all columns from a given schema and table.
|
||||||
|
|
||||||
The inspector will be bound to a catalog, if one was specified.
|
The inspector will be bound to a catalog, if one was specified. If the database
|
||||||
|
supports semantic layers the method will check if the table is a semantic view,
|
||||||
|
and return columns (metrics and dimensions) from it instead.
|
||||||
|
|
||||||
:param inspector: SqlAlchemy Inspector instance
|
:param inspector: SqlAlchemy Inspector instance
|
||||||
:param table: Table instance
|
:param table: Table instance
|
||||||
@@ -1475,6 +1602,26 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
some databases
|
some databases
|
||||||
:return: All columns in table
|
:return: All columns in table
|
||||||
"""
|
"""
|
||||||
|
if cls.semantic_layer:
|
||||||
|
semantic_layer = cls.semantic_layer(inspector.engine)
|
||||||
|
semantic_views = {
|
||||||
|
semantic_view.name: semantic_view
|
||||||
|
for semantic_view in semantic_layer.get_semantic_views()
|
||||||
|
}
|
||||||
|
if semantic_view := semantic_views.get(table.table):
|
||||||
|
dialect = database.get_dialect()
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"name": dimension.name,
|
||||||
|
"column_name": dimension.name,
|
||||||
|
"type": cls.column_datatype_to_string(
|
||||||
|
get_sqla_type_from_dimension_type(dimension.type),
|
||||||
|
dialect,
|
||||||
|
),
|
||||||
|
}
|
||||||
|
for dimension in semantic_layer.get_dimensions(semantic_view)
|
||||||
|
]
|
||||||
|
|
||||||
return convert_inspector_columns(
|
return convert_inspector_columns(
|
||||||
cast(
|
cast(
|
||||||
list[SQLAColumnType],
|
list[SQLAColumnType],
|
||||||
@@ -1492,6 +1639,22 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
"""
|
"""
|
||||||
Get all metrics from a given schema and table.
|
Get all metrics from a given schema and table.
|
||||||
"""
|
"""
|
||||||
|
if cls.semantic_layer:
|
||||||
|
semantic_layer = cls.semantic_layer(inspector.engine)
|
||||||
|
semantic_views = {
|
||||||
|
semantic_view.name: semantic_view
|
||||||
|
for semantic_view in semantic_layer.get_semantic_views()
|
||||||
|
}
|
||||||
|
if semantic_view := semantic_views.get(table.table):
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"metric_name": metric.name,
|
||||||
|
"verbose_name": metric.name,
|
||||||
|
"expression": metric.sql,
|
||||||
|
}
|
||||||
|
for metric in semantic_layer.get_metrics(semantic_view)
|
||||||
|
]
|
||||||
|
|
||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
"metric_name": "count",
|
"metric_name": "count",
|
||||||
@@ -1501,6 +1664,62 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_valid_metrics_and_dimensions(
|
||||||
|
cls,
|
||||||
|
database: Database,
|
||||||
|
table: SqlaTable,
|
||||||
|
dimensions: set[str],
|
||||||
|
metrics: set[str],
|
||||||
|
) -> ValidColumnsType:
|
||||||
|
"""
|
||||||
|
Get valid metrics and dimensions.
|
||||||
|
|
||||||
|
Given a datasource, and sets of selected metrics and dimensions, return the
|
||||||
|
sets of valid metrics and dimensions that can further be selected.
|
||||||
|
"""
|
||||||
|
if cls.semantic_layer:
|
||||||
|
with database.get_sqla_engine() as engine:
|
||||||
|
semantic_layer = cls.semantic_layer(engine)
|
||||||
|
semantic_views = {
|
||||||
|
semantic_view.name: semantic_view
|
||||||
|
for semantic_view in semantic_layer.get_semantic_views()
|
||||||
|
}
|
||||||
|
if semantic_view := semantic_views.get(table.table):
|
||||||
|
selected_metrics = {
|
||||||
|
metric
|
||||||
|
for metric in semantic_layer.get_metrics(semantic_view)
|
||||||
|
if metric.name in metrics
|
||||||
|
}
|
||||||
|
selected_dimensions = {
|
||||||
|
dimension
|
||||||
|
for dimension in semantic_layer.get_dimensions(semantic_view)
|
||||||
|
if dimension.name in dimensions
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
"metrics": {
|
||||||
|
metric.name
|
||||||
|
for metric in semantic_layer.get_valid_metrics(
|
||||||
|
semantic_view,
|
||||||
|
selected_metrics,
|
||||||
|
selected_dimensions,
|
||||||
|
)
|
||||||
|
},
|
||||||
|
"dimensions": {
|
||||||
|
dimension.name
|
||||||
|
for dimension in semantic_layer.get_valid_dimensions(
|
||||||
|
semantic_view,
|
||||||
|
selected_metrics,
|
||||||
|
selected_dimensions,
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
"dimensions": {column.column_name for column in table.columns},
|
||||||
|
"metrics": {metric.metric_name for metric in table.metrics},
|
||||||
|
}
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def where_latest_partition( # pylint: disable=unused-argument
|
def where_latest_partition( # pylint: disable=unused-argument
|
||||||
cls,
|
cls,
|
||||||
@@ -1767,6 +1986,11 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
:param kwargs: kwargs to be passed to cursor.execute()
|
:param kwargs: kwargs to be passed to cursor.execute()
|
||||||
:return:
|
:return:
|
||||||
"""
|
"""
|
||||||
|
if cls.semantic_layer:
|
||||||
|
with cls.get_engine(database, schema="tpcds_sf10tcl") as engine:
|
||||||
|
semantic_layer = cls.semantic_layer(engine)
|
||||||
|
query = semantic_layer.get_query_from_standard_sql(query).sql
|
||||||
|
|
||||||
if cls.arraysize:
|
if cls.arraysize:
|
||||||
cursor.arraysize = cls.arraysize
|
cursor.arraysize = cls.arraysize
|
||||||
try:
|
try:
|
||||||
@@ -2148,6 +2372,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
|
|||||||
"supports_file_upload": cls.supports_file_upload,
|
"supports_file_upload": cls.supports_file_upload,
|
||||||
"disable_ssh_tunneling": cls.disable_ssh_tunneling,
|
"disable_ssh_tunneling": cls.disable_ssh_tunneling,
|
||||||
"supports_dynamic_catalog": cls.supports_dynamic_catalog,
|
"supports_dynamic_catalog": cls.supports_dynamic_catalog,
|
||||||
|
"supports_dynamic_columns": cls.supports_dynamic_columns,
|
||||||
"supports_oauth2": cls.supports_oauth2,
|
"supports_oauth2": cls.supports_oauth2,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
194
superset/db_engine_specs/metricflow.py
Normal file
194
superset/db_engine_specs/metricflow.py
Normal file
@@ -0,0 +1,194 @@
|
|||||||
|
# Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
# or more contributor license agreements. See the NOTICE file
|
||||||
|
# distributed with this work for additional information
|
||||||
|
# regarding copyright ownership. The ASF licenses this file
|
||||||
|
# to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance
|
||||||
|
# with the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing,
|
||||||
|
# software distributed under the License is distributed on an
|
||||||
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
# KIND, either express or implied. See the License for the
|
||||||
|
# specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
"""
|
||||||
|
An interface to dbt's semantic layer, Metric Flow.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any, TYPE_CHECKING, TypedDict
|
||||||
|
|
||||||
|
from shillelagh.backends.apsw.dialects.base import get_adapter_for_table_name
|
||||||
|
from shillelagh.backends.apsw.dialects.metricflow import TABLE_NAME
|
||||||
|
from sqlalchemy import event
|
||||||
|
from sqlalchemy.engine import Connection, Engine
|
||||||
|
|
||||||
|
from superset.connectors.sqla.models import SqlaTable
|
||||||
|
from superset.constants import TimeGrain
|
||||||
|
from superset.db_engine_specs.base import ValidColumnsType
|
||||||
|
from superset.db_engine_specs.shillelagh import ShillelaghEngineSpec
|
||||||
|
from superset.extensions import cache_manager
|
||||||
|
from superset.utils.cache import memoized_func
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from shillelagh.fields import Field
|
||||||
|
from sqlalchemy.engine.reflection import Inspector
|
||||||
|
|
||||||
|
from superset.models.core import Database
|
||||||
|
from superset.sql.parse import Table
|
||||||
|
from superset.superset_typing import ResultSetColumnType
|
||||||
|
|
||||||
|
|
||||||
|
@event.listens_for(Engine, "engine_connect")
|
||||||
|
def receive_engine_connect(conn: Connection, branch: bool) -> None:
|
||||||
|
"""
|
||||||
|
Called when a new DB connection is created.
|
||||||
|
|
||||||
|
This hook adds a cache to the `_build_column_from_dimension` method of the Metric
|
||||||
|
Flow adapter, since it's called frequently and can be expensive.
|
||||||
|
"""
|
||||||
|
engine = conn.engine
|
||||||
|
if not engine or not engine.name == "metricflow":
|
||||||
|
return
|
||||||
|
|
||||||
|
from shillelagh.adapters.api.dbt_metricflow import DbtMetricFlowAPI
|
||||||
|
|
||||||
|
if getattr(DbtMetricFlowAPI, "_patched", False):
|
||||||
|
return
|
||||||
|
|
||||||
|
original_method = DbtMetricFlowAPI._build_column_from_dimension
|
||||||
|
|
||||||
|
@memoized_func(
|
||||||
|
key="metricflow:dimension:{name}",
|
||||||
|
cache=cache_manager.data_cache,
|
||||||
|
)
|
||||||
|
def cached_build_column_from_dimension(
|
||||||
|
self: DbtMetricFlowAPI,
|
||||||
|
name: str,
|
||||||
|
*args: Any,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> Field:
|
||||||
|
return original_method(self, name)
|
||||||
|
|
||||||
|
DbtMetricFlowAPI._build_column_from_dimension = cached_build_column_from_dimension
|
||||||
|
DbtMetricFlowAPI._patched = True
|
||||||
|
|
||||||
|
|
||||||
|
SELECT_STAR_MESSAGE = (
|
||||||
|
'The dbt semantic layer does not support data preview, since the "metrics" table '
|
||||||
|
"is a virtual table that is not materialized. An administrator should configure "
|
||||||
|
'the database in Apache Superset so that the "Disable SQL Lab data preview '
|
||||||
|
'queries" option under "Advanced" → "SQL Lab" is enabled.'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MetricType(TypedDict, total=False):
|
||||||
|
"""
|
||||||
|
Type for metrics returned by `get_metrics`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
metric_name: str
|
||||||
|
expression: str
|
||||||
|
verbose_name: str | None
|
||||||
|
metric_type: str | None
|
||||||
|
description: str | None
|
||||||
|
d3format: str | None
|
||||||
|
warning_text: str | None
|
||||||
|
extra: str | None
|
||||||
|
|
||||||
|
|
||||||
|
class DbtMetricFlowEngineSpec(ShillelaghEngineSpec):
|
||||||
|
"""
|
||||||
|
Engine for the the dbt semantic layer.
|
||||||
|
"""
|
||||||
|
|
||||||
|
engine = "metricflow"
|
||||||
|
engine_name = "dbt Metric Flow"
|
||||||
|
sqlalchemy_uri_placeholder = (
|
||||||
|
"metricflow://[ab123.us1.dbt.com]/<environment_id>"
|
||||||
|
"?service_token=<service_token>"
|
||||||
|
)
|
||||||
|
|
||||||
|
supports_dynamic_columns = True
|
||||||
|
|
||||||
|
_time_grain_expressions = {
|
||||||
|
TimeGrain.DAY: "{col}__day",
|
||||||
|
TimeGrain.WEEK: "{col}__week",
|
||||||
|
TimeGrain.MONTH: "{col}__month",
|
||||||
|
TimeGrain.QUARTER: "{col}__quarter",
|
||||||
|
TimeGrain.YEAR: "{col}__year",
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def select_star(cls, *args: Any, **kwargs: Any) -> str:
|
||||||
|
"""
|
||||||
|
Return a ``SELECT *`` query.
|
||||||
|
"""
|
||||||
|
message = SELECT_STAR_MESSAGE.replace("'", "''")
|
||||||
|
return f"SELECT '{message}' AS warning"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_columns(
|
||||||
|
cls,
|
||||||
|
inspector: Inspector,
|
||||||
|
table: Table,
|
||||||
|
options: dict[str, Any] | None = None,
|
||||||
|
) -> list[ResultSetColumnType]:
|
||||||
|
columns: list[ResultSetColumnType] = []
|
||||||
|
|
||||||
|
for column in inspector.get_columns(table.table, table.schema):
|
||||||
|
# ignore metrics
|
||||||
|
if "computed" in column:
|
||||||
|
continue
|
||||||
|
|
||||||
|
column["column_name"] = column["name"]
|
||||||
|
columns.append(column)
|
||||||
|
|
||||||
|
return columns
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_metrics(
|
||||||
|
cls,
|
||||||
|
database: Database,
|
||||||
|
inspector: Inspector,
|
||||||
|
table: Table,
|
||||||
|
) -> list[MetricType]:
|
||||||
|
"""
|
||||||
|
Get all metrics.
|
||||||
|
"""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"metric_name": column["name"],
|
||||||
|
"expression": column["computed"]["sqltext"],
|
||||||
|
"description": column["comment"],
|
||||||
|
}
|
||||||
|
for column in inspector.get_columns(table.table, table.schema)
|
||||||
|
if "computed" in column
|
||||||
|
]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_valid_metrics_and_dimensions(
|
||||||
|
cls,
|
||||||
|
database: Database,
|
||||||
|
table: SqlaTable,
|
||||||
|
dimensions: set[str],
|
||||||
|
metrics: set[str],
|
||||||
|
) -> ValidColumnsType:
|
||||||
|
"""
|
||||||
|
Get valid metrics and dimensions.
|
||||||
|
|
||||||
|
Given a datasource, and sets of selected metrics and dimensions, return the
|
||||||
|
sets of valid metrics and dimensions that can further be selected.
|
||||||
|
"""
|
||||||
|
with database.get_sqla_engine() as engine:
|
||||||
|
connection = engine.connect()
|
||||||
|
adapter = get_adapter_for_table_name(connection, TABLE_NAME)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"metrics": adapter._get_metrics_for_dimensions(dimensions),
|
||||||
|
"dimensions": adapter._get_dimensions_for_metrics(metrics),
|
||||||
|
}
|
||||||
@@ -16,11 +16,13 @@
|
|||||||
# under the License.
|
# under the License.
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import itertools
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
from collections import defaultdict
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from re import Pattern
|
from re import Pattern
|
||||||
from typing import Any, Optional, TYPE_CHECKING, TypedDict
|
from typing import Any, Iterator, Optional, TYPE_CHECKING, TypedDict
|
||||||
from urllib import parse
|
from urllib import parse
|
||||||
|
|
||||||
from apispec import APISpec
|
from apispec import APISpec
|
||||||
@@ -30,20 +32,48 @@ from cryptography.hazmat.primitives import serialization
|
|||||||
from flask import current_app
|
from flask import current_app
|
||||||
from flask_babel import gettext as __
|
from flask_babel import gettext as __
|
||||||
from marshmallow import fields, Schema
|
from marshmallow import fields, Schema
|
||||||
from sqlalchemy import types
|
from sqlalchemy import text, types
|
||||||
|
from sqlalchemy.engine.interfaces import Dialect
|
||||||
from sqlalchemy.engine.reflection import Inspector
|
from sqlalchemy.engine.reflection import Inspector
|
||||||
from sqlalchemy.engine.url import URL
|
from sqlalchemy.engine.url import URL
|
||||||
|
from sqlglot import exp, parse_one
|
||||||
|
|
||||||
from superset.constants import TimeGrain
|
from superset.constants import TimeGrain
|
||||||
from superset.databases.utils import make_url_safe
|
from superset.databases.utils import make_url_safe
|
||||||
from superset.db_engine_specs.base import BaseEngineSpec, BasicPropertiesType
|
from superset.db_engine_specs.base import BaseEngineSpec, BasicPropertiesType
|
||||||
from superset.db_engine_specs.postgres import PostgresBaseEngineSpec
|
from superset.db_engine_specs.postgres import PostgresBaseEngineSpec
|
||||||
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
|
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
|
||||||
|
from superset.extensions.semantic_layer import (
|
||||||
|
BINARY,
|
||||||
|
BOOLEAN,
|
||||||
|
Column as SemanticColumn,
|
||||||
|
DATE,
|
||||||
|
DATETIME,
|
||||||
|
DECIMAL,
|
||||||
|
Dimension as SemanticDimension,
|
||||||
|
Filter as SemanticFilter,
|
||||||
|
INTEGER,
|
||||||
|
Metric as SemanticMetric,
|
||||||
|
NoSort,
|
||||||
|
NUMBER,
|
||||||
|
OBJECT,
|
||||||
|
Query as SemanticQuery,
|
||||||
|
SemanticView,
|
||||||
|
Sort as SemanticSort,
|
||||||
|
SortDirectionEnum,
|
||||||
|
STRING,
|
||||||
|
Table as SemanticTable,
|
||||||
|
TIME,
|
||||||
|
Type as SemanticType,
|
||||||
|
)
|
||||||
from superset.models.sql_lab import Query
|
from superset.models.sql_lab import Query
|
||||||
|
from superset.sql.parse import Table
|
||||||
from superset.utils import json
|
from superset.utils import json
|
||||||
from superset.utils.core import get_user_agent, QuerySource
|
from superset.utils.core import get_user_agent, QuerySource
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
|
from sqlalchemy.engine.base import Engine
|
||||||
|
|
||||||
from superset.models.core import Database
|
from superset.models.core import Database
|
||||||
|
|
||||||
# Regular expressions to catch custom errors
|
# Regular expressions to catch custom errors
|
||||||
@@ -77,16 +107,318 @@ class SnowflakeParametersType(TypedDict):
|
|||||||
warehouse: str
|
warehouse: str
|
||||||
|
|
||||||
|
|
||||||
|
class SnowflakeSemanticLayer:
|
||||||
|
def __init__(self, engine: Engine) -> None:
|
||||||
|
self.engine = engine
|
||||||
|
|
||||||
|
def execute(
|
||||||
|
self,
|
||||||
|
sql: str,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> Iterator[dict[str, Any]]:
|
||||||
|
with self.engine.connect() as connection:
|
||||||
|
for row in connection.execute(text(sql), kwargs).mappings():
|
||||||
|
yield dict(row)
|
||||||
|
|
||||||
|
def get_semantic_views(self) -> set[SemanticView]:
|
||||||
|
sql = """
|
||||||
|
SHOW SEMANTIC VIEWS
|
||||||
|
->> SELECT "name" FROM $1;
|
||||||
|
"""
|
||||||
|
return {SemanticView(row["name"]) for row in self.execute(sql)}
|
||||||
|
|
||||||
|
def get_type(self, snowflake_type: str | None) -> type[SemanticType]:
|
||||||
|
if snowflake_type is None:
|
||||||
|
return STRING
|
||||||
|
|
||||||
|
type_map = {
|
||||||
|
STRING: {r"VARCHAR\(\d+\)$", "STRING$", "TEXT$", r"CHAR\(\d+\)$"},
|
||||||
|
INTEGER: {r"NUMBER\(38,\s?0\)$", "INT$", "INTEGER$", "BIGINT$"},
|
||||||
|
DECIMAL: {r"NUMBER\(10,\s?2\)$"},
|
||||||
|
NUMBER: {r"NUMBER\(\d+,\s?\d+\)$", "FLOAT$", "DOUBLE$"},
|
||||||
|
BOOLEAN: {"BOOLEAN$"},
|
||||||
|
DATE: {"DATE$"},
|
||||||
|
DATETIME: {"TIMESTAMP_TZ$", "TIMESTAMP__NTZ$"},
|
||||||
|
TIME: {"TIME$"},
|
||||||
|
OBJECT: {"OBJECT$"},
|
||||||
|
BINARY: {r"BINARY\(\d+\)$", r"VARBINARY\(\d+\)$"},
|
||||||
|
}
|
||||||
|
for semantic_type, patterns in type_map.items():
|
||||||
|
if any(
|
||||||
|
re.match(pattern, snowflake_type, re.IGNORECASE) for pattern in patterns
|
||||||
|
):
|
||||||
|
return semantic_type
|
||||||
|
|
||||||
|
return STRING
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def quote_table(cls, table: Table, dialect: Dialect) -> str:
|
||||||
|
"""
|
||||||
|
Fully quote a table name, including the schema and catalog.
|
||||||
|
"""
|
||||||
|
quoters = {
|
||||||
|
"catalog": dialect.identifier_preparer.quote_schema,
|
||||||
|
"schema": dialect.identifier_preparer.quote_schema,
|
||||||
|
"table": dialect.identifier_preparer.quote,
|
||||||
|
}
|
||||||
|
|
||||||
|
return ".".join(
|
||||||
|
function(getattr(table, key))
|
||||||
|
for key, function in quoters.items()
|
||||||
|
if getattr(table, key)
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_metrics(self, semantic_view: SemanticView) -> set[SemanticMetric]:
|
||||||
|
quoted_semantic_view_name = self.quote_table(
|
||||||
|
Table(semantic_view.name),
|
||||||
|
self.engine.dialect,
|
||||||
|
)
|
||||||
|
sql = f"""
|
||||||
|
DESC SEMANTIC VIEW {quoted_semantic_view_name}
|
||||||
|
->> SELECT "object_name", "property", "property_value"
|
||||||
|
FROM $1
|
||||||
|
WHERE
|
||||||
|
"object_kind" = 'METRIC' AND
|
||||||
|
"property" IN ('DATA_TYPE', 'TABLE');
|
||||||
|
""" # noqa: S608 (semantic_view.name is quoted)
|
||||||
|
rows = self.execute(sql)
|
||||||
|
|
||||||
|
metrics: set[SemanticMetric] = set()
|
||||||
|
for name, group in itertools.groupby(rows, key=lambda x: x["object_name"]):
|
||||||
|
attributes = defaultdict(set)
|
||||||
|
for row in group:
|
||||||
|
attributes[row["property"]].add(row["property_value"])
|
||||||
|
|
||||||
|
table = next(iter(attributes["TABLE"]))
|
||||||
|
metric_name = table + "." + name
|
||||||
|
type_ = self.get_type(next(iter(attributes["DATA_TYPE"])))
|
||||||
|
sql = self.engine.dialect.identifier_preparer.quote(metric_name)
|
||||||
|
tables = frozenset(attributes["TABLE"])
|
||||||
|
join_columns = frozenset()
|
||||||
|
|
||||||
|
metrics.add(SemanticMetric(metric_name, type_, sql, tables, join_columns))
|
||||||
|
|
||||||
|
return metrics
|
||||||
|
|
||||||
|
def get_dimensions(self, semantic_view: SemanticView) -> set[SemanticDimension]:
|
||||||
|
quoted_semantic_view_name = self.quote_table(
|
||||||
|
Table(semantic_view.name),
|
||||||
|
self.engine.dialect,
|
||||||
|
)
|
||||||
|
sql = f"""
|
||||||
|
DESC SEMANTIC VIEW {quoted_semantic_view_name}
|
||||||
|
->> SELECT "object_name", "property", "property_value"
|
||||||
|
FROM $1
|
||||||
|
WHERE
|
||||||
|
"object_kind" = 'DIMENSION' AND
|
||||||
|
"property" IN ('DATA_TYPE', 'TABLE');
|
||||||
|
""" # noqa: S608 (semantic_view.name is quoted)
|
||||||
|
rows = self.execute(sql)
|
||||||
|
|
||||||
|
dimensions: set[SemanticDimension] = set()
|
||||||
|
for name, group in itertools.groupby(rows, key=lambda x: x["object_name"]):
|
||||||
|
attributes = defaultdict(set)
|
||||||
|
for row in group:
|
||||||
|
attributes[row["property"]].add(row["property_value"])
|
||||||
|
|
||||||
|
table = next(iter(attributes["TABLE"]))
|
||||||
|
dimension_name = table + "." + name
|
||||||
|
column = SemanticColumn(SemanticTable(table), name)
|
||||||
|
type_ = self.get_type(next(iter(attributes["DATA_TYPE"])))
|
||||||
|
|
||||||
|
dimensions.add(SemanticDimension(column, dimension_name, type_))
|
||||||
|
|
||||||
|
return dimensions
|
||||||
|
|
||||||
|
def get_valid_metrics(
|
||||||
|
self,
|
||||||
|
semantic_view: SemanticView,
|
||||||
|
metrics: set[SemanticMetric],
|
||||||
|
dimensions: set[SemanticDimension],
|
||||||
|
) -> set[SemanticMetric]:
|
||||||
|
# all metrics and dimensions are valid inside a given semantic view
|
||||||
|
return self.get_metrics(semantic_view)
|
||||||
|
|
||||||
|
def get_valid_dimensions(
|
||||||
|
self,
|
||||||
|
semantic_view: SemanticView,
|
||||||
|
metrics: set[SemanticMetric],
|
||||||
|
dimensions: set[SemanticDimension],
|
||||||
|
) -> set[SemanticDimension]:
|
||||||
|
# all metrics and dimensions are valid inside a given semantic view
|
||||||
|
return self.get_dimensions(semantic_view)
|
||||||
|
|
||||||
|
def get_query(
|
||||||
|
self,
|
||||||
|
semantic_view: SemanticView,
|
||||||
|
metrics: set[SemanticMetric],
|
||||||
|
dimensions: set[SemanticDimension],
|
||||||
|
filters: set[SemanticFilter],
|
||||||
|
sort: SemanticSort = NoSort,
|
||||||
|
limit: int | None = None,
|
||||||
|
offset: int | None = None,
|
||||||
|
) -> SemanticQuery:
|
||||||
|
ast = self.build_query(
|
||||||
|
semantic_view,
|
||||||
|
metrics,
|
||||||
|
dimensions,
|
||||||
|
filters,
|
||||||
|
sort,
|
||||||
|
limit,
|
||||||
|
offset,
|
||||||
|
)
|
||||||
|
return SemanticQuery(sql=ast.sql(dialect="snowflake", pretty=True))
|
||||||
|
|
||||||
|
def build_query(
|
||||||
|
self,
|
||||||
|
semantic_view: SemanticView,
|
||||||
|
metrics: set[SemanticMetric],
|
||||||
|
dimensions: set[SemanticDimension],
|
||||||
|
filters: set[SemanticFilter],
|
||||||
|
sort: SemanticSort = NoSort,
|
||||||
|
limit: int | None = None,
|
||||||
|
offset: int | None = None,
|
||||||
|
) -> exp.Select:
|
||||||
|
semantic_view = exp.SemanticView(
|
||||||
|
this=exp.Table(this=exp.Identifier(this=semantic_view.name, quoted=True)),
|
||||||
|
dimensions=[
|
||||||
|
exp.Column(
|
||||||
|
this=exp.Identifier(this=dimension.column.name, quoted=True),
|
||||||
|
table=exp.Identifier(
|
||||||
|
this=dimension.column.relation.name,
|
||||||
|
quoted=True,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
for dimension in dimensions
|
||||||
|
],
|
||||||
|
metrics=[
|
||||||
|
exp.Column(
|
||||||
|
this=exp.Identifier(this=column, quoted=True),
|
||||||
|
table=exp.Identifier(this=table, quoted=True),
|
||||||
|
)
|
||||||
|
for table, column in (
|
||||||
|
metric.name.split(".", 1)
|
||||||
|
for metric in metrics
|
||||||
|
if "." in metric.name
|
||||||
|
)
|
||||||
|
],
|
||||||
|
)
|
||||||
|
query = exp.Select(
|
||||||
|
expressions=[exp.Star()],
|
||||||
|
**{"from": exp.From(this=exp.Table(this=semantic_view))},
|
||||||
|
)
|
||||||
|
|
||||||
|
if sort.items:
|
||||||
|
order = [
|
||||||
|
exp.Ordered(
|
||||||
|
this=exp.Column(this=exp.Identifier(this=item.field.name)),
|
||||||
|
desc=item.direction == SortDirectionEnum.DESC,
|
||||||
|
nulls_first=item.nulls_first,
|
||||||
|
)
|
||||||
|
for item in sort.items
|
||||||
|
]
|
||||||
|
query.args["order"] = exp.Order(expressions=order)
|
||||||
|
|
||||||
|
if offset:
|
||||||
|
query = query.offset(offset)
|
||||||
|
|
||||||
|
if limit:
|
||||||
|
query = query.limit(limit)
|
||||||
|
|
||||||
|
return query
|
||||||
|
|
||||||
|
def get_query_from_standard_sql(self, sql: str) -> SemanticQuery:
|
||||||
|
"""
|
||||||
|
Convert the Explore query into a proper query.
|
||||||
|
|
||||||
|
Explore will produce a pseudo-SQL query that references metrics and dimensions
|
||||||
|
as if they were columns in a table. This method replaces the table name with a
|
||||||
|
call to `SEMANTIC_VIEW`, and removes the `GROUP BY` clause, since all the
|
||||||
|
aggregations happen inside the `SEMANTIC_VIEW` call.
|
||||||
|
"""
|
||||||
|
ast = parse_one(sql, "snowflake")
|
||||||
|
table = ast.find(exp.Table)
|
||||||
|
if not table:
|
||||||
|
return SemanticQuery(sql=sql)
|
||||||
|
|
||||||
|
semantic_views = self.get_semantic_views()
|
||||||
|
if table.name not in {semantic_view.name for semantic_view in semantic_views}:
|
||||||
|
return SemanticQuery(sql=sql)
|
||||||
|
|
||||||
|
# collect all metric and dimensions
|
||||||
|
semantic_view = SemanticView(table.name)
|
||||||
|
all_metrics = self.get_metrics(semantic_view)
|
||||||
|
all_dimensions = self.get_dimensions(semantic_view)
|
||||||
|
|
||||||
|
# collect metrics and dimensions used in the query
|
||||||
|
columns = {column.name for column in ast.find_all(exp.Column)}
|
||||||
|
metrics = [metric for metric in all_metrics if metric.name in columns]
|
||||||
|
dimensions = [
|
||||||
|
dimension for dimension in all_dimensions if dimension.name in columns
|
||||||
|
]
|
||||||
|
|
||||||
|
# now replace table with a call to `SEMANTIC_VIEW`
|
||||||
|
udtf = exp.Table(
|
||||||
|
this=exp.SemanticView(
|
||||||
|
this=exp.Table(
|
||||||
|
this=exp.Identifier(this=semantic_view.name, quoted=True)
|
||||||
|
),
|
||||||
|
metrics=[
|
||||||
|
exp.Column(
|
||||||
|
this=exp.Identifier(this=column, quoted=True),
|
||||||
|
table=exp.Identifier(this=table, quoted=True),
|
||||||
|
)
|
||||||
|
for table, column in (
|
||||||
|
metric.name.split(".", 1)
|
||||||
|
for metric in metrics
|
||||||
|
if "." in metric.name
|
||||||
|
)
|
||||||
|
],
|
||||||
|
dimensions=[
|
||||||
|
exp.Column(
|
||||||
|
this=exp.Identifier(this=column, quoted=True),
|
||||||
|
table=exp.Identifier(this=table, quoted=True),
|
||||||
|
)
|
||||||
|
for table, column in (
|
||||||
|
dimension.name.split(".", 1)
|
||||||
|
for dimension in dimensions
|
||||||
|
if "." in dimension.name
|
||||||
|
)
|
||||||
|
],
|
||||||
|
),
|
||||||
|
alias=exp.TableAlias(
|
||||||
|
this=exp.Identifier(this="table_alias", quoted=False),
|
||||||
|
columns=[
|
||||||
|
exp.Identifier(this=column.name, quoted=True)
|
||||||
|
for column in metrics + dimensions
|
||||||
|
],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
table.replace(udtf)
|
||||||
|
|
||||||
|
# remove group by, since aggregations are done inside the `SEMANTIC_VIEW` call
|
||||||
|
del ast.args["group"]
|
||||||
|
|
||||||
|
print("BETO")
|
||||||
|
print(ast.sql(dialect="snowflake", pretty=True))
|
||||||
|
return SemanticQuery(sql=ast.sql(dialect="snowflake", pretty=True))
|
||||||
|
|
||||||
|
|
||||||
class SnowflakeEngineSpec(PostgresBaseEngineSpec):
|
class SnowflakeEngineSpec(PostgresBaseEngineSpec):
|
||||||
engine = "snowflake"
|
engine = "snowflake"
|
||||||
engine_name = "Snowflake"
|
engine_name = "Snowflake"
|
||||||
force_column_alias_quotes = True
|
force_column_alias_quotes = True
|
||||||
max_column_name_length = 256
|
max_column_name_length = 256
|
||||||
|
|
||||||
|
# Snowflake doesn't support IS true/false syntax, use = true/false instead
|
||||||
|
use_equality_for_boolean_filters = True
|
||||||
|
|
||||||
parameters_schema = SnowflakeParametersSchema()
|
parameters_schema = SnowflakeParametersSchema()
|
||||||
default_driver = "snowflake"
|
default_driver = "snowflake"
|
||||||
sqlalchemy_uri_placeholder = "snowflake://"
|
sqlalchemy_uri_placeholder = "snowflake://"
|
||||||
|
|
||||||
|
semantic_layer = SnowflakeSemanticLayer
|
||||||
|
|
||||||
supports_dynamic_schema = True
|
supports_dynamic_schema = True
|
||||||
supports_catalog = supports_dynamic_catalog = supports_cross_catalog_queries = True
|
supports_catalog = supports_dynamic_catalog = supports_cross_catalog_queries = True
|
||||||
|
|
||||||
|
|||||||
@@ -112,7 +112,7 @@ class SupersetAPSWDialect(APSWDialect):
|
|||||||
"superset": {
|
"superset": {
|
||||||
"prefix": None,
|
"prefix": None,
|
||||||
"allowed_dbs": self.allowed_dbs,
|
"allowed_dbs": self.allowed_dbs,
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
"safe": True,
|
"safe": True,
|
||||||
"isolation_level": self.isolation_level,
|
"isolation_level": self.isolation_level,
|
||||||
|
|||||||
340
superset/extensions/semantic_layer.py
Normal file
340
superset/extensions/semantic_layer.py
Normal file
@@ -0,0 +1,340 @@
|
|||||||
|
import enum
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import timedelta
|
||||||
|
from functools import total_ordering
|
||||||
|
from typing import Protocol, runtime_checkable
|
||||||
|
|
||||||
|
from sqlalchemy import types as sqltypes
|
||||||
|
from sqlalchemy.engine.base import Engine
|
||||||
|
|
||||||
|
|
||||||
|
class Type:
|
||||||
|
"""
|
||||||
|
Base class for types.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class INTEGER(Type):
|
||||||
|
"""
|
||||||
|
Represents an integer type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class NUMBER(Type):
|
||||||
|
"""
|
||||||
|
Represents a number type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class DECIMAL(Type):
|
||||||
|
"""
|
||||||
|
Represents a decimal type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class STRING(Type):
|
||||||
|
"""
|
||||||
|
Represents a string type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class BOOLEAN(Type):
|
||||||
|
"""
|
||||||
|
Represents a boolean type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class DATE(Type):
|
||||||
|
"""
|
||||||
|
Represents a date type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class TIME(Type):
|
||||||
|
"""
|
||||||
|
Represents a time type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class DATETIME(DATE, TIME):
|
||||||
|
"""
|
||||||
|
Represents a datetime type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class INTERVAL(Type):
|
||||||
|
"""
|
||||||
|
Represents an interval type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class OBJECT(Type):
|
||||||
|
"""
|
||||||
|
Represents an object type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class BINARY(Type):
|
||||||
|
"""
|
||||||
|
Represents a binary type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class SemanticView:
|
||||||
|
name: str
|
||||||
|
description: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class Relation:
|
||||||
|
name: str
|
||||||
|
schema: str | None = None
|
||||||
|
catalog: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class Table:
|
||||||
|
name: str
|
||||||
|
schema: str | None = None
|
||||||
|
catalog: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class View:
|
||||||
|
name: str
|
||||||
|
sql: str
|
||||||
|
schema: str | None = None
|
||||||
|
catalog: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class Virtual:
    """
    A relation identified only by name.

    NOTE(review): presumably a purely logical relation with no physical
    schema/catalog — confirm against implementations.
    """

    name: str
@dataclass(frozen=True)
class Metric:
    """
    A named metric: a SQL aggregation evaluated over one or more tables.
    """

    name: str
    # Result type tag; one of the ``Type`` subclasses above.
    type: type[Type]
    # Aggregation expression, e.g. ``COUNT(*)``.
    sql: str
    # Tables referenced by the metric.
    tables: frozenset[Table]
    # NOTE(review): presumably the columns on which ``tables`` can be joined
    # to dimension tables — confirm against implementations.
    join_columns: frozenset[str]
@total_ordering
class ComparableEnum(enum.Enum):
    """
    An enum whose members compare and order by their underlying value.

    Comparison works across different enum classes (any ``enum.Enum``), so
    members of two distinct subclasses with comparable values can be ordered
    against each other. ``@total_ordering`` derives ``<=``/``>``/``>=`` from
    ``__lt__`` and ``__eq__``.
    """

    def __eq__(self, other: object) -> bool:
        # Only compare against other enum members; anything else defers to
        # the reflected operation via NotImplemented.
        if not isinstance(other, enum.Enum):
            return NotImplemented
        return self.value == other.value

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, enum.Enum):
            return NotImplemented
        return self.value < other.value

    def __hash__(self) -> int:
        # Defining __eq__ suppresses the inherited hash, so restore one keyed
        # on (class, member name) to keep members usable in sets/dicts.
        return hash((self.__class__, self.name))
class TimeGrain(ComparableEnum):
    """
    Sub-day time grains, valued by their exact duration.
    """

    second = timedelta(seconds=1)
    minute = timedelta(minutes=1)
    hour = timedelta(hours=1)
class DateGrain(ComparableEnum):
    """
    Day-and-above date grains, valued by duration.

    Month/quarter/year are fixed-length approximations (30/90/365 days); the
    values exist to give the grains a stable ordering via ``ComparableEnum``,
    not to do calendar arithmetic.
    """

    day = timedelta(days=1)
    week = timedelta(weeks=1)
    month = timedelta(days=30)
    quarter = timedelta(days=90)
    year = timedelta(days=365)
@dataclass(frozen=True)
class Column:
    """
    A column in a table, view, or virtual relation.
    """

    relation: Table | View | Virtual
    name: str
@dataclass(frozen=True)
class Dimension:
    """
    A queryable attribute backed by a physical column, optionally bucketed
    to a time/date grain.
    """

    column: Column
    name: str
    # Type tag; one of the ``Type`` subclasses.
    type: type[Type]
    grain: TimeGrain | DateGrain | None = None

    def __repr__(self) -> str:
        # E.g. "DATETIME order_date [day]"; the trailing grain tag is omitted
        # when no grain is set.
        grain_tag = f"[{self.grain.name}]" if self.grain else ""
        return f"{self.type.__name__} {self.name} {grain_tag}".strip()
class FilterTypeEnum(enum.Enum):
    """
    Which SQL clause a filter expression belongs to.
    """

    # Pre-aggregation predicate.
    WHERE = enum.auto()
    # Post-aggregation predicate.
    HAVING = enum.auto()
@dataclass(frozen=True)
class Filter:
    """
    A filter predicate applied when building a query.
    """

    # Clause the predicate targets (WHERE or HAVING).
    type: FilterTypeEnum
    # Raw SQL predicate text.
    expression: str
class SortDirectionEnum(enum.Enum):
    """
    Sort direction for an ORDER BY entry.
    """

    ASC = enum.auto()
    DESC = enum.auto()
@dataclass(frozen=True)
class SortField:
    """
    A single ORDER BY entry: a metric or dimension plus a direction.
    """

    field: Metric | Dimension
    direction: SortDirectionEnum
    # Whether NULLs sort before non-NULL values.
    nulls_first: bool = True
@dataclass(frozen=True)
class Sort:
    """
    An ordered collection of sort specifications.
    """

    # NOTE(review): unlike the other frozen dataclasses here this holds a
    # mutable ``list``, so the contents can still be mutated in place and
    # hashing an instance raises TypeError.
    items: list[SortField]
@dataclass(frozen=True)
class Query:
    """
    A compiled SQL query produced by a semantic layer.
    """

    sql: str
# Shared "no ordering" sentinel; the default for ``SemanticLayer.get_query``.
# NOTE(review): its ``items`` list is mutable even though ``Sort`` is frozen —
# callers must not append to it.
NoSort = Sort(items=[])
@runtime_checkable
class SemanticLayer(Protocol):
    """
    A generic protocol for semantic layers.

    Implementations translate metric/dimension selections into concrete SQL
    (``Query`` objects) against a SQLAlchemy engine.
    """

    def __init__(self, engine: Engine) -> None: ...

    def get_semantic_views(self) -> set[SemanticView]:
        """
        Return a set of the semantic views.

        A semantic view is an organizational group of metrics and dimensions. It's not a
        logical grouping, since metrics and dimensions from a given semantic view might
        not be compatible. An implementation might expose a single semantic view for
        exploration of available metrics and dimensions, and smaller curated semantic
        views that are domain specific.
        """
        ...

    def get_metrics(self, semantic_view: SemanticView) -> set[Metric]:
        """
        Return the set of metrics available in a given semantic view.
        """
        ...

    def get_dimensions(self, semantic_view: SemanticView) -> set[Dimension]:
        """
        Return the set of dimensions available in a given semantic view.
        """
        ...

    def get_valid_metrics(
        self,
        semantic_view: SemanticView,
        metrics: set[Metric],
        dimensions: set[Dimension],
    ) -> set[Metric]:
        """
        Return compatible metrics for the given metrics and dimensions.

        For metrics to be valid they must be compatible with all the provided
        dimensions.
        """
        ...

    def get_valid_dimensions(
        self,
        semantic_view: SemanticView,
        metrics: set[Metric],
        dimensions: set[Dimension],
    ) -> set[Dimension]:
        """
        Return compatible dimensions for the given metrics.

        For dimensions to be valid they must be compatible with all the provided
        metrics.
        """
        ...

    def get_query(
        self,
        semantic_view: SemanticView,
        metrics: set[Metric],
        dimensions: set[Dimension],
        # populations: set[Population],
        filters: set[Filter],
        sort: Sort = NoSort,
        limit: int | None = None,
        offset: int | None = None,
    ) -> Query:
        """
        Build a SQL query from the given metrics, dimensions, filters, and sort order.
        """
        ...

    def get_query_from_standard_sql(
        self,
        semantic_view: SemanticView,
        sql: str,
    ) -> Query:
        """
        Build a SQL query from a pseudo-query referencing metrics and dimensions.

        For example, given `metric1` having the expression `COUNT(*)`, this query:

            SELECT metric1, dim1
            FROM semantic_layer
            GROUP BY dim1

        Becomes:

            SELECT metric1, dim1
            FROM (
                SELECT COUNT(*) AS metric1, dim1
                FROM fact_table
                JOIN dim_table
                ON fact_table.dim_id = dim_table.id
                GROUP BY dim1
            ) AS semantic_view

        """
        ...
# Map from semantic-layer type tags to SQLAlchemy column types. Keys are the
# ``Type`` subclasses themselves (not instances), hence ``type[Type]``.
TYPE_MAPPING: dict[type[Type], type[sqltypes.TypeEngine]] = {
    # Numeric types
    INTEGER: sqltypes.Integer,
    NUMBER: sqltypes.Numeric,
    DECIMAL: sqltypes.DECIMAL,
    # String types
    STRING: sqltypes.String,
    # Boolean type
    BOOLEAN: sqltypes.Boolean,
    # Date/time types
    DATE: sqltypes.Date,
    TIME: sqltypes.Time,
    DATETIME: sqltypes.DateTime,
    INTERVAL: sqltypes.Interval,
    # Complex types
    OBJECT: sqltypes.JSON,
    BINARY: sqltypes.LargeBinary,
}
def get_sqla_type_from_dimension_type(
    dimension_type: type[Type],
) -> sqltypes.TypeEngine:
    """
    Get the SQLAlchemy type corresponding to the given dimension type.

    Returns an *instance* (not the class) of the mapped SQLAlchemy type,
    falling back to a generic ``String`` for unmapped type tags.
    """
    return TYPE_MAPPING.get(dimension_type, sqltypes.String)()
@@ -126,7 +126,9 @@ class ConfigurationMethod(StrEnum):
|
|||||||
DYNAMIC_FORM = "dynamic_form"
|
DYNAMIC_FORM = "dynamic_form"
|
||||||
|
|
||||||
|
|
||||||
class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable=too-many-public-methods
|
class Database(
|
||||||
|
Model, AuditMixinNullable, ImportExportMixin
|
||||||
|
): # pylint: disable=too-many-public-methods
|
||||||
"""An ORM object that stores Database related information"""
|
"""An ORM object that stores Database related information"""
|
||||||
|
|
||||||
__tablename__ = "dbs"
|
__tablename__ = "dbs"
|
||||||
@@ -400,9 +402,7 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
|
|||||||
return (
|
return (
|
||||||
username
|
username
|
||||||
if (username := get_username())
|
if (username := get_username())
|
||||||
else object_url.username
|
else object_url.username if self.impersonate_user else None
|
||||||
if self.impersonate_user
|
|
||||||
else None
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
@@ -987,7 +987,10 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
|
|||||||
schema=table.schema,
|
schema=table.schema,
|
||||||
) as inspector:
|
) as inspector:
|
||||||
return self.db_engine_spec.get_columns(
|
return self.db_engine_spec.get_columns(
|
||||||
inspector, table, self.schema_options
|
self,
|
||||||
|
inspector,
|
||||||
|
table,
|
||||||
|
self.schema_options,
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_metrics(
|
def get_metrics(
|
||||||
@@ -1076,9 +1079,11 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
|
|||||||
return self.perm
|
return self.perm
|
||||||
|
|
||||||
def has_table(self, table: Table) -> bool:
|
def has_table(self, table: Table) -> bool:
|
||||||
with self.get_sqla_engine(catalog=table.catalog, schema=table.schema) as engine:
|
with self.get_inspector(
|
||||||
# do not pass "" as an empty schema; force null
|
catalog=table.catalog,
|
||||||
return engine.has_table(table.table, table.schema or None)
|
schema=table.schema,
|
||||||
|
) as inspector:
|
||||||
|
return self.db_engine_spec.has_table(self, inspector, table)
|
||||||
|
|
||||||
def has_view(self, table: Table) -> bool:
|
def has_view(self, table: Table) -> bool:
|
||||||
with self.get_sqla_engine(catalog=table.catalog, schema=table.schema) as engine:
|
with self.get_sqla_engine(catalog=table.catalog, schema=table.schema) as engine:
|
||||||
|
|||||||
@@ -85,6 +85,7 @@ SQLGLOT_DIALECTS = {
|
|||||||
# "kustosql": ???
|
# "kustosql": ???
|
||||||
# "kylin": ???
|
# "kylin": ???
|
||||||
"mariadb": Dialects.MYSQL,
|
"mariadb": Dialects.MYSQL,
|
||||||
|
"metricflow": Dialects.SQLITE,
|
||||||
"motherduck": Dialects.DUCKDB,
|
"motherduck": Dialects.DUCKDB,
|
||||||
"mssql": Dialects.TSQL,
|
"mssql": Dialects.TSQL,
|
||||||
"mysql": Dialects.MYSQL,
|
"mysql": Dialects.MYSQL,
|
||||||
|
|||||||
@@ -189,6 +189,7 @@ class Datasource(BaseSupersetView):
|
|||||||
raise DatasetNotFoundError() from ex
|
raise DatasetNotFoundError() from ex
|
||||||
return self.json_response(external_metadata)
|
return self.json_response(external_metadata)
|
||||||
|
|
||||||
|
|
||||||
@expose("/samples", methods=("POST",))
|
@expose("/samples", methods=("POST",))
|
||||||
@has_access_api
|
@has_access_api
|
||||||
@api
|
@api
|
||||||
|
|||||||
Reference in New Issue
Block a user