Compare commits

..

5 Commits

Author SHA1 Message Date
Joe Li
f2d05d10ff fix(embedded-e2e): use route allowlist in static test server
The test app server only ever serves /, /index.html, and /sdk/index.js,
so replace dynamic path joining with a fixed allowlist. This eliminates
the data flow from req.url to readFileSync that CodeQL flagged as a
path-traversal sink — the previous resolve+startsWith containment check
was correct but not recognized as a sanitizer by the analyzer.
2026-04-30 19:27:59 -07:00
Joe Li
6669b186d7 ci(embedded-e2e): build SDK and configure test environment
- Add a build-embedded-sdk step to bashlib.sh and wire it into the
  superset-playwright and superset-e2e workflows so the SDK bundle is
  compiled before Playwright runs.
- Set SUPERSET_FEATURE_EMBEDDED_SUPERSET=true via workflow env so the
  feature flag only affects Playwright jobs. Setting it in the shared
  integration test config breaks unrelated Python tests because the
  security manager's guest-user paths access g.user through paths that
  most tests don't mock.
- Add CORS for localhost:9000 and TALISMAN_ENABLED=False to the
  integration test config. Talisman defaults to X-Frame-Options:
  SAMEORIGIN, which blocks the embedded dashboard from rendering
  inside an iframe hosted on a different port.
2026-04-30 19:27:47 -07:00
Joe Li
ca9eeec59d feat(embedded-e2e): add Playwright E2E tests for embedded dashboards
Adds five tests covering the embedded dashboard flow against the
world_health example: render, hideTitle UI config, chart rendering,
allowed_domains referrer check, and guest-token data access. Includes:

- A chromium-embedded Playwright project, excluded from the main
  project via testIgnore so it can be opted into separately.
- An EmbeddedPage page object and API helpers for embedding/guest
  tokens plus dashboard lookup by slug.
- A static test app (embedded-app/index.html) loaded from a minimal
  Node static server. Playwright bridges the guest-token fetch from
  Node into the browser via page.exposeFunction.
- EMBEDDED timeout/config constants.

Workflow integration and test-environment configuration land in a
follow-up commit.
2026-04-30 19:27:33 -07:00
Vitor Avila
86eb6176d1 fix: Enforce per-user caching on legacy API endpoint (#39789) 2026-04-30 18:04:33 -03:00
Joe Li
4244ae87bf fix(deps): regenerate pinned requirements for psycopg2-binary 2.9.12 (#39790)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-30 17:46:23 -03:00
36 changed files with 1122 additions and 725 deletions

View File

@@ -59,6 +59,15 @@ build-assets() {
say "::endgroup::"
}
# Install dependencies and build the superset-embedded-sdk npm bundle.
# Invoked from the superset-playwright and superset-e2e workflows so the
# SDK bundle exists before the Playwright embedded E2E tests run.
build-embedded-sdk() {
# GITHUB_WORKSPACE is provided by the GitHub Actions runner environment.
cd "$GITHUB_WORKSPACE/superset-embedded-sdk"
say "::group::Build embedded SDK bundle for E2E tests"
# npm ci gives a clean, lockfile-exact install (preferred over npm install in CI).
npm ci
npm run build
say "::endgroup::"
}
build-instrumented-assets() {
cd "$GITHUB_WORKSPACE/superset-frontend"

View File

@@ -169,6 +169,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
SUPERSET_FEATURE_EMBEDDED_SUPERSET: "true"
services:
postgres:
image: postgres:17-alpine
@@ -239,6 +240,11 @@ jobs:
uses: ./.github/actions/cached-dependencies
with:
run: build-instrumented-assets
- name: Build embedded SDK
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: build-embedded-sdk
- name: Install Playwright
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies

View File

@@ -43,6 +43,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
SUPERSET_FEATURE_EMBEDDED_SUPERSET: "true"
services:
postgres:
image: postgres:17-alpine
@@ -113,6 +114,11 @@ jobs:
uses: ./.github/actions/cached-dependencies
with:
run: build-instrumented-assets
- name: Build embedded SDK
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: build-embedded-sdk
- name: Install Playwright
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies

View File

@@ -1,122 +0,0 @@
---
title: Glossary
hide_title: true
sidebar_position: 10
---
import { getAllGlossaryTopics } from '../../superset-frontend/packages/superset-ui-core/src/glossary';
import { Table, ConfigProvider, theme } from 'antd';
import { useColorMode } from '@docusaurus/theme-common';
import { useCallback, useEffect, useRef } from 'react';
export const GlossaryStructure = [
{
title: 'Term',
dataIndex: 'title',
key: 'title',
width: 200,
},
{
title: 'Short Description',
dataIndex: 'short',
key: 'short',
},
];
export const GlossaryContent = () => {
const { colorMode } = useColorMode();
const isDark = colorMode === 'dark';
const tableRefs = useRef({});
const scrollToRow = useCallback((topic, rowKey) => {
const topicId = encodeURIComponent(topic);
const encRowKey = encodeURIComponent(rowKey);
const row = tableRefs.current[topicId]?.[encRowKey];
if (row) {
row.scrollIntoView({ behavior: 'smooth', block: 'center' });
row.classList.add('table-row-highlight');
setTimeout(() => row.classList.remove('table-row-highlight'), 2000);
}
}, []);
useEffect(() => {
let hash = '';
try {
hash = decodeURIComponent(window.location.hash.slice(1));
} catch (e) {
// Malformed percent-encoding in the URL hash — silently skip the
// scroll-to-row behavior rather than letting the page render fail.
return;
}
if (!hash) return;
const [topic, term] = hash.split('__');
if (topic && term) scrollToRow(topic, hash);
}, [scrollToRow]);
return (
<div>
<ConfigProvider
theme={{
algorithm: isDark ? theme.darkAlgorithm : theme.defaultAlgorithm,
}}
>
{getAllGlossaryTopics().map((topic) => {
const topicName = topic.getName();
const topicFragment = encodeURIComponent(topicName);
const terms = topic.getAllTerms();
return (
<div key={topicName} id={topicFragment}>
<h3>{topic.getDisplayName()}</h3>
<Table
dataSource={terms
.map((term) => {
const key = term.getTitle()
? encodeURIComponent(`${topicName}__${term.getTitle()}`)
: undefined;
return key
? {
title: term.getDisplayTitle(),
short: term.getShort(),
key,
}
: null;
})
.filter(Boolean)}
columns={GlossaryStructure}
rowKey="key"
pagination={false}
showHeader
bordered
onRow={(record) => {
if (!record?.key) return {};
const topicId = topicFragment;
return {
ref: (node) => {
if (!tableRefs.current[topicId]) tableRefs.current[topicId] = {};
if (node) {
tableRefs.current[topicId][record.key] = node;
} else {
// cleanup stale reference when row unmounts
delete tableRefs.current[topicId][record.key];
if (Object.keys(tableRefs.current[topicId]).length === 0) {
delete tableRefs.current[topicId];
}
}
},
};
}}
/>
</div>
);
})}
</ConfigProvider>
</div>
);
};
## Glossary
<GlossaryContent />

View File

@@ -60,11 +60,6 @@ const sidebars = {
},
],
},
{
type: 'doc',
label: 'Glossary',
id: 'glossary'
},
{
type: 'doc',
label: 'FAQ',

View File

@@ -707,7 +707,7 @@ protobuf==4.25.8
# proto-plus
psutil==6.1.0
# via apache-superset
psycopg2-binary==2.9.9
psycopg2-binary==2.9.12
# via apache-superset
py-key-value-aio==0.4.4
# via fastmcp

View File

@@ -23,10 +23,6 @@ import { ControlSubSectionHeader } from '../components/ControlSubSectionHeader';
import { ControlPanelSectionConfig } from '../types';
import { formatSelectOptions, displayTimeRelatedControls } from '../utils';
import { glossary } from '@superset-ui/core';
const TIME_SHIFT_DESCRIPTION = glossary.Advanced_Analytics.Time_Shift.encode();
export const advancedAnalyticsControls: ControlPanelSectionConfig = {
label: t('Advanced analytics'),
tabOverride: 'data',
@@ -127,7 +123,12 @@ export const advancedAnalyticsControls: ControlPanelSectionConfig = {
['156 weeks ago', t('156 weeks ago')],
['3 years ago', t('3 years ago')],
],
description: TIME_SHIFT_DESCRIPTION,
description: t(
'Overlay one or more timeseries from a ' +
'relative time period. Expects relative time deltas ' +
'in natural language (example: 24 hours, 7 days, ' +
'52 weeks, 365 days). Free text is supported.',
),
},
},
],

View File

@@ -25,10 +25,6 @@ import {
ControlState,
} from '../types';
import { INVALID_DATE } from '..';
import { glossary } from '@superset-ui/core';
// Glossary terms used for tooltips
const TIME_SHIFT_DESCRIPTION = glossary.Advanced_Analytics.Time_Shift.encode();
const fullChoices = [
['1 day ago', t('1 day ago')],
@@ -86,7 +82,16 @@ export const timeComparisonControls: ({
placeholder: t('Select or type a custom value...'),
label: t('Time shift'),
choices: showFullChoices ? fullChoices : reducedChoices,
description: TIME_SHIFT_DESCRIPTION,
description: t(
'Overlay results from a relative time period. ' +
'Expects relative time deltas ' +
'in natural language (example: 24 hours, 7 days, ' +
'52 weeks, 365 days). Free text is supported. ' +
'Use "Inherit range from time filters" ' +
'to shift the comparison time range ' +
'by the same length as your time range ' +
'and use "Custom" to set a custom comparison range.',
),
},
},
],

View File

@@ -39,13 +39,6 @@ import {
xAxisMixin,
} from '..';
import { glossary } from '@superset-ui/core';
// Glossary terms used for tooltips
const DIMENSION_DESCRIPTION = glossary.Query.Dimension.encode();
const METRIC_DESCRIPTION = glossary.Query.Metric.encode();
const SORT_DESCRIPTION = glossary.Query.Sort.encode();
type Control = {
savedMetrics?: Metric[] | null;
default?: unknown;
@@ -85,7 +78,11 @@ export const dndGroupByControl: SharedControlConfig<
clearable: true,
default: [],
includeTime: false,
description: DIMENSION_DESCRIPTION,
description: t(
'Dimensions contain qualitative values such as names, dates, or geographical data. ' +
'Use dimensions to categorize, segment, and reveal the details in your data. ' +
'Dimensions affect the level of detail in the view.',
),
optionRenderer: (c: ColumnMeta) => <ColumnOption showType column={c} />,
valueRenderer: (c: ColumnMeta) => <ColumnOption column={c} />,
valueKey: 'column_name',
@@ -183,7 +180,11 @@ export const dndAdhocMetricsControl: SharedControlConfig<
datasource,
datasourceType: datasource?.type,
}),
description: METRIC_DESCRIPTION,
description: t(
'Select one or many metrics to display. ' +
'You can use an aggregation function on a column ' +
'or write custom SQL to create a metric.',
),
};
export const dndAdhocMetricControl: typeof dndAdhocMetricsControl = {
@@ -223,7 +224,11 @@ export const dndSortByControl: SharedControlConfig<
type: 'DndMetricSelect',
label: t('Sort query by'),
default: null,
description: SORT_DESCRIPTION,
description: t(
'Orders the query result that generates the source data for this chart. ' +
'If a series or row limit is reached, this determines what data are truncated. ' +
'If undefined, defaults to the first metric (where appropriate).',
),
mapStateToProps: ({ datasource }) => ({
columns: datasource?.columns || [],
savedMetrics: defineSavedMetrics(datasource),

View File

@@ -86,10 +86,6 @@ import {
dndTooltipMetricsControl,
} from './dndControls';
import { matrixifyControls } from './matrixifyControls';
import { glossary } from '@superset-ui/core';
const SERIES_DESCRIPTION = glossary.Query.Series.encode();
const ROW_LIMIT_DESCRIPTION = glossary.Query.Row_Limit.encode();
const categoricalSchemeRegistry = getCategoricalSchemeRegistry();
const sequentialSchemeRegistry = getSequentialSchemeRegistry();
@@ -239,7 +235,9 @@ const row_limit: SharedControlConfig<'SelectControl'> = {
],
default: 10000,
choices: formatSelectOptions(ROW_LIMIT_OPTIONS),
description: ROW_LIMIT_DESCRIPTION,
description: t(
'Limits the number of the rows that are computed in the query that is the source of the data used for this chart.',
),
};
const order_desc: SharedControlConfig<'CheckboxControl'> = {
@@ -264,7 +262,12 @@ const limit: SharedControlConfig<'SelectControl'> = {
validators: [legacyValidateInteger],
choices: formatSelectOptions(SERIES_LIMITS),
clearable: true,
description: SERIES_DESCRIPTION,
description: t(
'Limits the number of series that get displayed. A joined subquery (or an extra phase ' +
'where subqueries are not supported) is applied to limit the number of series that get ' +
'fetched and rendered. This feature is useful when grouping by high cardinality ' +
'column(s) though does increase the query complexity and cost.',
),
};
const series_limit: SharedControlConfig<'SelectControl'> = {
@@ -274,7 +277,12 @@ const series_limit: SharedControlConfig<'SelectControl'> = {
placeholder: t('None'),
validators: [legacyValidateInteger],
choices: formatSelectOptions(SERIES_LIMITS),
description: SERIES_DESCRIPTION,
description: t(
'Limits the number of series that get displayed. A joined subquery (or an extra phase ' +
'where subqueries are not supported) is applied to limit the number of series that get ' +
'fetched and rendered. This feature is useful when grouping by high cardinality ' +
'column(s) though does increase the query complexity and cost.',
),
};
const group_others_when_limit_reached: SharedControlConfig<'CheckboxControl'> =

View File

@@ -16,70 +16,17 @@
* specific language governing permissions and limitations
* under the License.
*/
import type { CSSProperties } from 'react';
import { Tooltip as AntdTooltip } from 'antd';
import type { TooltipProps, TooltipPlacement } from './types';
import { resolveGlossaryString } from '@superset-ui/core';
const TOOLTIP_SEPARATOR_STYLE: CSSProperties = {
margin: '8px 0',
border: 'none',
borderTop: '1px solid rgba(255, 255, 255, 0.2)',
};
export const Tooltip = ({
overlayStyle,
title,
children,
...props
}: TooltipProps) => {
if (typeof title !== 'string') {
return (
<AntdTooltip
title={title}
styles={{
body: { overflow: 'hidden', textOverflow: 'ellipsis' },
root: overlayStyle ?? {},
}}
{...props}
>
{children}
</AntdTooltip>
);
}
const [glossaryUrl, description] = resolveGlossaryString(title);
const wrappedChildren = glossaryUrl ? (
<a href={glossaryUrl} target="_blank" rel="noopener noreferrer">
{children}
</a>
) : (
children
);
const wrappedDescription = glossaryUrl ? (
<>
{description}
<hr style={TOOLTIP_SEPARATOR_STYLE} />
<em>Click to Learn More</em>
</>
) : (
description
);
return (
<AntdTooltip
title={wrappedDescription}
styles={{
body: { overflow: 'hidden', textOverflow: 'ellipsis' },
root: overlayStyle ?? {},
}}
{...props}
>
{wrappedChildren}
</AntdTooltip>
);
};
export const Tooltip = ({ overlayStyle, ...props }: TooltipProps) => (
<AntdTooltip
styles={{
body: { overflow: 'hidden', textOverflow: 'ellipsis' },
root: overlayStyle ?? {},
}}
{...props}
/>
);
export type { TooltipProps, TooltipPlacement };

View File

@@ -1,121 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Glossary definition containing terms organized by topic.
*
* ## How to add new glossary entries:
*
* 1. Add a new topic (if needed) or use an existing one
* 2. Add a term under the topic with a key (term name) and value object containing:
* - short: A brief description (displayed in tooltips)
* - extended (optional): An extended description (displayed in documentation)
*
* ## Example:
* export const glossaryDefinition: GlossaryDefinition = {
* Query: {
* Row_Limit: {
* short: noTranslate('Limits the number of rows...'),
* extended: noTranslate('Additional details...'), // optional
* },
* },
* };
*
* ## Formatting Notes:
* - Term names with underscores (e.g., `Row_Limit`) will be displayed with spaces
* (e.g., "Row Limit") when rendered in the UI and documentation
*/
export const glossaryDefinition: GlossaryDefinition = {
Query: {
Dimension: {
short: noTranslate(
'Dimensions contain qualitative values such as names, dates, or geographical data. ' +
'Use dimensions to categorize, segment, and reveal the details in your data. ' +
'Dimensions affect the level of detail in the view.',
),
},
Metric: {
short: noTranslate(
'Select one or many metrics to display. ' +
'You can use an aggregation function on a column or write custom SQL to create a metric.',
),
},
Series: {
short: noTranslate(
'Limits the number of series that get displayed. ' +
'A joined subquery (or an extra phase where subqueries are not supported) is applied ' +
'to limit the number of series that get fetched and rendered. ' +
'This feature is useful when grouping by high cardinality column(s) ' +
'though does increase the query complexity and cost.',
),
},
Row_Limit: {
short: noTranslate(
'Limits the number of rows that get displayed. ' +
'This feature is useful when grouping by high cardinality column(s) ' +
'though does increase the query complexity and cost.',
),
},
Sort: {
short: noTranslate(
'Orders the query result that generates the source data for this chart. ' +
'If a series or row limit is reached, this determines what data are truncated. ' +
'If undefined, defaults to the first metric (where appropriate).',
),
},
},
Advanced_Analytics: {
Time_Shift: {
short: noTranslate(
'Overlay results from a relative time period. ' +
'Expects relative time deltas in natural language (example: 24 hours, 7 days, ' +
'52 weeks, 365 days). Free text is supported. ' +
'Use "Inherit range from time filters" to shift the comparison time range ' +
'by the same length as your time range and use "Custom" to set a custom comparison range.',
),
},
},
};
/**
* Identity passthrough used in environments (such as the docs site) that do
* not have an i18n runtime. Translation of glossary strings is performed at
* resolution time by callers in app contexts that do have i18n available.
*
* Named `noTranslate` (rather than `t`) so it does not visually shadow the
* imported i18n `t` used elsewhere in this package.
*/
function noTranslate(message: string): string {
return message;
}
/**
* The glossary definition is a nested object where the first level keys are topics,
* and the second level keys are term titles. This remains a static string-based
* structure, mainly for good IDE autocomplete.
*/
export type GlossaryStrings = {
short: string;
extended?: string;
};
export type GlossaryDefinition = Record<
string,
Record<string, GlossaryStrings>
>;

View File

@@ -1,154 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Local type definition to avoid circular dependency with glossaryUtils
type Glossary = Record<string, Record<string, GlossaryTerm>>;
// Encoding format prefix for glossary strings
export const GLOSSARY_ENCODING_PREFIX = '[GLOSSARY]|';
export class GlossaryTerm {
/**
* The topic under which the term is categorized.
*/
private readonly topic: string;
/**
* The name of the term being defined.
*/
private readonly title: string;
/**
* A short description of the term. Displayed on the frontend as a tooltip.
*/
private readonly short: string;
/**
* An extended description of the term, shown alongside short on the documentation.
*/
private readonly extended?: string;
constructor(options: {
topic: string;
title: string;
short: string;
extended?: string;
}) {
this.topic = options.topic;
this.title = options.title;
this.short = options.short;
this.extended = options.extended;
}
getTopic(): string {
return this.topic;
}
getTitle(): string {
return this.title;
}
/**
* Returns a formatted display version of the title with underscores replaced by spaces.
*/
getDisplayTitle(): string {
return this.title.replace(/_/g, ' ');
}
/**
* Returns the short description, optionally transformed by a provided translation function.
*/
getShort(t?: (value: string) => string): string {
if (!t) {
return this.short;
}
return t(this.short);
}
getExtended(t?: (value: string) => string): string | undefined {
if (!t) {
return this.extended;
}
if (!this.extended) {
return undefined;
}
return t(this.extended);
}
/**
* Encodes the glossary term into a string format that can be resolved later.
* Format: [GLOSSARY]|topic|title
*/
encode(): string {
return `${GLOSSARY_ENCODING_PREFIX}${this.topic}|${this.title}`;
}
}
export class GlossaryTopic {
private readonly name: string;
private readonly terms: Map<string, GlossaryTerm>;
constructor(name: string, terms: GlossaryTerm[]) {
this.name = name;
this.terms = new Map(terms.map(term => [term.getTitle(), term]));
}
getName(): string {
return this.name;
}
/**
* Returns a formatted display version of the topic name with underscores replaced by spaces.
*/
getDisplayName(): string {
return this.name.replace(/_/g, ' ');
}
getTerm(title: string): GlossaryTerm | undefined {
return this.terms.get(title);
}
getAllTerms(): GlossaryTerm[] {
return Array.from(this.terms.values());
}
}
export class GlossaryMap {
private readonly topics: Map<string, GlossaryTopic>;
constructor(glossary: Glossary) {
const topics = new Map<string, GlossaryTopic>();
Object.entries(glossary).forEach(([topicName, termsByTitle]) => {
const topicTerms = Object.values(termsByTitle);
topics.set(topicName, new GlossaryTopic(topicName, topicTerms));
});
this.topics = topics;
}
getTopic(topicName: string): GlossaryTopic | undefined {
return this.topics.get(topicName);
}
getAllTopics(): GlossaryTopic[] {
return Array.from(this.topics.values());
}
}

View File

@@ -1,63 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
GlossaryMap,
GlossaryTerm,
type GlossaryTopic,
} from './glossaryModels';
import { glossaryDefinition } from './glossary';
/**
* The exported glossary object is a runtime structure where each entry is a GlossaryTerm instance, but the key
* structure mirrors `glossaryDefinition` so IDEs can autocomplete, yet callers can use methods like `getShort()`.
*/
export type Glossary = {
[Topic in keyof typeof glossaryDefinition]: {
[Title in keyof (typeof glossaryDefinition)[Topic]]: GlossaryTerm;
};
};
const glossary: Glossary = Object.fromEntries(
Object.entries(glossaryDefinition).map(([topic, termsByTitle]) => [
topic,
Object.fromEntries(
Object.entries(termsByTitle).map(([title, termStrings]) => [
title,
new GlossaryTerm({
topic,
title,
short: termStrings.short,
extended: termStrings.extended ?? '',
}),
]),
),
]),
) as Glossary;
const glossaryMap = new GlossaryMap(glossary);
export const getAllGlossaryTopics = (): GlossaryTopic[] =>
glossaryMap.getAllTopics();
export const getGlossaryTopic = (
topicName: string,
): GlossaryTopic | undefined => glossaryMap.getTopic(topicName);
export default glossary;

View File

@@ -1,26 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export { GlossaryTerm, GlossaryTopic } from './glossaryModels';
export {
default as glossary,
getAllGlossaryTopics,
getGlossaryTopic,
} from './glossaryUtils';
export { resolveGlossaryString } from './tooltipUtils';

View File

@@ -1,50 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { getGlossaryTopic } from './glossaryUtils';
import { t } from '@superset-ui/core';
export const GLOSSARY_BASE_URL = 'https://superset.apache.org/docs';
// Pattern matches: [GLOSSARY]|topic|title
// Captures: topic and title for lookup in glossary
const GLOSSARY_ENCODING_PATTERN = /^\[GLOSSARY\]\|([^|]+)\|([^|]+)$/;
export const resolveGlossaryString = (
glossaryString: string,
): [string | undefined, string] => {
const encoded = glossaryString.trim();
const match = encoded.match(GLOSSARY_ENCODING_PATTERN);
if (!match) {
return [undefined, encoded];
}
const topic = match[1];
const title = match[2];
// Look up the term from the glossary to get the translated description
const glossaryTopic = getGlossaryTopic(topic);
const term = glossaryTopic?.getTerm(title);
const description = term ? term.getShort(t) : encoded;
const glossaryUrl = buildGlossaryUrl(topic, title);
return [glossaryUrl, description];
};
const buildGlossaryUrl = (topic: string, title: string): string =>
`${GLOSSARY_BASE_URL}/glossary#${encodeURIComponent(`${topic}__${title}`)}`;

View File

@@ -35,4 +35,3 @@ export * from './ui-overrides';
export * from './hooks';
export * from './currency-format';
export * from './time-comparison';
export * from './glossary';

View File

@@ -95,6 +95,7 @@ export default defineConfig({
testIgnore: [
'**/tests/auth/**/*.spec.ts',
'**/tests/sqllab/**/*.spec.ts',
'**/tests/embedded/**/*.spec.ts',
...(process.env.INCLUDE_EXPERIMENTAL ? [] : ['**/experimental/**']),
],
use: {
@@ -132,6 +133,18 @@ export default defineConfig({
// No storageState = clean browser with no cached cookies
},
},
{
// Embedded dashboard tests - validates the full embedding flow:
// external app -> SDK -> iframe -> guest token -> dashboard render
name: 'chromium-embedded',
testMatch: '**/tests/embedded/**/*.spec.ts',
use: {
browserName: 'chromium',
testIdAttribute: 'data-test',
// Uses admin auth for API calls to configure embedding and get guest tokens
storageState: 'playwright/.auth/user.json',
},
},
],
// Web server setup - disabled in CI (Flask started separately in workflow)

View File

@@ -0,0 +1,96 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Embedded Dashboard Test App</title>
<style>
html, body { margin: 0; padding: 0; height: 100%; }
#superset-container { width: 100%; height: 100vh; }
#superset-container iframe { width: 100%; height: 100%; border: none; }
#error { color: red; padding: 20px; display: none; }
#status { padding: 10px; font-family: monospace; font-size: 12px; }
</style>
</head>
<body>
<div id="status">Initializing embedded dashboard...</div>
<div id="error"></div>
<div id="superset-container" data-test="embedded-container"></div>
<script src="/sdk/index.js"></script>
<script>
// Bootstraps the embedded dashboard: reads uuid/supersetDomain from the
// query string, obtains a guest token (from a Playwright-injected function
// or window.__guestToken), and mounts the dashboard via the embedded SDK
// loaded above from /sdk/index.js.
(async function () {
const params = new URLSearchParams(window.location.search);
const uuid = params.get('uuid');
const supersetDomain = params.get('supersetDomain');
// Both params are mandatory; surface a visible error instead of failing silently.
if (!uuid || !supersetDomain) {
document.getElementById('error').style.display = 'block';
document.getElementById('error').textContent =
'Missing required query params: uuid, supersetDomain';
return;
}
const statusEl = document.getElementById('status');
// fetchGuestToken is injected by Playwright via page.exposeFunction()
// Falls back to window.__guestToken for simple/static token injection
async function fetchGuestToken() {
if (typeof window.__fetchGuestToken === 'function') {
statusEl.textContent = 'Fetching guest token...';
const token = await window.__fetchGuestToken();
statusEl.textContent = 'Guest token received, loading dashboard...';
return token;
}
if (window.__guestToken) {
return window.__guestToken;
}
// No bridge and no static token: fail loudly so the test sees the cause.
throw new Error('No guest token source available');
}
try {
// Parse optional UI config from query params
const uiConfig = {};
if (params.get('hideTitle') === 'true') uiConfig.hideTitle = true;
if (params.get('hideTab') === 'true') uiConfig.hideTab = true;
if (params.get('hideChartControls') === 'true') uiConfig.hideChartControls = true;
// Omit dashboardUiConfig entirely when no flags were set, so the SDK
// applies its own defaults rather than an empty config object.
const dashboard = await supersetEmbeddedSdk.embedDashboard({
id: uuid,
supersetDomain: supersetDomain,
mountPoint: document.getElementById('superset-container'),
fetchGuestToken: fetchGuestToken,
dashboardUiConfig: Object.keys(uiConfig).length > 0 ? uiConfig : undefined,
debug: params.get('debug') === 'true',
});
statusEl.textContent = 'Dashboard embedded successfully';
// Expose dashboard API on window for Playwright assertions
window.__embeddedDashboard = dashboard;
} catch (err) {
// Render the failure in the DOM so Playwright can assert on it.
document.getElementById('error').style.display = 'block';
document.getElementById('error').textContent = 'Embed failed: ' + err.message;
statusEl.textContent = 'Error';
}
})();
</script>
</body>
</html>

View File

@@ -132,26 +132,14 @@ export interface DashboardResult {
published?: boolean;
}
/**
* Get a dashboard by its title
* @param page - Playwright page instance (provides authentication context)
* @param title - The dashboard_title to search for
* @returns Dashboard object if found, null if not found
*/
export async function getDashboardByName(
async function getDashboardByFilter(
page: Page,
title: string,
col: 'dashboard_title' | 'slug',
value: string,
): Promise<DashboardResult | null> {
const filter = {
filters: [
{
col: 'dashboard_title',
opr: 'eq',
value: title,
},
],
};
const queryParam = rison.encode(filter);
const queryParam = rison.encode({
filters: [{ col, opr: 'eq', value }],
});
const response = await apiGet(
page,
`${ENDPOINTS.DASHBOARD}?q=${queryParam}`,
@@ -169,3 +157,29 @@ export async function getDashboardByName(
return null;
}
/**
 * Look up a dashboard whose dashboard_title exactly matches the given title.
 * @param page - Playwright page instance (provides authentication context)
 * @param title - The dashboard_title to search for
 * @returns Dashboard object if found, null if not found
 */
export async function getDashboardByName(
  page: Page,
  title: string,
): Promise<DashboardResult | null> {
  const match = await getDashboardByFilter(page, 'dashboard_title', title);
  return match;
}
/**
 * Look up a dashboard by its URL slug.
 * @param page - Playwright page instance (provides authentication context)
 * @param slug - The slug to search for
 * @returns Dashboard object if found, null if not found
 */
export async function getDashboardBySlug(
  page: Page,
  slug: string,
): Promise<DashboardResult | null> {
  const match = await getDashboardByFilter(page, 'slug', slug);
  return match;
}

View File

@@ -0,0 +1,113 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Page } from '@playwright/test';
import { apiPost, apiPut } from './requests';
import { ENDPOINTS as DASHBOARD_ENDPOINTS } from './dashboard';
/** Security REST endpoints used by the embedded-dashboard helpers. */
export const ENDPOINTS = {
  SECURITY_LOGIN: 'api/v1/security/login',
  GUEST_TOKEN: 'api/v1/security/guest_token/',
} as const;

/** Shape of the `result` payload returned by the dashboard `/embedded` API. */
export interface EmbeddedConfig {
  // Embedded UUID used to build the /embedded/<uuid> iframe URL
  uuid: string;
  // Domains allowed to embed the dashboard; an empty list allows all
  allowed_domains: string[];
  // Numeric id of the underlying dashboard
  dashboard_id: number;
}
/**
 * Turn on embedding for a dashboard and return its embedded configuration.
 * PUT acts as an upsert, so the embedded UUID is stable across repeated calls.
 * @param page - Playwright page instance (provides authentication context)
 * @param dashboardIdOrSlug - Numeric dashboard id or slug
 * @param allowedDomains - Domains allowed to embed; empty array allows all
 * @returns Embedded config with UUID, allowed_domains, and dashboard_id
 */
export async function apiEnableEmbedding(
  page: Page,
  dashboardIdOrSlug: number | string,
  allowedDomains: string[] = [],
): Promise<EmbeddedConfig> {
  const endpoint = `${DASHBOARD_ENDPOINTS.DASHBOARD}${dashboardIdOrSlug}/embedded`;
  const payload = { allowed_domains: allowedDomains };
  const response = await apiPut(page, endpoint, payload);
  const body = await response.json();
  return body.result as EmbeddedConfig;
}
/**
 * Get a guest token for an embedded dashboard.
 * Uses the admin login flow (login → access_token → guest_token).
 * @param page - Playwright page instance (used for request context)
 * @param dashboardId - Dashboard id to grant access to
 * @param options - Optional login credentials and RLS rules
 * @returns Signed guest token string
 * @throws Error when login returns no access token or the guest-token
 *   endpoint returns no token (e.g. bad credentials or missing permissions)
 */
export async function getGuestToken(
  page: Page,
  dashboardId: number | string,
  options?: {
    username?: string;
    password?: string;
    rls?: Array<{ dataset: number; clause: string }>;
  },
): Promise<string> {
  const username = options?.username ?? 'admin';
  const password = options?.password ?? 'general';
  const rls = options?.rls ?? [];
  // Step 1: Login to get access token
  const loginResponse = await apiPost(
    page,
    ENDPOINTS.SECURITY_LOGIN,
    {
      username,
      password,
      provider: 'db',
      refresh: true,
    },
    { allowMissingCsrf: true },
  );
  const loginBody = await loginResponse.json();
  const accessToken = loginBody.access_token;
  // Fail fast with a clear message instead of sending "Bearer undefined"
  // downstream, which would surface as an opaque 401 from the next call.
  if (!accessToken) {
    throw new Error(
      `Login failed for user "${username}": ${JSON.stringify(loginBody)}`,
    );
  }
  // Step 2: Fetch guest token using the access token.
  // Uses raw page.request.post() (not apiPost) because the guest token endpoint
  // requires a JWT Bearer token rather than session+CSRF auth.
  const guestResponse = await page.request.post(ENDPOINTS.GUEST_TOKEN, {
    data: {
      user: {
        username: 'embedded_test_user',
        first_name: 'Embedded',
        last_name: 'TestUser',
      },
      resources: [{ type: 'dashboard', id: String(dashboardId) }],
      rls,
    },
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  });
  const guestBody = await guestResponse.json();
  // Surface API errors (e.g. 403 from a permissions problem) explicitly
  // rather than returning undefined to the SDK.
  if (!guestBody.token) {
    throw new Error(
      `Guest token request failed (HTTP ${guestResponse.status()}): ` +
        JSON.stringify(guestBody),
    );
  }
  return guestBody.token;
}

View File

@@ -0,0 +1,140 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Page, FrameLocator } from '@playwright/test';
import { EMBEDDED } from '../utils/constants';
/**
 * Page object wrapping the embedded-dashboard test app.
 *
 * The app is served from its own origin (localhost:9000) and embeds a
 * Superset dashboard in an iframe via the @superset-ui/embedded-sdk.
 * Guest tokens are bridged from Node.js into the browser page through
 * Playwright's page.exposeFunction().
 */
export class EmbeddedPage {
  private static readonly SELECTORS = {
    CONTAINER: '[data-test="embedded-container"]',
    IFRAME: 'iframe[title="Embedded Dashboard"]',
    STATUS: '#status',
    ERROR: '#error',
  } as const;

  private readonly page: Page;

  constructor(page: Page) {
    this.page = page;
  }

  /**
   * Register the guest-token bridge on the page.
   * Must run BEFORE goto(): embedDashboard() invokes fetchGuestToken
   * immediately when the page loads.
   */
  async exposeTokenFetcher(tokenFn: () => Promise<string>): Promise<void> {
    await this.page.exposeFunction('__fetchGuestToken', tokenFn);
  }

  /**
   * Open the embedded test app with the given embed parameters encoded
   * in the query string.
   */
  async goto(params: {
    uuid: string;
    supersetDomain: string;
    hideTitle?: boolean;
    hideTab?: boolean;
    hideChartControls?: boolean;
    debug?: boolean;
  }): Promise<void> {
    const query = new URLSearchParams({
      uuid: params.uuid,
      supersetDomain: params.supersetDomain,
    });
    // Boolean flags are only serialized when set, mirroring the app's
    // "presence means true" parsing.
    const flags = ['hideTitle', 'hideTab', 'hideChartControls', 'debug'] as const;
    for (const flag of flags) {
      if (params[flag]) {
        query.set(flag, 'true');
      }
    }
    await this.page.goto(`${EMBEDDED.APP_URL}/?${query.toString()}`);
  }

  /**
   * FrameLocator scoped to the embedded dashboard iframe.
   */
  get iframe(): FrameLocator {
    return this.page.frameLocator(EmbeddedPage.SELECTORS.IFRAME);
  }

  /**
   * Block until the iframe element is attached to the DOM.
   */
  async waitForIframe(options?: { timeout?: number }): Promise<void> {
    const timeout = options?.timeout ?? EMBEDDED.IFRAME_LOAD;
    await this.page
      .locator(EmbeddedPage.SELECTORS.IFRAME)
      .waitFor({ state: 'attached', timeout });
  }

  /**
   * Block until dashboard content renders inside the iframe; the
   * grid-container signals that charts are loading or loaded.
   */
  async waitForDashboardContent(options?: { timeout?: number }): Promise<void> {
    const timeout = options?.timeout ?? EMBEDDED.DASHBOARD_RENDER;
    const grid = this.iframe
      .locator('.grid-container, [data-test="grid-container"]')
      .first();
    await grid.waitFor({ state: 'visible', timeout });
  }

  /**
   * Read the test app's status line.
   */
  async getStatus(): Promise<string> {
    const text = await this.page
      .locator(EmbeddedPage.SELECTORS.STATUS)
      .textContent();
    return text ?? '';
  }

  /**
   * Read the test app's error text; empty string when the error element
   * is hidden (display: none) or has no content.
   */
  async getError(): Promise<string> {
    const errorEl = this.page.locator(EmbeddedPage.SELECTORS.ERROR);
    const display = await errorEl.evaluate(el => getComputedStyle(el).display);
    if (display === 'none') {
      return '';
    }
    const text = await errorEl.textContent();
    return text ?? '';
  }

  /**
   * Whether the dashboard's editable title is visible inside the iframe.
   */
  async isTitleVisible(): Promise<boolean> {
    const title = this.iframe.locator(
      '[data-test="dashboard-header-container"] [data-test="editable-title-input"]',
    );
    return title.isVisible();
  }
}

View File

@@ -0,0 +1,288 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { test, expect, Browser, BrowserContext, Page } from '@playwright/test';
import { createServer, IncomingMessage, ServerResponse, Server } from 'http';
import { readFileSync, existsSync } from 'fs';
import { join } from 'path';
import { apiEnableEmbedding, getGuestToken } from '../../helpers/api/embedded';
import { getDashboardBySlug } from '../../helpers/api/dashboard';
import { EmbeddedPage } from '../../pages/EmbeddedPage';
import { EMBEDDED } from '../../utils/constants';
/**
 * Superset domain (Flask server) — set by CI or defaults to local dev.
 * Trailing slashes are stripped so URL concatenation is predictable.
 */
const SUPERSET_DOMAIN = (
  process.env.PLAYWRIGHT_BASE_URL || 'http://localhost:8088'
).replace(/\/+$/, '');
// SUPERSET_DOMAIN never ends with '/' after the strip above, so the
// slash-terminated base URL is always the domain plus '/'. (The previous
// endsWith('/') check here was dead code.)
const SUPERSET_BASE_URL = `${SUPERSET_DOMAIN}/`;
/**
* Path to the SDK bundle built from superset-embedded-sdk/
*/
const SDK_BUNDLE_PATH = join(
__dirname,
'../../../../superset-embedded-sdk/bundle/index.js',
);
/**
* Path to the embedded test app static files
*/
const EMBED_APP_DIR = join(__dirname, '../../embedded-app');
/**
* Create a minimal static file server for the embedded test app.
* Serves only a fixed allowlist of routes — the test app references just
* its index.html and the SDK bundle, so anything else is 404.
*/
const INDEX_HTML_PATH = join(EMBED_APP_DIR, 'index.html');
// Minimal static server for the embedded test app. Only a fixed allowlist
// of routes is served (/, /index.html, /sdk/index.js); everything else 404s.
function createEmbedAppServer(): Server {
  return createServer((req: IncomingMessage, res: ServerResponse) => {
    // Strip any query string; an absent url falls back to '/'.
    const route = (req.url ?? '').split('?')[0] || '/';
    if (route === '/' || route === '/index.html') {
      res.writeHead(200, { 'Content-Type': 'text/html' });
      res.end(readFileSync(INDEX_HTML_PATH));
      return;
    }
    if (route === '/sdk/index.js') {
      // The SDK bundle is built separately; give an actionable 404 if missing.
      if (!existsSync(SDK_BUNDLE_PATH)) {
        res.writeHead(404);
        res.end(
          'SDK bundle not found. Run: cd superset-embedded-sdk && npm ci && npm run build',
        );
        return;
      }
      res.writeHead(200, { 'Content-Type': 'text/javascript' });
      res.end(readFileSync(SDK_BUNDLE_PATH));
      return;
    }
    res.writeHead(404);
    res.end('Not found');
  });
}
/**
 * Open a fresh admin-authenticated browser context for API-only work
 * (enabling embedding, restoring config). The caller must close it.
 */
function createAdminContext(browser: Browser): Promise<BrowserContext> {
  const contextOptions = {
    storageState: 'playwright/.auth/user.json',
    baseURL: SUPERSET_BASE_URL,
  };
  return browser.newContext(contextOptions);
}
// ─── Test Suite ────────────────────────────────────────────────────────────
// Describe wrapper is needed for shared server state and serial execution:
// all tests share a static file server on a fixed port and must not run in parallel.
test.describe('Embedded Dashboard E2E', () => {
  // Serial mode: all tests share one static server and a mutable embedded
  // config on the same dashboard, so they must not run concurrently.
  test.describe.configure({ mode: 'serial' });
  // Shared suite state, assigned in beforeAll.
  let server: Server;
  let embedUuid: string;
  let dashboardId: number;
  /**
   * Set up a page to render the default embedded dashboard.
   * Tests that need a different UUID or UI config should not use this helper.
   */
  async function setupEmbeddedPage(page: Page): Promise<EmbeddedPage> {
    const embeddedPage = new EmbeddedPage(page);
    // Token bridge must be exposed before goto(): the SDK requests a
    // guest token immediately on page load.
    await embeddedPage.exposeTokenFetcher(async () =>
      getGuestToken(page, dashboardId),
    );
    await embeddedPage.goto({
      uuid: embedUuid,
      supersetDomain: SUPERSET_DOMAIN,
    });
    await embeddedPage.waitForIframe();
    await embeddedPage.waitForDashboardContent();
    return embeddedPage;
  }
  test.beforeAll(async ({ browser }) => {
    // Skip all tests if the SDK bundle hasn't been built
    test.skip(
      !existsSync(SDK_BUNDLE_PATH),
      'Embedded SDK bundle not found. Build it with: cd superset-embedded-sdk && npm ci && npm run build',
    );
    // Start the embedded test app server
    server = createEmbedAppServer();
    await new Promise<void>((resolve, reject) => {
      server.on('error', reject);
      server.listen(EMBEDDED.APP_PORT, () => resolve());
    });
    // Use a fresh context with auth to set up test data via API
    const context = await createAdminContext(browser);
    const setupPage = await context.newPage();
    try {
      // Find a well-known example dashboard
      const dashboard = await getDashboardBySlug(setupPage, 'world_health');
      if (!dashboard) {
        throw new Error(
          'Dashboard "world_health" not found. Ensure load_examples ran in CI setup.',
        );
      }
      dashboardId = dashboard.id;
      // Enable embedding on the dashboard (empty allowed_domains = allow all)
      const embedded = await apiEnableEmbedding(setupPage, dashboardId);
      embedUuid = embedded.uuid;
    } finally {
      await context.close();
    }
  });
  test.afterAll(async () => {
    // Guard: server is undefined when beforeAll skipped before starting it.
    if (server) {
      await new Promise<void>(resolve => server.close(() => resolve()));
    }
  });
  test('dashboard renders in embedded iframe', async ({ page }) => {
    const embeddedPage = await setupEmbeddedPage(page);
    // Verify the iframe src points to Superset's /embedded/ endpoint
    const iframeSrc = await page
      .locator('iframe[title="Embedded Dashboard"]')
      .getAttribute('src');
    expect(iframeSrc).toContain(`/embedded/${embedUuid}`);
    // Verify no errors in the test app
    const error = await embeddedPage.getError();
    expect(error).toBe('');
    // Baseline: title should be visible when hideTitle is not set
    const titleVisible = await embeddedPage.isTitleVisible();
    expect(titleVisible).toBe(true);
  });
  test('UI config hideTitle hides dashboard title', async ({ page }) => {
    const embeddedPage = new EmbeddedPage(page);
    await embeddedPage.exposeTokenFetcher(async () =>
      getGuestToken(page, dashboardId),
    );
    await embeddedPage.goto({
      uuid: embedUuid,
      supersetDomain: SUPERSET_DOMAIN,
      hideTitle: true,
    });
    await embeddedPage.waitForIframe();
    await embeddedPage.waitForDashboardContent();
    // The iframe URL should include uiConfig parameter
    const iframeSrc = await page
      .locator('iframe[title="Embedded Dashboard"]')
      .getAttribute('src');
    expect(iframeSrc).toContain('uiConfig=');
    // Verify the title is actually hidden inside the iframe
    const titleVisible = await embeddedPage.isTitleVisible();
    expect(titleVisible).toBe(false);
  });
  test('charts render inside embedded iframe', async ({ page }) => {
    const embeddedPage = await setupEmbeddedPage(page);
    // Verify chart containers are present and visible in the iframe
    const charts = embeddedPage.iframe.locator(
      '.chart-container, [data-test="chart-container"]',
    );
    await expect(charts.first()).toBeVisible({
      timeout: EMBEDDED.DASHBOARD_RENDER,
    });
  });
  test('allowed_domains blocks unauthorized referrer', async ({
    page,
    browser,
  }) => {
    const context = await createAdminContext(browser);
    const setupPage = await context.newPage();
    try {
      // Restrict to a domain that is NOT localhost:9000
      const restrictedEmbed = await apiEnableEmbedding(setupPage, dashboardId, [
        'https://allowed.example.com',
      ]);
      const embeddedPage = new EmbeddedPage(page);
      await embeddedPage.exposeTokenFetcher(async () =>
        getGuestToken(page, dashboardId),
      );
      await embeddedPage.goto({
        uuid: restrictedEmbed.uuid,
        supersetDomain: SUPERSET_DOMAIN,
      });
      // The iframe should load but get a 403 from Superset's referrer check
      await embeddedPage.waitForIframe();
      // The dashboard content should NOT render (403 blocks the embedded page)
      const content = embeddedPage.iframe.locator(
        '.grid-container, [data-test="grid-container"]',
      );
      await expect(content).not.toBeVisible({ timeout: 5000 });
    } finally {
      // Restore the open embedding config for other tests
      await apiEnableEmbedding(setupPage, dashboardId, []);
      await context.close();
    }
  });
  test('guest token enables dashboard data access', async ({ page }) => {
    const embeddedPage = new EmbeddedPage(page);
    let tokenCallCount = 0;
    await embeddedPage.exposeTokenFetcher(async () => {
      tokenCallCount += 1;
      return getGuestToken(page, dashboardId);
    });
    await embeddedPage.goto({
      uuid: embedUuid,
      supersetDomain: SUPERSET_DOMAIN,
    });
    await embeddedPage.waitForIframe();
    await embeddedPage.waitForDashboardContent();
    // The SDK should have called fetchGuestToken at least once
    expect(tokenCallCount).toBeGreaterThanOrEqual(1);
    // Verify charts are actually rendering data (not just loading spinners)
    const charts = embeddedPage.iframe.locator(
      '.chart-container, [data-test="chart-container"]',
    );
    const chartCount = await charts.count();
    expect(chartCount).toBeGreaterThan(0);
  });
});

View File

@@ -75,3 +75,18 @@ export const TIMEOUT = {
*/
SLOW_TEST: 60000, // 60s for tests that chain multiple slow operations
} as const;
/**
 * Embedded dashboard test app configuration.
 * The test app is served by a Node.js http server started in the test fixture.
 */
// Single source of truth for the app port; APP_URL is derived from it so the
// two can never drift apart.
const EMBEDDED_APP_PORT = 9000;
export const EMBEDDED = {
  /** Port for the embedded test app static server */
  APP_PORT: EMBEDDED_APP_PORT,
  /** Full URL for the embedded test app (derived from APP_PORT) */
  APP_URL: `http://localhost:${EMBEDDED_APP_PORT}`,
  /** Timeout for iframe to appear in the DOM */
  IFRAME_LOAD: 15000, // 15s
  /** Timeout for dashboard content to render inside the iframe */
  DASHBOARD_RENDER: 30000, // 30s
} as const;

View File

@@ -28,9 +28,6 @@ import {
getStandardizedControls,
} from '@superset-ui/chart-controls';
import OptionDescription from './OptionDescription';
import { glossary } from '@superset-ui/core';
const TIME_SHIFT_DESCRIPTION = glossary.Advanced_Analytics.Time_Shift.encode();
const config: ControlPanelConfig = {
controlPanelSections: [
@@ -324,7 +321,12 @@ const config: ControlPanelConfig = {
['156 weeks', t('156 weeks')],
['3 years', t('3 years')],
],
description: TIME_SHIFT_DESCRIPTION,
description: t(
'Overlay one or more timeseries from a ' +
'relative time period. Expects relative time deltas ' +
'in natural language (example: 24 hours, 7 days, ' +
'52 weeks, 365 days). Free text is supported.',
),
},
},
{

View File

@@ -26,9 +26,6 @@ import {
sections,
getStandardizedControls,
} from '@superset-ui/chart-controls';
import { glossary } from '@superset-ui/core';
const TIME_SHIFT_DESCRIPTION = glossary.Advanced_Analytics.Time_Shift.encode();
const config: ControlPanelConfig = {
controlPanelSections: [
@@ -207,7 +204,12 @@ const config: ControlPanelConfig = {
['156 weeks', t('156 weeks')],
['3 years', t('3 years')],
],
description: TIME_SHIFT_DESCRIPTION,
description: t(
'Overlay one or more timeseries from a ' +
'relative time period. Expects relative time deltas ' +
'in natural language (example: 24 hours, 7 days, ' +
'52 weeks, 365 days). Free text is supported.',
),
},
},
{

View File

@@ -28,10 +28,6 @@ import {
D3_FORMAT_OPTIONS,
} from '@superset-ui/chart-controls';
import { glossary } from '@superset-ui/core';
const TIME_SHIFT_DESCRIPTION = glossary.Advanced_Analytics.Time_Shift.encode();
/*
Plugins in question:
@@ -476,7 +472,12 @@ export const timeSeriesSection: ControlPanelSectionConfig[] = [
['156 weeks', t('156 weeks')],
['3 years', t('3 years')],
],
description: TIME_SHIFT_DESCRIPTION,
description: t(
'Overlay one or more timeseries from a ' +
'relative time period. Expects relative time deltas ' +
'in natural language (example: 24 hours, 7 days, ' +
'52 weeks, 365 days). Free text is supported.',
),
},
},
],

View File

@@ -21,6 +21,7 @@ import { t } from '@apache-superset/core/translation';
import { css, useTheme, SupersetTheme } from '@apache-superset/core/theme';
import { FormLabel, InfoTooltip, Tooltip } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
type ValidationError = string;
export type ControlHeaderProps = {
@@ -92,8 +93,15 @@ const ControlHeader: FC<ControlHeaderProps> = ({
>
{description && (
<span>
<Tooltip title={description}>
<Icons.InfoCircleOutlined css={iconStyles} />
<Tooltip
id="description-tooltip"
title={description}
placement="top"
>
<Icons.InfoCircleOutlined
css={iconStyles}
onClick={tooltipOnClick}
/>
</Tooltip>{' '}
</span>
)}

View File

@@ -22,10 +22,6 @@ import {
ControlSubSectionHeader,
} from '@superset-ui/chart-controls';
import { glossary } from '@superset-ui/core';
const TIME_SHIFT_DESCRIPTION = glossary.Advanced_Analytics.Time_Shift.encode();
export const datasourceAndVizType: ControlPanelSectionConfig = {
controlSetRows: [
['datasource'],
@@ -207,7 +203,12 @@ export const NVD3TimeSeries: ControlPanelSectionConfig[] = [
['156 weeks', t('156 weeks')],
['3 years', t('3 years')],
],
description: TIME_SHIFT_DESCRIPTION,
description: t(
'Overlay one or more timeseries from a ' +
'relative time period. Expects relative time deltas ' +
'in natural language (example: 24 hours, 7 days, ' +
'52 weeks, 365 days). Free text is supported.',
),
},
},
{

View File

@@ -22,7 +22,6 @@ from datetime import datetime
from pprint import pformat
from typing import Any, NamedTuple, TYPE_CHECKING
from flask import g
from flask_babel import gettext as _
from jinja2.exceptions import TemplateError
from pandas import DataFrame
@@ -38,6 +37,7 @@ from superset.extensions import event_logger
from superset.sql.parse import sanitize_clause, transpile_to_dialect
from superset.superset_typing import Column, Metric, OrderBy, QueryObjectDict
from superset.utils import json, pandas_postprocessing
from superset.utils.cache_keys import add_impersonation_cache_key_if_needed
from superset.utils.core import (
DTTM_ALIAS,
find_duplicates,
@@ -479,24 +479,7 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
# or if the CACHE_QUERY_BY_USER flag is on or per_user_caching is enabled on
# the database
try:
database = self.datasource.database # type: ignore
extra = json.loads(database.extra or "{}")
if (
(
feature_flag_manager.is_feature_enabled("CACHE_IMPERSONATION")
and database.impersonate_user
)
or feature_flag_manager.is_feature_enabled("CACHE_QUERY_BY_USER")
or extra.get("per_user_caching", False)
):
if key := database.db_engine_spec.get_impersonation_key(
getattr(g, "user", None)
):
logger.debug(
"Adding impersonation key to QueryObject cache dict: %s", key
)
cache_dict["impersonation_key"] = key
add_impersonation_cache_key_if_needed(self.datasource.database, cache_dict) # type: ignore
except AttributeError:
# datasource or database do not exist
pass

View File

@@ -0,0 +1,54 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import Any, TYPE_CHECKING
from flask import g
from superset import feature_flag_manager
from superset.utils.json import loads as json_loads
if TYPE_CHECKING:
from superset.models.core import Database
logger = logging.getLogger(__name__)
def add_impersonation_cache_key_if_needed(
    database: Database,
    cache_dict: dict[str, Any],
) -> None:
    """
    Mutate ``cache_dict`` in place, adding an ``impersonation_key`` entry
    when the database connection is configured for per-user caching; do
    nothing otherwise.
    """
    extra = json_loads(database.extra or "{}")
    # Per-user caching applies when DB-level impersonation caching is on,
    # when the global CACHE_QUERY_BY_USER flag is set, or when the database
    # opts in via its "per_user_caching" extra.
    impersonation_enabled = (
        feature_flag_manager.is_feature_enabled("CACHE_IMPERSONATION")
        and database.impersonate_user
    )
    per_user_caching = (
        impersonation_enabled
        or feature_flag_manager.is_feature_enabled("CACHE_QUERY_BY_USER")
        or extra.get("per_user_caching", False)
    )
    if not per_user_caching:
        return
    key = database.db_engine_spec.get_impersonation_key(getattr(g, "user", None))
    if key:
        logger.debug("Adding impersonation key to cache dict: %s", key)
        cache_dict["impersonation_key"] = key

View File

@@ -65,6 +65,7 @@ from superset.superset_typing import (
)
from superset.utils import core as utils, csv, json
from superset.utils.cache import set_and_log_cache
from superset.utils.cache_keys import add_impersonation_cache_key_if_needed
from superset.utils.core import (
apply_max_row_limit,
DateColumn,
@@ -472,6 +473,16 @@ class BaseViz: # pylint: disable=too-many-public-methods
cache_dict["extra_cache_keys"] = self.datasource.get_extra_cache_keys(query_obj)
cache_dict["rls"] = security_manager.get_rls_cache_key(self.datasource)
cache_dict["changed_on"] = self.datasource.changed_on
# Add an impersonation key to cache if impersonation is enabled on the db
# or if the CACHE_QUERY_BY_USER flag is on or per_user_caching is enabled on
# the database
try:
add_impersonation_cache_key_if_needed(self.datasource.database, cache_dict)
except AttributeError:
# datasource or database do not exist
pass
json_data = self.json_dumps(cache_dict, sort_keys=True)
return hash_from_str(json_data)

View File

@@ -78,6 +78,15 @@ FEATURE_FLAGS = {
WEBDRIVER_BASEURL = "http://0.0.0.0:8081/"
# Enable CORS for embedded dashboard E2E tests (test app on port 9000)
ENABLE_CORS = True
CORS_OPTIONS: dict = {
"origins": [
"http://localhost:9000",
],
"supports_credentials": True,
}
def GET_FEATURE_FLAGS_FUNC(ff): # noqa: N802
ff_copy = copy(ff)
@@ -86,6 +95,7 @@ def GET_FEATURE_FLAGS_FUNC(ff): # noqa: N802
TESTING = True
TALISMAN_ENABLED = False
WTF_CSRF_ENABLED = False
FAB_ROLES = {"TestRole": [["Security", "menu_access"], ["List Users", "menu_access"]]}

View File

@@ -95,7 +95,7 @@ def test_cache_key_changes_for_new_query_object_same_params():
assert query_object2.cache_key() == cache_key1
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_key_cache_query_by_user_on_no_datasource(feature_flag_mock):
"""
When CACHE_QUERY_BY_USER flag is on and there is no datasource,
@@ -112,7 +112,7 @@ def test_cache_key_cache_query_by_user_on_no_datasource(feature_flag_mock):
assert query_object.cache_key() == cache_key
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_query_by_user_on_no_user(logger_mock, feature_flag_mock):
"""
@@ -140,16 +140,13 @@ def test_cache_key_cache_query_by_user_on_no_user(logger_mock, feature_flag_mock
logger_mock.debug.assert_called()
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.common.query_object.logger")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.utils.cache_keys.logger")
def test_cache_key_cache_query_by_user_on_with_user(logger_mock, feature_flag_mock):
"""
When the same user is requesting a cache key with CACHE_QUERY_BY_USER
flag on, the key will be the same
"""
# Configure logger to enable DEBUG level for isEnabledFor check
logger_mock.isEnabledFor.return_value = True
datasource = SqlaTable(
table_name="test_table",
columns=[],
@@ -167,17 +164,17 @@ def test_cache_key_cache_query_by_user_on_with_user(logger_mock, feature_flag_mo
cache_key1 = query_object.cache_key()
assert query_object.cache_key() == cache_key1
# Should have both impersonation and cache key generation logs
# Should have impersonation log emitted by the cache_keys helper
logger_mock.debug.assert_has_calls(
[
call("Adding impersonation key to QueryObject cache dict: %s", "test_user"),
call("Adding impersonation key to cache dict: %s", "test_user"),
],
any_order=True,
)
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.common.query_object.logger")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.utils.cache_keys.logger")
def test_cache_key_cache_query_by_user_on_with_different_user(
logger_mock, feature_flag_mock
):
@@ -185,9 +182,6 @@ def test_cache_key_cache_query_by_user_on_with_different_user(
When two different users are requesting a cache key with CACHE_QUERY_BY_USER
flag on, the key will be different
"""
# Configure logger to enable DEBUG level for isEnabledFor check
logger_mock.isEnabledFor.return_value = True
datasource = SqlaTable(
table_name="test_table",
columns=[],
@@ -209,21 +203,17 @@ def test_cache_key_cache_query_by_user_on_with_different_user(
assert cache_key1 != cache_key2
# Should have both impersonation and cache key generation logs (any order)
# Should have impersonation logs emitted by the cache_keys helper
logger_mock.debug.assert_has_calls(
[
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user1"
),
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user2"
),
call("Adding impersonation key to cache dict: %s", "test_user1"),
call("Adding impersonation key to cache dict: %s", "test_user2"),
],
any_order=True,
)
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_impersonation_on_no_user(logger_mock, feature_flag_mock):
"""
@@ -251,7 +241,7 @@ def test_cache_key_cache_impersonation_on_no_user(logger_mock, feature_flag_mock
logger_mock.debug.assert_called()
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_impersonation_on_with_user(logger_mock, feature_flag_mock):
"""
@@ -290,7 +280,7 @@ def test_cache_key_cache_impersonation_on_with_user(logger_mock, feature_flag_mo
assert len(impersonation_calls) == 0
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_impersonation_on_with_different_user(
logger_mock, feature_flag_mock
@@ -335,8 +325,8 @@ def test_cache_key_cache_impersonation_on_with_different_user(
assert len(impersonation_calls) == 0
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.common.query_object.logger")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.utils.cache_keys.logger")
def test_cache_key_cache_impersonation_on_with_different_user_and_db_impersonation(
logger_mock,
feature_flag_mock,
@@ -346,9 +336,6 @@ def test_cache_key_cache_impersonation_on_with_different_user_and_db_impersonati
flag on, and cache_impersonation is enabled on the database,
the keys will be different
"""
# Configure logger to enable DEBUG level for isEnabledFor check
logger_mock.isEnabledFor.return_value = True
datasource = SqlaTable(
table_name="test_table",
columns=[],
@@ -374,15 +361,11 @@ def test_cache_key_cache_impersonation_on_with_different_user_and_db_impersonati
assert cache_key1 != cache_key2
# Should have both impersonation and cache key generation logs (any order)
# Should have impersonation logs emitted by the cache_keys helper
logger_mock.debug.assert_has_calls(
[
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user1"
),
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user2"
),
call("Adding impersonation key to cache dict: %s", "test_user1"),
call("Adding impersonation key to cache dict: %s", "test_user2"),
],
any_order=True,
)

View File

@@ -0,0 +1,111 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Behavioral tests for ``viz.BaseViz.cache_key`` covering per-user cache-key
inclusion.
"""
from typing import Any
from unittest.mock import patch
from flask_appbuilder.security.sqla.models import User
from superset import viz
from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.utils.core import override_user
# Minimal query payload reused by every test below when computing cache keys.
QUERY_OBJ: dict[str, Any] = {"row_limit": 100, "from_dttm": None, "to_dttm": None}
def _viz_for(database: Database) -> viz.BaseViz:
    """Build a minimal ``BaseViz`` backed by a bare SqlaTable on ``database``."""
    table = SqlaTable(
        table_name="t",
        columns=[],
        metrics=[],
        main_dttm_col=None,
        database=database,
    )
    form_data = {"viz_type": "table"}
    return viz.BaseViz(datasource=table, form_data=form_data)
def test_no_per_user_opt_in_keys_match_across_users():
    """
    When nothing opts the database into per-user caching, cache keys must be
    identical across users — a regression guard against accidentally scoping
    every cache entry to the requesting user.
    """
    viz_obj = _viz_for(Database(database_name="d", sqlalchemy_uri="sqlite://"))
    with override_user(User(username="alice")):
        alice_key = viz_obj.cache_key(QUERY_OBJ)
    with override_user(User(username="bob")):
        bob_key = viz_obj.cache_key(QUERY_OBJ)
    assert alice_key == bob_key
def test_per_user_caching_in_extra_yields_distinct_keys_per_user():
    """
    A database whose ``extra`` JSON sets ``per_user_caching: true`` must scope
    the cache per user: the same query issued by two users yields two keys.
    """
    db = Database(
        database_name="d",
        sqlalchemy_uri="sqlite://",
        extra='{"per_user_caching": true}',
    )
    viz_obj = _viz_for(db)
    with override_user(User(username="alice")):
        alice_key = viz_obj.cache_key(QUERY_OBJ)
    with override_user(User(username="bob")):
        bob_key = viz_obj.cache_key(QUERY_OBJ)
    assert alice_key != bob_key
def test_same_user_same_query_idempotent():
    """Repeated key computations for one user and one query must agree."""
    db = Database(
        database_name="d",
        sqlalchemy_uri="sqlite://",
        extra='{"per_user_caching": true}',
    )
    viz_obj = _viz_for(db)
    with override_user(User(username="alice")):
        first = viz_obj.cache_key(QUERY_OBJ)
        second = viz_obj.cache_key(QUERY_OBJ)
    assert first == second
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_query_by_user_flag_yields_distinct_keys(feature_flag_mock):
"""
Global ``CACHE_QUERY_BY_USER`` flag also reaches the legacy viz path.
"""
feature_flag_mock.is_feature_enabled.side_effect = (
lambda feature=None: feature == "CACHE_QUERY_BY_USER"
)
database = Database(database_name="d", sqlalchemy_uri="sqlite://")
obj = _viz_for(database)
with override_user(User(username="alice")):
key_a = obj.cache_key(QUERY_OBJ)
with override_user(User(username="bob")):
key_b = obj.cache_key(QUERY_OBJ)
assert key_a != key_b

View File

@@ -0,0 +1,107 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any
from unittest.mock import patch
from flask_appbuilder.security.sqla.models import User
from superset.models.core import Database
from superset.utils.cache_keys import add_impersonation_cache_key_if_needed
from superset.utils.core import override_user
def _flag(name: str):
"""Build a feature-flag side_effect that returns True only for ``name``."""
def side_effect(feature=None):
return feature == name
return side_effect
def _run(database: Database) -> dict[str, Any]:
    """Apply the impersonation helper to an empty dict and return that dict."""
    result: dict[str, Any] = {}
    add_impersonation_cache_key_if_needed(database, result)
    return result
def test_no_per_user_caching_yields_no_key():
    """A plain database with no per-user opt-in leaves the cache dict untouched."""
    db = Database(database_name="d", sqlalchemy_uri="sqlite://")
    with override_user(User(username="u")):
        cache_dict = _run(db)
    assert "impersonation_key" not in cache_dict
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_query_by_user_adds_username(feature_flag_mock):
feature_flag_mock.is_feature_enabled.side_effect = _flag("CACHE_QUERY_BY_USER")
database = Database(database_name="d", sqlalchemy_uri="sqlite://")
with override_user(User(username="alice")):
assert _run(database)["impersonation_key"] == "alice"
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_query_by_user_distinct_per_user(feature_flag_mock):
feature_flag_mock.is_feature_enabled.side_effect = _flag("CACHE_QUERY_BY_USER")
database = Database(database_name="d", sqlalchemy_uri="sqlite://")
with override_user(User(username="alice")):
key_a = _run(database)["impersonation_key"]
with override_user(User(username="bob")):
key_b = _run(database)["impersonation_key"]
assert key_a != key_b
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_impersonation_requires_database_flag(feature_flag_mock):
"""
CACHE_IMPERSONATION alone is not enough; ``database.impersonate_user`` must
also be set on the database for the per-user key to apply.
"""
feature_flag_mock.is_feature_enabled.side_effect = _flag("CACHE_IMPERSONATION")
db_no_impersonation = Database(database_name="d", sqlalchemy_uri="sqlite://")
db_with_impersonation = Database(
database_name="d", sqlalchemy_uri="sqlite://", impersonate_user=True
)
with override_user(User(username="alice")):
assert "impersonation_key" not in _run(db_no_impersonation)
assert _run(db_with_impersonation)["impersonation_key"] == "alice"
def test_per_user_caching_in_extra_json_enables_key():
    """``per_user_caching: true`` in the database ``extra`` JSON enables the key."""
    db = Database(
        database_name="d",
        sqlalchemy_uri="sqlite://",
        extra='{"per_user_caching": true}',
    )
    with override_user(User(username="alice")):
        assert _run(db)["impersonation_key"] == "alice"
def test_no_user_yields_no_key(app_context):  # noqa: ARG001
    """
    Without a logged-in user there is no identity to key on, so no
    impersonation key is added even when per-user caching is enabled.
    """
    db = Database(
        database_name="d",
        sqlalchemy_uri="sqlite://",
        extra='{"per_user_caching": true}',
    )
    # Deliberately no override_user here: g.user stays unset.
    assert "impersonation_key" not in _run(db)