Compare commits

...

5 Commits

Author SHA1 Message Date
Joe Li
f2d05d10ff fix(embedded-e2e): use route allowlist in static test server
The test app server only ever serves /, /index.html, and /sdk/index.js,
so replace dynamic path joining with a fixed allowlist. This eliminates
the data flow from req.url to readFileSync that CodeQL flagged as a
path-traversal sink — the previous resolve+startsWith containment check
was correct but not recognized as a sanitizer by the analyzer.
2026-04-30 19:27:59 -07:00
Joe Li
6669b186d7 ci(embedded-e2e): build SDK and configure test environment
- Add a build-embedded-sdk step to bashlib.sh and wire it into the
  superset-playwright and superset-e2e workflows so the SDK bundle is
  compiled before Playwright runs.
- Set SUPERSET_FEATURE_EMBEDDED_SUPERSET=true via workflow env so the
  feature flag only affects Playwright jobs. Setting it in the shared
  integration test config breaks unrelated Python tests because the
  security manager's guest-user paths access g.user through paths that
  most tests don't mock.
- Add CORS for localhost:9000 and TALISMAN_ENABLED=False to the
  integration test config. Talisman defaults to X-Frame-Options:
  SAMEORIGIN, which blocks the embedded dashboard from rendering
  inside an iframe hosted on a different port.
2026-04-30 19:27:47 -07:00
Joe Li
ca9eeec59d feat(embedded-e2e): add Playwright E2E tests for embedded dashboards
Adds five tests covering the embedded dashboard flow against the
world_health example: render, hideTitle UI config, chart rendering,
allowed_domains referrer check, and guest-token data access. Includes:

- A chromium-embedded Playwright project, excluded from the main
  project via testIgnore so it can be opted into separately.
- An EmbeddedPage page object and API helpers for embedding/guest
  tokens plus dashboard lookup by slug.
- A static test app (embedded-app/index.html) loaded from a minimal
  Node static server. Playwright bridges the guest-token fetch from
  Node into the browser via page.exposeFunction.
- EMBEDDED timeout/config constants.

Workflow integration and test-environment configuration land in a
follow-up commit.
2026-04-30 19:27:33 -07:00
Vitor Avila
86eb6176d1 fix: Enforce per-user caching on legacy API endpoint (#39789) 2026-04-30 18:04:33 -03:00
Joe Li
4244ae87bf fix(deps): regenerate pinned requirements for psycopg2-binary 2.9.12 (#39790)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-30 17:46:23 -03:00
18 changed files with 1033 additions and 74 deletions

View File

@@ -59,6 +59,15 @@ build-assets() {
say "::endgroup::"
}
# Compile the embedded SDK so the embedded-dashboard Playwright tests can
# serve the built bundle (superset-embedded-sdk/bundle/index.js) to the
# static test app.
build-embedded-sdk() {
  cd "$GITHUB_WORKSPACE/superset-embedded-sdk"
  say "::group::Build embedded SDK bundle for E2E tests"
  # Clean, lockfile-exact install before building the bundle.
  npm ci
  npm run build
  say "::endgroup::"
}
build-instrumented-assets() {
cd "$GITHUB_WORKSPACE/superset-frontend"

View File

@@ -169,6 +169,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
SUPERSET_FEATURE_EMBEDDED_SUPERSET: "true"
services:
postgres:
image: postgres:17-alpine
@@ -239,6 +240,11 @@ jobs:
uses: ./.github/actions/cached-dependencies
with:
run: build-instrumented-assets
- name: Build embedded SDK
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: build-embedded-sdk
- name: Install Playwright
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies

View File

@@ -43,6 +43,7 @@ jobs:
PYTHONPATH: ${{ github.workspace }}
REDIS_PORT: 16379
GITHUB_TOKEN: ${{ github.token }}
SUPERSET_FEATURE_EMBEDDED_SUPERSET: "true"
services:
postgres:
image: postgres:17-alpine
@@ -113,6 +114,11 @@ jobs:
uses: ./.github/actions/cached-dependencies
with:
run: build-instrumented-assets
- name: Build embedded SDK
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies
with:
run: build-embedded-sdk
- name: Install Playwright
if: steps.check.outputs.python || steps.check.outputs.frontend
uses: ./.github/actions/cached-dependencies

View File

@@ -707,7 +707,7 @@ protobuf==4.25.8
# proto-plus
psutil==6.1.0
# via apache-superset
psycopg2-binary==2.9.9
psycopg2-binary==2.9.12
# via apache-superset
py-key-value-aio==0.4.4
# via fastmcp

View File

@@ -95,6 +95,7 @@ export default defineConfig({
testIgnore: [
'**/tests/auth/**/*.spec.ts',
'**/tests/sqllab/**/*.spec.ts',
'**/tests/embedded/**/*.spec.ts',
...(process.env.INCLUDE_EXPERIMENTAL ? [] : ['**/experimental/**']),
],
use: {
@@ -132,6 +133,18 @@ export default defineConfig({
// No storageState = clean browser with no cached cookies
},
},
{
// Embedded dashboard tests - validates the full embedding flow:
// external app -> SDK -> iframe -> guest token -> dashboard render
name: 'chromium-embedded',
testMatch: '**/tests/embedded/**/*.spec.ts',
use: {
browserName: 'chromium',
testIdAttribute: 'data-test',
// Uses admin auth for API calls to configure embedding and get guest tokens
storageState: 'playwright/.auth/user.json',
},
},
],
// Web server setup - disabled in CI (Flask started separately in workflow)

View File

@@ -0,0 +1,96 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Embedded Dashboard Test App</title>
<style>
html, body { margin: 0; padding: 0; height: 100%; }
#superset-container { width: 100%; height: 100vh; }
#superset-container iframe { width: 100%; height: 100%; border: none; }
#error { color: red; padding: 20px; display: none; }
#status { padding: 10px; font-family: monospace; font-size: 12px; }
</style>
</head>
<body>
<div id="status">Initializing embedded dashboard...</div>
<div id="error"></div>
<div id="superset-container" data-test="embedded-container"></div>
<script src="/sdk/index.js"></script>
<script>
(async function () {
  // The embed target is provided by the Playwright EmbeddedPage helper via
  // the query string; both params are mandatory.
  const params = new URLSearchParams(window.location.search);
  const uuid = params.get('uuid');
  const supersetDomain = params.get('supersetDomain');
  if (!uuid || !supersetDomain) {
    document.getElementById('error').style.display = 'block';
    document.getElementById('error').textContent =
      'Missing required query params: uuid, supersetDomain';
    return;
  }
  const statusEl = document.getElementById('status');
  // fetchGuestToken is injected by Playwright via page.exposeFunction()
  // Falls back to window.__guestToken for simple/static token injection
  async function fetchGuestToken() {
    if (typeof window.__fetchGuestToken === 'function') {
      statusEl.textContent = 'Fetching guest token...';
      const token = await window.__fetchGuestToken();
      statusEl.textContent = 'Guest token received, loading dashboard...';
      return token;
    }
    if (window.__guestToken) {
      return window.__guestToken;
    }
    throw new Error('No guest token source available');
  }
  try {
    // Parse optional UI config from query params
    const uiConfig = {};
    if (params.get('hideTitle') === 'true') uiConfig.hideTitle = true;
    if (params.get('hideTab') === 'true') uiConfig.hideTab = true;
    if (params.get('hideChartControls') === 'true') uiConfig.hideChartControls = true;
    // supersetEmbeddedSdk is the global exposed by /sdk/index.js loaded above.
    const dashboard = await supersetEmbeddedSdk.embedDashboard({
      id: uuid,
      supersetDomain: supersetDomain,
      mountPoint: document.getElementById('superset-container'),
      fetchGuestToken: fetchGuestToken,
      // Only pass uiConfig when at least one flag was requested so the SDK
      // keeps its default UI behavior otherwise.
      dashboardUiConfig: Object.keys(uiConfig).length > 0 ? uiConfig : undefined,
      debug: params.get('debug') === 'true',
    });
    statusEl.textContent = 'Dashboard embedded successfully';
    // Expose dashboard API on window for Playwright assertions
    window.__embeddedDashboard = dashboard;
  } catch (err) {
    document.getElementById('error').style.display = 'block';
    document.getElementById('error').textContent = 'Embed failed: ' + err.message;
    statusEl.textContent = 'Error';
  }
})();
</script>
</body>
</html>

View File

@@ -132,26 +132,14 @@ export interface DashboardResult {
published?: boolean;
}
/**
* Get a dashboard by its title
* @param page - Playwright page instance (provides authentication context)
* @param title - The dashboard_title to search for
* @returns Dashboard object if found, null if not found
*/
export async function getDashboardByName(
async function getDashboardByFilter(
page: Page,
title: string,
col: 'dashboard_title' | 'slug',
value: string,
): Promise<DashboardResult | null> {
const filter = {
filters: [
{
col: 'dashboard_title',
opr: 'eq',
value: title,
},
],
};
const queryParam = rison.encode(filter);
const queryParam = rison.encode({
filters: [{ col, opr: 'eq', value }],
});
const response = await apiGet(
page,
`${ENDPOINTS.DASHBOARD}?q=${queryParam}`,
@@ -169,3 +157,29 @@ export async function getDashboardByName(
return null;
}
/**
 * Look up a dashboard whose dashboard_title exactly matches the given title.
 * @param page - Playwright page instance (provides authentication context)
 * @param title - The dashboard_title to search for
 * @returns Dashboard object if found, null if not found
 */
export async function getDashboardByName(
  page: Page,
  title: string,
): Promise<DashboardResult | null> {
  // Thin wrapper over the shared filter helper, matching on the title column.
  return await getDashboardByFilter(page, 'dashboard_title', title);
}
/**
 * Look up a dashboard whose slug exactly matches the given value.
 * @param page - Playwright page instance (provides authentication context)
 * @param slug - The slug to search for
 * @returns Dashboard object if found, null if not found
 */
export async function getDashboardBySlug(
  page: Page,
  slug: string,
): Promise<DashboardResult | null> {
  // Thin wrapper over the shared filter helper, matching on the slug column.
  return await getDashboardByFilter(page, 'slug', slug);
}

View File

@@ -0,0 +1,113 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Page } from '@playwright/test';
import { apiPost, apiPut } from './requests';
import { ENDPOINTS as DASHBOARD_ENDPOINTS } from './dashboard';
export const ENDPOINTS = {
SECURITY_LOGIN: 'api/v1/security/login',
GUEST_TOKEN: 'api/v1/security/guest_token/',
} as const;
export interface EmbeddedConfig {
uuid: string;
allowed_domains: string[];
dashboard_id: number;
}
/**
 * Enable embedding on a dashboard and return the embedded UUID.
 * Uses PUT (upsert) so the UUID is preserved across repeated calls.
 * @param page - Playwright page instance (provides authentication context)
 * @param dashboardIdOrSlug - Numeric dashboard id or slug
 * @param allowedDomains - Domains allowed to embed; empty array allows all
 * @returns Embedded config with UUID, allowed_domains, and dashboard_id
 */
export async function apiEnableEmbedding(
  page: Page,
  dashboardIdOrSlug: number | string,
  allowedDomains: string[] = [],
): Promise<EmbeddedConfig> {
  const endpoint = `${DASHBOARD_ENDPOINTS.DASHBOARD}${dashboardIdOrSlug}/embedded`;
  const response = await apiPut(page, endpoint, {
    allowed_domains: allowedDomains,
  });
  const { result } = await response.json();
  return result as EmbeddedConfig;
}
/**
 * Mint a guest token for an embedded dashboard.
 *
 * Runs the admin login flow: POST to the login endpoint for a JWT access
 * token, then POST to the guest-token endpoint with that token as a Bearer
 * credential.
 * @param page - Playwright page instance (used for request context)
 * @param dashboardId - Dashboard id to grant access to
 * @param options - Optional login credentials and RLS rules
 * @returns Signed guest token string
 */
export async function getGuestToken(
  page: Page,
  dashboardId: number | string,
  options?: {
    username?: string;
    password?: string;
    rls?: Array<{ dataset: number; clause: string }>;
  },
): Promise<string> {
  const {
    username = 'admin',
    password = 'general',
    rls = [],
  } = options ?? {};
  // Step 1: Login to get access token
  const loginResponse = await apiPost(
    page,
    ENDPOINTS.SECURITY_LOGIN,
    { username, password, provider: 'db', refresh: true },
    { allowMissingCsrf: true },
  );
  const { access_token: accessToken } = await loginResponse.json();
  // Step 2: Fetch guest token using the access token.
  // Uses raw page.request.post() (not apiPost) because the guest token endpoint
  // requires a JWT Bearer token rather than session+CSRF auth.
  const guestResponse = await page.request.post(ENDPOINTS.GUEST_TOKEN, {
    data: {
      user: {
        username: 'embedded_test_user',
        first_name: 'Embedded',
        last_name: 'TestUser',
      },
      resources: [{ type: 'dashboard', id: String(dashboardId) }],
      rls,
    },
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  });
  const guestBody = await guestResponse.json();
  return guestBody.token;
}

View File

@@ -0,0 +1,140 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Page, FrameLocator } from '@playwright/test';
import { EMBEDDED } from '../utils/constants';
/**
 * Page object for the embedded dashboard test app.
 *
 * The test app runs on a separate origin (localhost:9000) and uses the
 * @superset-ui/embedded-sdk to render a Superset dashboard in an iframe.
 * Playwright's page.exposeFunction() bridges the guest token from Node.js
 * into the browser page.
 */
export class EmbeddedPage {
  private readonly page: Page;

  private static readonly SELECTORS = {
    CONTAINER: '[data-test="embedded-container"]',
    IFRAME: 'iframe[title="Embedded Dashboard"]',
    STATUS: '#status',
    ERROR: '#error',
  } as const;

  constructor(page: Page) {
    this.page = page;
  }

  /**
   * Register the guest-token bridge on the page. Must run BEFORE goto():
   * embedDashboard() invokes fetchGuestToken as soon as the page loads.
   */
  async exposeTokenFetcher(tokenFn: () => Promise<string>): Promise<void> {
    await this.page.exposeFunction('__fetchGuestToken', tokenFn);
  }

  /**
   * Navigate to the embedded test app with the given parameters.
   */
  async goto(params: {
    uuid: string;
    supersetDomain: string;
    hideTitle?: boolean;
    hideTab?: boolean;
    hideChartControls?: boolean;
    debug?: boolean;
  }): Promise<void> {
    const search = new URLSearchParams({
      uuid: params.uuid,
      supersetDomain: params.supersetDomain,
    });
    // Optional flags are forwarded only when truthy; the test app checks
    // for the literal string 'true' on each of them.
    (['hideTitle', 'hideTab', 'hideChartControls', 'debug'] as const).forEach(
      flag => {
        if (params[flag]) {
          search.set(flag, 'true');
        }
      },
    );
    await this.page.goto(`${EMBEDDED.APP_URL}/?${search.toString()}`);
  }

  /** FrameLocator for the embedded dashboard iframe. */
  get iframe(): FrameLocator {
    return this.page.frameLocator(EmbeddedPage.SELECTORS.IFRAME);
  }

  /** Wait for the iframe element to be attached to the DOM. */
  async waitForIframe(options?: { timeout?: number }): Promise<void> {
    const timeout = options?.timeout ?? EMBEDDED.IFRAME_LOAD;
    await this.page
      .locator(EmbeddedPage.SELECTORS.IFRAME)
      .waitFor({ state: 'attached', timeout });
  }

  /**
   * Wait for dashboard content to render inside the iframe.
   * The grid-container signals that charts are loading/loaded.
   */
  async waitForDashboardContent(options?: { timeout?: number }): Promise<void> {
    const timeout = options?.timeout ?? EMBEDDED.DASHBOARD_RENDER;
    await this.iframe
      .locator('.grid-container, [data-test="grid-container"]')
      .first()
      .waitFor({ state: 'visible', timeout });
  }

  /** Current status line shown by the test app. */
  async getStatus(): Promise<string> {
    const text = await this.page
      .locator(EmbeddedPage.SELECTORS.STATUS)
      .textContent();
    return text ?? '';
  }

  /** Error text shown by the test app, or '' when the error box is hidden. */
  async getError(): Promise<string> {
    const errorEl = this.page.locator(EmbeddedPage.SELECTORS.ERROR);
    const display = await errorEl.evaluate(el => getComputedStyle(el).display);
    if (display === 'none') {
      return '';
    }
    return (await errorEl.textContent()) ?? '';
  }

  /** Whether the dashboard title input is visible inside the iframe. */
  async isTitleVisible(): Promise<boolean> {
    return this.iframe
      .locator(
        '[data-test="dashboard-header-container"] [data-test="editable-title-input"]',
      )
      .isVisible();
  }
}

View File

@@ -0,0 +1,288 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { test, expect, Browser, BrowserContext, Page } from '@playwright/test';
import { createServer, IncomingMessage, ServerResponse, Server } from 'http';
import { readFileSync, existsSync } from 'fs';
import { join } from 'path';
import { apiEnableEmbedding, getGuestToken } from '../../helpers/api/embedded';
import { getDashboardBySlug } from '../../helpers/api/dashboard';
import { EmbeddedPage } from '../../pages/EmbeddedPage';
import { EMBEDDED } from '../../utils/constants';
/**
 * Superset domain (Flask server) — set by CI or defaults to local dev.
 * Trailing slashes are stripped so the value can be spliced into URLs.
 */
const SUPERSET_DOMAIN = (() => {
  const url = process.env.PLAYWRIGHT_BASE_URL || 'http://localhost:8088';
  return url.replace(/\/+$/, '');
})();
// SUPERSET_DOMAIN can never end with '/' (stripped above), so the previous
// endsWith('/') conditional was dead code — the base URL always needs the
// trailing slash appended.
const SUPERSET_BASE_URL = `${SUPERSET_DOMAIN}/`;
/**
 * Path to the SDK bundle built from superset-embedded-sdk/
 * (compiled in CI by the build-embedded-sdk step; served at /sdk/index.js).
 */
const SDK_BUNDLE_PATH = join(
  __dirname,
  '../../../../superset-embedded-sdk/bundle/index.js',
);
/**
 * Path to the embedded test app static files
 */
const EMBED_APP_DIR = join(__dirname, '../../embedded-app');
/**
 * Create a minimal static file server for the embedded test app.
 * Serves only a fixed allowlist of routes — the test app references just
 * its index.html and the SDK bundle, so anything else is 404.
 */
// Entry page of the static test app, served at both / and /index.html.
const INDEX_HTML_PATH = join(EMBED_APP_DIR, 'index.html');
function createEmbedAppServer(): Server {
  // Route allowlist: everything outside these three paths is a 404, so no
  // request data ever flows into a filesystem path.
  const handler = (req: IncomingMessage, res: ServerResponse): void => {
    const route = (req.url ?? '/').split('?')[0] || '/';
    switch (route) {
      case '/sdk/index.js':
        if (!existsSync(SDK_BUNDLE_PATH)) {
          res.writeHead(404);
          res.end(
            'SDK bundle not found. Run: cd superset-embedded-sdk && npm ci && npm run build',
          );
        } else {
          res.writeHead(200, { 'Content-Type': 'text/javascript' });
          res.end(readFileSync(SDK_BUNDLE_PATH));
        }
        return;
      case '/':
      case '/index.html':
        res.writeHead(200, { 'Content-Type': 'text/html' });
        res.end(readFileSync(INDEX_HTML_PATH));
        return;
      default:
        res.writeHead(404);
        res.end('Not found');
    }
  };
  return createServer(handler);
}
/**
 * Create a browser context authenticated as admin for API-only work
 * (enabling embedding, restoring config). Caller is responsible for closing.
 */
async function createAdminContext(browser: Browser): Promise<BrowserContext> {
  const contextOptions = {
    storageState: 'playwright/.auth/user.json',
    baseURL: SUPERSET_BASE_URL,
  };
  return browser.newContext(contextOptions);
}
// ─── Test Suite ────────────────────────────────────────────────────────────
// Describe wrapper is needed for shared server state and serial execution:
// all tests share a static file server on a fixed port and must not run in parallel.
test.describe('Embedded Dashboard E2E', () => {
  test.describe.configure({ mode: 'serial' });
  let server: Server;
  let embedUuid: string;
  let dashboardId: number;
  /**
   * Set up a page to render the default embedded dashboard.
   * Tests that need a different UUID or UI config should not use this helper.
   */
  async function setupEmbeddedPage(page: Page): Promise<EmbeddedPage> {
    const embeddedPage = new EmbeddedPage(page);
    await embeddedPage.exposeTokenFetcher(async () =>
      getGuestToken(page, dashboardId),
    );
    await embeddedPage.goto({
      uuid: embedUuid,
      supersetDomain: SUPERSET_DOMAIN,
    });
    await embeddedPage.waitForIframe();
    await embeddedPage.waitForDashboardContent();
    return embeddedPage;
  }
  test.beforeAll(async ({ browser }) => {
    // Skip all tests if the SDK bundle hasn't been built
    test.skip(
      !existsSync(SDK_BUNDLE_PATH),
      'Embedded SDK bundle not found. Build it with: cd superset-embedded-sdk && npm ci && npm run build',
    );
    // Start the embedded test app server
    server = createEmbedAppServer();
    await new Promise<void>((resolve, reject) => {
      server.on('error', reject);
      server.listen(EMBEDDED.APP_PORT, () => resolve());
    });
    // Use a fresh context with auth to set up test data via API
    const context = await createAdminContext(browser);
    const setupPage = await context.newPage();
    try {
      // Find a well-known example dashboard
      const dashboard = await getDashboardBySlug(setupPage, 'world_health');
      if (!dashboard) {
        throw new Error(
          'Dashboard "world_health" not found. Ensure load_examples ran in CI setup.',
        );
      }
      dashboardId = dashboard.id;
      // Enable embedding on the dashboard (empty allowed_domains = allow all)
      const embedded = await apiEnableEmbedding(setupPage, dashboardId);
      embedUuid = embedded.uuid;
    } finally {
      await context.close();
    }
  });
  test.afterAll(async () => {
    if (server) {
      await new Promise<void>(resolve => server.close(() => resolve()));
    }
  });
  test('dashboard renders in embedded iframe', async ({ page }) => {
    const embeddedPage = await setupEmbeddedPage(page);
    // Verify the iframe src points to Superset's /embedded/ endpoint
    const iframeSrc = await page
      .locator('iframe[title="Embedded Dashboard"]')
      .getAttribute('src');
    expect(iframeSrc).toContain(`/embedded/${embedUuid}`);
    // Verify no errors in the test app
    const error = await embeddedPage.getError();
    expect(error).toBe('');
    // Baseline: title should be visible when hideTitle is not set
    const titleVisible = await embeddedPage.isTitleVisible();
    expect(titleVisible).toBe(true);
  });
  test('UI config hideTitle hides dashboard title', async ({ page }) => {
    const embeddedPage = new EmbeddedPage(page);
    await embeddedPage.exposeTokenFetcher(async () =>
      getGuestToken(page, dashboardId),
    );
    await embeddedPage.goto({
      uuid: embedUuid,
      supersetDomain: SUPERSET_DOMAIN,
      hideTitle: true,
    });
    await embeddedPage.waitForIframe();
    await embeddedPage.waitForDashboardContent();
    // The iframe URL should include uiConfig parameter
    const iframeSrc = await page
      .locator('iframe[title="Embedded Dashboard"]')
      .getAttribute('src');
    expect(iframeSrc).toContain('uiConfig=');
    // Verify the title is actually hidden inside the iframe
    const titleVisible = await embeddedPage.isTitleVisible();
    expect(titleVisible).toBe(false);
  });
  test('charts render inside embedded iframe', async ({ page }) => {
    const embeddedPage = await setupEmbeddedPage(page);
    // Verify chart containers are present and visible in the iframe
    const charts = embeddedPage.iframe.locator(
      '.chart-container, [data-test="chart-container"]',
    );
    await expect(charts.first()).toBeVisible({
      timeout: EMBEDDED.DASHBOARD_RENDER,
    });
  });
  test('allowed_domains blocks unauthorized referrer', async ({
    page,
    browser,
  }) => {
    const context = await createAdminContext(browser);
    const setupPage = await context.newPage();
    try {
      // Restrict to a domain that is NOT localhost:9000
      const restrictedEmbed = await apiEnableEmbedding(setupPage, dashboardId, [
        'https://allowed.example.com',
      ]);
      const embeddedPage = new EmbeddedPage(page);
      await embeddedPage.exposeTokenFetcher(async () =>
        getGuestToken(page, dashboardId),
      );
      await embeddedPage.goto({
        uuid: restrictedEmbed.uuid,
        supersetDomain: SUPERSET_DOMAIN,
      });
      // The iframe should load but get a 403 from Superset's referrer check
      await embeddedPage.waitForIframe();
      // The dashboard content should NOT render (403 blocks the embedded page).
      // .first() matches the positive-content checks elsewhere in this suite
      // and avoids a strict-mode failure if the selector ever resolves to
      // multiple nodes; with zero matches it behaves like the bare locator.
      const content = embeddedPage.iframe.locator(
        '.grid-container, [data-test="grid-container"]',
      );
      await expect(content.first()).not.toBeVisible({ timeout: 5000 });
    } finally {
      // Restore the open embedding config for other tests
      await apiEnableEmbedding(setupPage, dashboardId, []);
      await context.close();
    }
  });
  test('guest token enables dashboard data access', async ({ page }) => {
    const embeddedPage = new EmbeddedPage(page);
    let tokenCallCount = 0;
    await embeddedPage.exposeTokenFetcher(async () => {
      tokenCallCount += 1;
      return getGuestToken(page, dashboardId);
    });
    await embeddedPage.goto({
      uuid: embedUuid,
      supersetDomain: SUPERSET_DOMAIN,
    });
    await embeddedPage.waitForIframe();
    await embeddedPage.waitForDashboardContent();
    // The SDK should have called fetchGuestToken at least once
    expect(tokenCallCount).toBeGreaterThanOrEqual(1);
    // Verify charts are actually rendering data (not just loading spinners)
    const charts = embeddedPage.iframe.locator(
      '.chart-container, [data-test="chart-container"]',
    );
    const chartCount = await charts.count();
    expect(chartCount).toBeGreaterThan(0);
  });
});

View File

@@ -75,3 +75,18 @@ export const TIMEOUT = {
*/
SLOW_TEST: 60000, // 60s for tests that chain multiple slow operations
} as const;
/**
 * Embedded dashboard test app configuration.
 * The test app is served by a Node.js http server started in the test fixture.
 * NOTE: APP_PORT/APP_URL must stay in sync with the CORS origin allowed by
 * the integration test config (http://localhost:9000).
 */
export const EMBEDDED = {
  /** Port for the embedded test app static server */
  APP_PORT: 9000,
  /** Full URL (origin) for the embedded test app */
  APP_URL: 'http://localhost:9000',
  /** Timeout for iframe to appear in the DOM */
  IFRAME_LOAD: 15000, // 15s
  /** Timeout for dashboard content to render inside the iframe */
  DASHBOARD_RENDER: 30000, // 30s
} as const;

View File

@@ -22,7 +22,6 @@ from datetime import datetime
from pprint import pformat
from typing import Any, NamedTuple, TYPE_CHECKING
from flask import g
from flask_babel import gettext as _
from jinja2.exceptions import TemplateError
from pandas import DataFrame
@@ -38,6 +37,7 @@ from superset.extensions import event_logger
from superset.sql.parse import sanitize_clause, transpile_to_dialect
from superset.superset_typing import Column, Metric, OrderBy, QueryObjectDict
from superset.utils import json, pandas_postprocessing
from superset.utils.cache_keys import add_impersonation_cache_key_if_needed
from superset.utils.core import (
DTTM_ALIAS,
find_duplicates,
@@ -479,24 +479,7 @@ class QueryObject: # pylint: disable=too-many-instance-attributes
# or if the CACHE_QUERY_BY_USER flag is on or per_user_caching is enabled on
# the database
try:
database = self.datasource.database # type: ignore
extra = json.loads(database.extra or "{}")
if (
(
feature_flag_manager.is_feature_enabled("CACHE_IMPERSONATION")
and database.impersonate_user
)
or feature_flag_manager.is_feature_enabled("CACHE_QUERY_BY_USER")
or extra.get("per_user_caching", False)
):
if key := database.db_engine_spec.get_impersonation_key(
getattr(g, "user", None)
):
logger.debug(
"Adding impersonation key to QueryObject cache dict: %s", key
)
cache_dict["impersonation_key"] = key
add_impersonation_cache_key_if_needed(self.datasource.database, cache_dict) # type: ignore
except AttributeError:
# datasource or database do not exist
pass

View File

@@ -0,0 +1,54 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from typing import Any, TYPE_CHECKING
from flask import g
from superset import feature_flag_manager
from superset.utils.json import loads as json_loads
if TYPE_CHECKING:
from superset.models.core import Database
logger = logging.getLogger(__name__)
def add_impersonation_cache_key_if_needed(
    database: Database,
    cache_dict: dict[str, Any],
) -> None:
    """
    Add a per-user cache-key when the DB connection is configured for
    per-user caching, no-op otherwise.

    The key is derived from the current Flask user (``g.user``) by the
    database's engine spec; ``cache_dict`` is mutated in place.
    """
    extra = json_loads(database.extra or "{}")
    # Preserve the original evaluation (short-circuit) order:
    # CACHE_IMPERSONATION + impersonate_user, then CACHE_QUERY_BY_USER,
    # then the per-database override in ``extra``.
    per_user_caching_enabled = (
        feature_flag_manager.is_feature_enabled("CACHE_IMPERSONATION")
        and database.impersonate_user
    ) or (
        feature_flag_manager.is_feature_enabled("CACHE_QUERY_BY_USER")
        or extra.get("per_user_caching", False)
    )
    if not per_user_caching_enabled:
        return
    key = database.db_engine_spec.get_impersonation_key(getattr(g, "user", None))
    if key:
        logger.debug("Adding impersonation key to cache dict: %s", key)
        cache_dict["impersonation_key"] = key

View File

@@ -65,6 +65,7 @@ from superset.superset_typing import (
)
from superset.utils import core as utils, csv, json
from superset.utils.cache import set_and_log_cache
from superset.utils.cache_keys import add_impersonation_cache_key_if_needed
from superset.utils.core import (
apply_max_row_limit,
DateColumn,
@@ -472,6 +473,16 @@ class BaseViz: # pylint: disable=too-many-public-methods
cache_dict["extra_cache_keys"] = self.datasource.get_extra_cache_keys(query_obj)
cache_dict["rls"] = security_manager.get_rls_cache_key(self.datasource)
cache_dict["changed_on"] = self.datasource.changed_on
# Add an impersonation key to cache if impersonation is enabled on the db
# or if the CACHE_QUERY_BY_USER flag is on or per_user_caching is enabled on
# the database
try:
add_impersonation_cache_key_if_needed(self.datasource.database, cache_dict)
except AttributeError:
# datasource or database do not exist
pass
json_data = self.json_dumps(cache_dict, sort_keys=True)
return hash_from_str(json_data)

View File

@@ -78,6 +78,15 @@ FEATURE_FLAGS = {
WEBDRIVER_BASEURL = "http://0.0.0.0:8081/"
# Enable CORS so the embedded-dashboard E2E test app (static server on
# http://localhost:9000) can call the Superset API cross-origin.
ENABLE_CORS = True
CORS_OPTIONS: dict = {
    # Only the Playwright static test-app origin is allowed.
    "origins": [
        "http://localhost:9000",
    ],
    # Allow credentials on cross-origin requests — presumably needed for
    # authenticated API calls from the test app; TODO confirm.
    "supports_credentials": True,
}
def GET_FEATURE_FLAGS_FUNC(ff): # noqa: N802
ff_copy = copy(ff)
@@ -86,6 +95,7 @@ def GET_FEATURE_FLAGS_FUNC(ff): # noqa: N802
TESTING = True
TALISMAN_ENABLED = False
WTF_CSRF_ENABLED = False
FAB_ROLES = {"TestRole": [["Security", "menu_access"], ["List Users", "menu_access"]]}

View File

@@ -95,7 +95,7 @@ def test_cache_key_changes_for_new_query_object_same_params():
assert query_object2.cache_key() == cache_key1
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_key_cache_query_by_user_on_no_datasource(feature_flag_mock):
"""
When CACHE_QUERY_BY_USER flag is on and there is no datasource,
@@ -112,7 +112,7 @@ def test_cache_key_cache_query_by_user_on_no_datasource(feature_flag_mock):
assert query_object.cache_key() == cache_key
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_query_by_user_on_no_user(logger_mock, feature_flag_mock):
"""
@@ -140,16 +140,13 @@ def test_cache_key_cache_query_by_user_on_no_user(logger_mock, feature_flag_mock
logger_mock.debug.assert_called()
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.common.query_object.logger")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.utils.cache_keys.logger")
def test_cache_key_cache_query_by_user_on_with_user(logger_mock, feature_flag_mock):
"""
When the same user is requesting a cache key with CACHE_QUERY_BY_USER
flag on, the key will be the same
"""
# Configure logger to enable DEBUG level for isEnabledFor check
logger_mock.isEnabledFor.return_value = True
datasource = SqlaTable(
table_name="test_table",
columns=[],
@@ -167,17 +164,17 @@ def test_cache_key_cache_query_by_user_on_with_user(logger_mock, feature_flag_mo
cache_key1 = query_object.cache_key()
assert query_object.cache_key() == cache_key1
# Should have both impersonation and cache key generation logs
# Should have impersonation log emitted by the cache_keys helper
logger_mock.debug.assert_has_calls(
[
call("Adding impersonation key to QueryObject cache dict: %s", "test_user"),
call("Adding impersonation key to cache dict: %s", "test_user"),
],
any_order=True,
)
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.common.query_object.logger")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.utils.cache_keys.logger")
def test_cache_key_cache_query_by_user_on_with_different_user(
logger_mock, feature_flag_mock
):
@@ -185,9 +182,6 @@ def test_cache_key_cache_query_by_user_on_with_different_user(
When two different users are requesting a cache key with CACHE_QUERY_BY_USER
flag on, the key will be different
"""
# Configure logger to enable DEBUG level for isEnabledFor check
logger_mock.isEnabledFor.return_value = True
datasource = SqlaTable(
table_name="test_table",
columns=[],
@@ -209,21 +203,17 @@ def test_cache_key_cache_query_by_user_on_with_different_user(
assert cache_key1 != cache_key2
# Should have both impersonation and cache key generation logs (any order)
# Should have impersonation logs emitted by the cache_keys helper
logger_mock.debug.assert_has_calls(
[
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user1"
),
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user2"
),
call("Adding impersonation key to cache dict: %s", "test_user1"),
call("Adding impersonation key to cache dict: %s", "test_user2"),
],
any_order=True,
)
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_impersonation_on_no_user(logger_mock, feature_flag_mock):
"""
@@ -251,7 +241,7 @@ def test_cache_key_cache_impersonation_on_no_user(logger_mock, feature_flag_mock
logger_mock.debug.assert_called()
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_impersonation_on_with_user(logger_mock, feature_flag_mock):
"""
@@ -290,7 +280,7 @@ def test_cache_key_cache_impersonation_on_with_user(logger_mock, feature_flag_mo
assert len(impersonation_calls) == 0
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.common.query_object.logger")
def test_cache_key_cache_impersonation_on_with_different_user(
logger_mock, feature_flag_mock
@@ -335,8 +325,8 @@ def test_cache_key_cache_impersonation_on_with_different_user(
assert len(impersonation_calls) == 0
@patch("superset.common.query_object.feature_flag_manager")
@patch("superset.common.query_object.logger")
@patch("superset.utils.cache_keys.feature_flag_manager")
@patch("superset.utils.cache_keys.logger")
def test_cache_key_cache_impersonation_on_with_different_user_and_db_impersonation(
logger_mock,
feature_flag_mock,
@@ -346,9 +336,6 @@ def test_cache_key_cache_impersonation_on_with_different_user_and_db_impersonati
flag on, and cache_impersonation is enabled on the database,
the keys will be different
"""
# Configure logger to enable DEBUG level for isEnabledFor check
logger_mock.isEnabledFor.return_value = True
datasource = SqlaTable(
table_name="test_table",
columns=[],
@@ -374,15 +361,11 @@ def test_cache_key_cache_impersonation_on_with_different_user_and_db_impersonati
assert cache_key1 != cache_key2
# Should have both impersonation and cache key generation logs (any order)
# Should have impersonation logs emitted by the cache_keys helper
logger_mock.debug.assert_has_calls(
[
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user1"
),
call(
"Adding impersonation key to QueryObject cache dict: %s", "test_user2"
),
call("Adding impersonation key to cache dict: %s", "test_user1"),
call("Adding impersonation key to cache dict: %s", "test_user2"),
],
any_order=True,
)

View File

@@ -0,0 +1,111 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Behavioral tests for ``viz.BaseViz.cache_key`` covering per-user cache-key
inclusion.
"""
from typing import Any
from unittest.mock import patch
from flask_appbuilder.security.sqla.models import User
from superset import viz
from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.utils.core import override_user
QUERY_OBJ: dict[str, Any] = {"row_limit": 100, "from_dttm": None, "to_dttm": None}
def _viz_for(database: Database) -> viz.BaseViz:
    """Build a minimal ``BaseViz`` whose datasource is bound to *database*."""
    table = SqlaTable(
        table_name="t",
        columns=[],
        metrics=[],
        main_dttm_col=None,
        database=database,
    )
    form_data = {"viz_type": "table"}
    return viz.BaseViz(datasource=table, form_data=form_data)
def test_no_per_user_opt_in_keys_match_across_users():
    """
    Without any per-user caching opt-in, two different users on the same
    database/query must produce the *same* cache key (regression guard — we
    must not accidentally make every cache key per-user).
    """
    obj = _viz_for(Database(database_name="d", sqlalchemy_uri="sqlite://"))
    keys = []
    for username in ("alice", "bob"):
        with override_user(User(username=username)):
            keys.append(obj.cache_key(QUERY_OBJ))
    assert keys[0] == keys[1]
def test_per_user_caching_in_extra_yields_distinct_keys_per_user():
    """
    With ``per_user_caching: true`` set on the database, two different users
    must produce *different* cache keys for the same query.
    """
    obj = _viz_for(
        Database(
            database_name="d",
            sqlalchemy_uri="sqlite://",
            extra='{"per_user_caching": true}',
        )
    )
    with override_user(User(username="alice")):
        alice_key = obj.cache_key(QUERY_OBJ)
    with override_user(User(username="bob")):
        bob_key = obj.cache_key(QUERY_OBJ)
    assert alice_key != bob_key
def test_same_user_same_query_idempotent():
    """The same user issuing the same query twice gets an identical key."""
    database = Database(
        database_name="d",
        sqlalchemy_uri="sqlite://",
        extra='{"per_user_caching": true}',
    )
    obj = _viz_for(database)
    with override_user(User(username="alice")):
        first = obj.cache_key(QUERY_OBJ)
        second = obj.cache_key(QUERY_OBJ)
    assert first == second
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_query_by_user_flag_yields_distinct_keys(feature_flag_mock):
    """
    Global ``CACHE_QUERY_BY_USER`` flag also reaches the legacy viz path.
    """
    def only_cache_query_by_user(feature=None):
        return feature == "CACHE_QUERY_BY_USER"

    feature_flag_mock.is_feature_enabled.side_effect = only_cache_query_by_user
    obj = _viz_for(Database(database_name="d", sqlalchemy_uri="sqlite://"))
    per_user_keys = {}
    for username in ("alice", "bob"):
        with override_user(User(username=username)):
            per_user_keys[username] = obj.cache_key(QUERY_OBJ)
    assert per_user_keys["alice"] != per_user_keys["bob"]

View File

@@ -0,0 +1,107 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any
from unittest.mock import patch
from flask_appbuilder.security.sqla.models import User
from superset.models.core import Database
from superset.utils.cache_keys import add_impersonation_cache_key_if_needed
from superset.utils.core import override_user
def _flag(name: str):
"""Build a feature-flag side_effect that returns True only for ``name``."""
def side_effect(feature=None):
return feature == name
return side_effect
def _run(database: Database) -> dict[str, Any]:
    """Invoke the helper on an empty dict and return the (mutated) dict."""
    result: dict[str, Any] = {}
    add_impersonation_cache_key_if_needed(database, result)
    return result
def test_no_per_user_caching_yields_no_key():
    """No flags and no extra opt-in: the cache dict stays user-agnostic."""
    with override_user(User(username="u")):
        cache_dict = _run(Database(database_name="d", sqlalchemy_uri="sqlite://"))
    assert "impersonation_key" not in cache_dict
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_query_by_user_adds_username(feature_flag_mock):
    """``CACHE_QUERY_BY_USER`` keys the cache on the logged-in username."""
    feature_flag_mock.is_feature_enabled.side_effect = _flag("CACHE_QUERY_BY_USER")
    database = Database(database_name="d", sqlalchemy_uri="sqlite://")
    with override_user(User(username="alice")):
        result = _run(database)
    assert result["impersonation_key"] == "alice"
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_query_by_user_distinct_per_user(feature_flag_mock):
    """Distinct users must receive distinct impersonation keys."""
    feature_flag_mock.is_feature_enabled.side_effect = _flag("CACHE_QUERY_BY_USER")
    database = Database(database_name="d", sqlalchemy_uri="sqlite://")
    keys = {}
    for username in ("alice", "bob"):
        with override_user(User(username=username)):
            keys[username] = _run(database)["impersonation_key"]
    assert keys["alice"] != keys["bob"]
@patch("superset.utils.cache_keys.feature_flag_manager")
def test_cache_impersonation_requires_database_flag(feature_flag_mock):
    """
    CACHE_IMPERSONATION alone is not enough; ``database.impersonate_user`` must
    also be set on the database for the per-user key to apply.
    """
    feature_flag_mock.is_feature_enabled.side_effect = _flag("CACHE_IMPERSONATION")
    plain_db = Database(database_name="d", sqlalchemy_uri="sqlite://")
    impersonating_db = Database(
        database_name="d", sqlalchemy_uri="sqlite://", impersonate_user=True
    )
    with override_user(User(username="alice")):
        assert "impersonation_key" not in _run(plain_db)
        assert _run(impersonating_db)["impersonation_key"] == "alice"
def test_per_user_caching_in_extra_json_enables_key():
    """``per_user_caching: true`` in the database extra JSON opts in per-user keys."""
    database = Database(
        database_name="d",
        sqlalchemy_uri="sqlite://",
        extra='{"per_user_caching": true}',
    )
    with override_user(User(username="alice")):
        result = _run(database)
    assert result["impersonation_key"] == "alice"
def test_no_user_yields_no_key(app_context):  # noqa: ARG001
    """
    With no logged-in user, the engine spec returns None even when per-user
    caching is enabled — there's no identity to key on.
    """
    database = Database(
        database_name="d",
        sqlalchemy_uri="sqlite://",
        extra='{"per_user_caching": true}',
    )
    # Deliberately no override_user: g.user stays unset inside app_context.
    cache_dict = _run(database)
    assert "impersonation_key" not in cache_dict