Compare commits

...

9 Commits

Author SHA1 Message Date
Joe Li
354a143171 fix(playwright): address review feedback on de-flake change
Three follow-ups from /review-code on the list-spec de-flake work:

- Revert toast assertions to expect(toast.getSuccess()).toBeVisible().
  The previous switch to toast.getSuccess().waitFor({ state: 'visible' })
  was a no-op: both APIs poll for visibility, so neither catches a fast
  auto-dismiss between polls. The expect() form fails as an assertion
  (clearer diff, counted in reports). Misleading "use waitFor so we
  detect auto-dismiss" comments are gone too.

- Drop redundant waitFor({ state: 'visible' }) calls in BulkSelect's
  selectRow / deselectRow / clickAction. Locator.check()/click() already
  auto-wait for visibility, stability, and enabled state. The
  expect(checkbox).toBeChecked() assertion -- which is the actual race
  guard against React state propagation -- stays.

- Expand the DeleteConfirmationModal.clickDelete docstring to explain
  why it bypasses Modal.clickFooterButton: the footer button label is
  i18n'd, so name-based lookups break in non-English locales.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-11 16:10:20 -07:00
Joe Li
556a93e8f3 fix(playwright): extend de-flake hardening across list specs
Apply the same flake-pattern fixes from 4c4f56cd8a to the rest of the
list-spec test surface so the hardening is consistent end-to-end.

Spec-level (dashboard/chart/dataset):
- Row-visibility waits on freshly-created rows now use TIMEOUT.API_RESPONSE
  (15s) so the asynchronous list query has time on slow CI.
- Toast assertions switched from `expect(toast).toBeVisible()` to
  `toast.waitFor({ state: 'visible' })` so a fast auto-dismiss is detected.
- Post-delete row assertions switched from `not.toBeVisible()` to
  `toHaveCount(0)` since deleted rows are removed from the DOM.
- Bulk export specs (chart, dataset) now set `test.setTimeout(SLOW_TEST)`
  and `waitForGet({ timeout: SLOW_TEST })` to match the bulk-delete budget;
  the prior implicit budget was capped by Playwright's 30s test timeout.

Page object:
- BulkSelect.deselectRow mirrors selectRow's visibility wait + state
  assertion so any lingering selection surfaces at the call site.
2026-05-11 16:10:20 -07:00
Joe Li
839b5a8d0b fix(playwright): de-flake dashboard-list delete and bulk-export tests
Harden the delete-confirmation modal and bulk-select page objects against
known Playwright race patterns: explicit data-test selectors, an
enabled-state wait before clicking the disabled-by-default Delete button,
a header-toggle wait after enabling bulk select, and visibility gates on
checkboxes and action buttons that mount only after a row is selected.
Also extend list-row visibility timeouts for slow CI and switch the
post-delete assertion to toHaveCount(0).

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-11 16:10:20 -07:00
Evan Rusackas
cfb704dbeb test(sqllab): stabilize SaveDatasetModal overwrite-flow test helper (#40036)
Co-authored-by: Superset Dev <dev@superset.apache.org>
Co-authored-by: Claude Opus 4.7 <noreply@anthropic.com>
2026-05-11 15:48:10 -07:00
Evan Rusackas
e77f6ece92 fix(ci): serialize Docs Deployment runs to avoid push races (#40030)
Co-authored-by: Claude Code <noreply@anthropic.com>
2026-05-11 11:25:31 -07:00
Evan Rusackas
785a08c7d5 chore(frontend): export typed useAppDispatch / useAppSelector hooks (#40027)
Co-authored-by: Claude Code <noreply@anthropic.com>
2026-05-11 11:01:57 -07:00
Maxime Beauchemin
d90d3a2dea fix(importexport): honor overwrite flag on /api/v1/assets/import (#39502)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-05-11 10:24:42 -07:00
Maxime Beauchemin
6ee4d694bc fix(sqllab): include template_params when overwriting a dataset (#39501)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-05-11 10:24:15 -07:00
Evan Rusackas
006a1800be chore(lint): convert react-pivottable components to function components (#39453)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-05-11 10:19:05 -07:00
17 changed files with 2432 additions and 2295 deletions

View File

@@ -17,6 +17,16 @@ on:
workflow_dispatch: {}
# Serialize deploys: the action pushes to apache/superset-site without
# rebasing, so concurrent runs race on the final push and the loser fails
# with `! [rejected] asf-site -> asf-site (fetch first)`. Cancel any
# in-progress run as soon as a newer one starts — the destination repo
# isn't touched until the final push step, so canceling mid-build is safe,
# and the freshest content always wins.
concurrency:
group: docs-deploy-asf-site
cancel-in-progress: true
jobs:
config:
runs-on: ubuntu-24.04

View File

@@ -17,12 +17,13 @@
* under the License.
*/
import { Locator, Page } from '@playwright/test';
import { Locator, Page, expect } from '@playwright/test';
import { Button, Checkbox, Table } from '../core';
const BULK_SELECT_SELECTORS = {
CONTROLS: '[data-test="bulk-select-controls"]',
ACTION: '[data-test="bulk-select-action"]',
HEADER_TOGGLE: '[data-test="header-toggle-all"]',
} as const;
/**
@@ -56,10 +57,17 @@ export class BulkSelect {
}
/**
* Enables bulk selection mode by clicking the toggle button
* Enables bulk selection mode by clicking the toggle button.
*
* Waits for the bulk-select column header checkbox to render so the next
* row interaction does not race the table re-render that adds the
* checkbox column.
*/
async enable(): Promise<void> {
await this.getToggleButton().click();
await this.page
.locator(BULK_SELECT_SELECTORS.HEADER_TOGGLE)
.waitFor({ state: 'visible' });
}
/**
@@ -72,19 +80,27 @@ export class BulkSelect {
}
/**
* Selects a row's checkbox in bulk select mode
* Selects a row's checkbox in bulk select mode.
* Asserts the checkbox is checked afterwards so any state-update race
* surfaces here rather than as a missing bulk-action button later.
* @param rowName - The name/text identifying the row to select
*/
async selectRow(rowName: string): Promise<void> {
await this.getRowCheckbox(rowName).check();
const checkbox = this.getRowCheckbox(rowName);
await checkbox.check();
await expect(checkbox.element).toBeChecked();
}
/**
* Deselects a row's checkbox in bulk select mode
* Deselects a row's checkbox in bulk select mode.
* Mirrors selectRow: asserts the unchecked state so any lingering selection
* surfaces here rather than as a stale bulk-action count later.
* @param rowName - The name/text identifying the row to deselect
*/
async deselectRow(rowName: string): Promise<void> {
await this.getRowCheckbox(rowName).uncheck();
const checkbox = this.getRowCheckbox(rowName);
await checkbox.uncheck();
await expect(checkbox.element).not.toBeChecked();
}
/**
@@ -107,10 +123,11 @@ export class BulkSelect {
}
/**
* Clicks a bulk action button by name (e.g., "Export", "Delete")
* Clicks a bulk action button by name (e.g., "Export", "Delete").
* @param actionName - The name of the bulk action to click
*/
async clickAction(actionName: string): Promise<void> {
await this.getActionButton(actionName).click();
const button = this.getActionButton(actionName);
await button.click();
}
}

View File

@@ -17,6 +17,7 @@
* under the License.
*/
import { expect } from '@playwright/test';
import { Modal, Input } from '../core';
/**
@@ -27,7 +28,8 @@ import { Modal, Input } from '../core';
*/
export class DeleteConfirmationModal extends Modal {
private static readonly SELECTORS = {
CONFIRMATION_INPUT: 'input[type="text"]',
CONFIRMATION_INPUT: '[data-test="delete-modal-input"]',
CONFIRM_BUTTON: '[data-test="modal-confirm-button"]',
};
/**
@@ -36,12 +38,16 @@ export class DeleteConfirmationModal extends Modal {
private get confirmationInput(): Input {
return new Input(
this.page,
this.body.locator(DeleteConfirmationModal.SELECTORS.CONFIRMATION_INPUT),
this.element.locator(
DeleteConfirmationModal.SELECTORS.CONFIRMATION_INPUT,
),
);
}
/**
* Fills the confirmation input with the specified text.
* Waits for the input to be visible before filling so callers don't race
* with the modal's open animation / focus effect.
*
* @param confirmationText - The text to type
* @param options - Optional fill options (timeout, force)
@@ -57,11 +63,25 @@ export class DeleteConfirmationModal extends Modal {
confirmationText: string,
options?: { timeout?: number; force?: boolean },
): Promise<void> {
await this.confirmationInput.element.waitFor({
state: 'visible',
timeout: options?.timeout,
});
await this.confirmationInput.fill(confirmationText, options);
}
/**
* Clicks the Delete button in the footer
* Clicks the Delete button in the footer.
*
* Targets the confirm button by data-test rather than going through
* Modal.clickFooterButton, which finds buttons by their visible text. The
* button label is i18n'd ("Delete" / "Supprimer" / …) so name-based lookups
* break in non-English locales.
*
* Also waits for the button to become enabled before clicking: it is
* disabled until the confirmation text matches "DELETE", and React's state
* update from fillConfirmationInput is asynchronous, so an immediate click
* can race the disabled→enabled transition.
*
* @param options - Optional click options (timeout, force, delay)
*/
@@ -70,6 +90,10 @@ export class DeleteConfirmationModal extends Modal {
force?: boolean;
delay?: number;
}): Promise<void> {
await this.clickFooterButton('Delete', options);
const confirmButton = this.element.locator(
DeleteConfirmationModal.SELECTORS.CONFIRM_BUTTON,
);
await expect(confirmButton).toBeEnabled({ timeout: options?.timeout });
await confirmButton.click(options);
}
}

View File

@@ -32,6 +32,7 @@ import {
expectStatusOneOf,
expectValidExportZip,
} from '../../helpers/api/assertions';
import { TIMEOUT } from '../../utils/constants';
/**
* Extend testWithAssets with chartListPage navigation (beforeEach equivalent).
@@ -62,8 +63,11 @@ test('should delete a chart with confirmation', async ({
await chartListPage.goto();
await chartListPage.waitForTableLoad();
// Verify chart is visible in list
await expect(chartListPage.getChartRow(chartName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created chart appears.
await expect(chartListPage.getChartRow(chartName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Click delete action button
await chartListPage.clickDeleteAction(chartName);
@@ -81,12 +85,12 @@ test('should delete a chart with confirmation', async ({
// Modal should close
await deleteModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible();
// Verify chart is removed from list
await expect(chartListPage.getChartRow(chartName)).not.toBeVisible();
// Verify chart is removed from list (deleted rows leave the DOM)
await expect(chartListPage.getChartRow(chartName)).toHaveCount(0);
// Backend verification: API returns 404
await expectDeleted(page, ENDPOINTS.CHART, chartId, {
@@ -111,8 +115,11 @@ test('should edit chart name via properties modal', async ({
await chartListPage.goto();
await chartListPage.waitForTableLoad();
// Verify chart is visible in list
await expect(chartListPage.getChartRow(chartName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created chart appears.
await expect(chartListPage.getChartRow(chartName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Click edit action to open properties modal
await chartListPage.clickEditAction(chartName);
@@ -137,7 +144,7 @@ test('should edit chart name via properties modal', async ({
// Modal should close
await propertiesModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible();
@@ -164,8 +171,11 @@ test('should export a chart as a zip file', async ({
await chartListPage.goto();
await chartListPage.waitForTableLoad();
// Verify chart is visible in list
await expect(chartListPage.getChartRow(chartName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created chart appears.
await expect(chartListPage.getChartRow(chartName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Set up API response intercept for export endpoint
const exportResponsePromise = waitForGet(page, ENDPOINTS.CHART_EXPORT);
@@ -202,9 +212,14 @@ test('should bulk delete multiple charts', async ({
await chartListPage.goto();
await chartListPage.waitForTableLoad();
// Verify both charts are visible in list
await expect(chartListPage.getChartRow(chart1.name)).toBeVisible();
await expect(chartListPage.getChartRow(chart2.name)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created charts appear.
await expect(chartListPage.getChartRow(chart1.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(chartListPage.getChartRow(chart2.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Enable bulk select mode
await chartListPage.clickBulkSelectButton();
@@ -229,13 +244,13 @@ test('should bulk delete multiple charts', async ({
// Modal should close
await deleteModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible();
// Verify both charts are removed from list
await expect(chartListPage.getChartRow(chart1.name)).not.toBeVisible();
await expect(chartListPage.getChartRow(chart2.name)).not.toBeVisible();
// Verify both charts are removed from list (deleted rows leave the DOM)
await expect(chartListPage.getChartRow(chart1.name)).toHaveCount(0);
await expect(chartListPage.getChartRow(chart2.name)).toHaveCount(0);
// Backend verification: Both return 404
for (const chart of [chart1, chart2]) {
@@ -259,8 +274,11 @@ test('should edit chart name from card view', async ({ page, testAssets }) => {
await cardListPage.gotoCardView();
await cardListPage.waitForCardLoad();
// Verify chart card is visible
await expect(cardListPage.getChartCard(chartName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created chart card appears.
await expect(cardListPage.getChartCard(chartName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Open card dropdown and click edit
await cardListPage.clickCardEditAction(chartName);
@@ -285,13 +303,16 @@ test('should edit chart name from card view', async ({ page, testAssets }) => {
// Modal should close
await propertiesModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible();
// Verify the renamed card appears in card view and old name is gone
await expect(cardListPage.getChartCard(newName)).toBeVisible();
await expect(cardListPage.getChartCard(chartName)).not.toBeVisible();
// (the old card name is removed from the DOM after the rename re-render).
await expect(cardListPage.getChartCard(newName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(cardListPage.getChartCard(chartName)).toHaveCount(0);
// Backend verification: API returns updated name
const response = await apiGetChart(page, chartId);
@@ -304,6 +325,11 @@ test('should bulk export multiple charts', async ({
chartListPage,
testAssets,
}) => {
// Chains create×2 → refresh → bulk select → export. Matches the
// sibling bulk-delete test's budget so the export response wait below
// can exceed the 30s default without hitting the test timeout.
test.setTimeout(TIMEOUT.SLOW_TEST);
// Create 2 throwaway charts for bulk export
const [chart1, chart2] = await Promise.all([
createTestChart(page, testAssets, test.info(), {
@@ -318,9 +344,14 @@ test('should bulk export multiple charts', async ({
await chartListPage.goto();
await chartListPage.waitForTableLoad();
// Verify both charts are visible in list
await expect(chartListPage.getChartRow(chart1.name)).toBeVisible();
await expect(chartListPage.getChartRow(chart2.name)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created charts appear.
await expect(chartListPage.getChartRow(chart1.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(chartListPage.getChartRow(chart2.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Enable bulk select mode
await chartListPage.clickBulkSelectButton();
@@ -329,8 +360,12 @@ test('should bulk export multiple charts', async ({
await chartListPage.selectChartCheckbox(chart1.name);
await chartListPage.selectChartCheckbox(chart2.name);
// Set up API response intercept for export endpoint
const exportResponsePromise = waitForGet(page, ENDPOINTS.CHART_EXPORT);
// Set up API response intercept BEFORE the click that triggers it.
// Exports of multiple charts can take longer than 30s under load,
// so use SLOW_TEST instead of the default test-timeout-bound budget.
const exportResponsePromise = waitForGet(page, ENDPOINTS.CHART_EXPORT, {
timeout: TIMEOUT.SLOW_TEST,
});
// Click bulk export action
await chartListPage.clickBulkAction('Export');

View File

@@ -68,33 +68,34 @@ test('should delete a dashboard with confirmation', async ({
await dashboardListPage.goto();
await dashboardListPage.waitForTableLoad();
// Verify dashboard is visible in list
await expect(dashboardListPage.getDashboardRow(dashboardName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created dashboard appears.
await expect(dashboardListPage.getDashboardRow(dashboardName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Click delete action button
await dashboardListPage.clickDeleteAction(dashboardName);
// Delete confirmation modal should appear
const deleteModal = new DeleteConfirmationModal(page);
await deleteModal.waitForVisible();
await deleteModal.waitForReady();
// Type "DELETE" to confirm
await deleteModal.fillConfirmationInput('DELETE');
// Click the Delete button
// Click the Delete button (waits for it to become enabled)
await deleteModal.clickDelete();
// Modal should close
await deleteModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible();
// Verify dashboard is removed from list
await expect(
dashboardListPage.getDashboardRow(dashboardName),
).not.toBeVisible();
await expect(dashboardListPage.getDashboardRow(dashboardName)).toHaveCount(0);
// Backend verification: API returns 404
await expectDeleted(page, ENDPOINTS.DASHBOARD, dashboardId, {
@@ -119,8 +120,11 @@ test('should export a dashboard as a zip file', async ({
await dashboardListPage.goto();
await dashboardListPage.waitForTableLoad();
// Verify dashboard is visible in list
await expect(dashboardListPage.getDashboardRow(dashboardName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created dashboard appears.
await expect(dashboardListPage.getDashboardRow(dashboardName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Set up API response intercept for export endpoint
const exportResponsePromise = waitForGet(page, ENDPOINTS.DASHBOARD_EXPORT);
@@ -157,13 +161,14 @@ test('should bulk delete multiple dashboards', async ({
await dashboardListPage.goto();
await dashboardListPage.waitForTableLoad();
// Verify both dashboards are visible in list
await expect(
dashboardListPage.getDashboardRow(dashboard1.name),
).toBeVisible();
await expect(
dashboardListPage.getDashboardRow(dashboard2.name),
).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created dashboards appear.
await expect(dashboardListPage.getDashboardRow(dashboard1.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(dashboardListPage.getDashboardRow(dashboard2.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Enable bulk select mode
await dashboardListPage.clickBulkSelectButton();
@@ -188,17 +193,17 @@ test('should bulk delete multiple dashboards', async ({
// Modal should close
await deleteModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible();
// Verify both dashboards are removed from list
await expect(
dashboardListPage.getDashboardRow(dashboard1.name),
).not.toBeVisible();
await expect(
dashboardListPage.getDashboardRow(dashboard2.name),
).not.toBeVisible();
// Verify both dashboards are removed from list (deleted rows leave the DOM)
await expect(dashboardListPage.getDashboardRow(dashboard1.name)).toHaveCount(
0,
);
await expect(dashboardListPage.getDashboardRow(dashboard2.name)).toHaveCount(
0,
);
// Backend verification: Both return 404
for (const dashboard of [dashboard1, dashboard2]) {
@@ -213,6 +218,11 @@ test('should bulk export multiple dashboards', async ({
dashboardListPage,
testAssets,
}) => {
// Chains create×2 → refresh → bulk select → export. Matches the
// sibling bulk-delete test's budget so the export response wait below
// can exceed the 30s default without hitting the test timeout.
test.setTimeout(TIMEOUT.SLOW_TEST);
// Create 2 throwaway dashboards for bulk export
const [dashboard1, dashboard2] = await Promise.all([
createTestDashboard(page, testAssets, test.info(), {
@@ -227,25 +237,30 @@ test('should bulk export multiple dashboards', async ({
await dashboardListPage.goto();
await dashboardListPage.waitForTableLoad();
// Verify both dashboards are visible in list
await expect(
dashboardListPage.getDashboardRow(dashboard1.name),
).toBeVisible();
await expect(
dashboardListPage.getDashboardRow(dashboard2.name),
).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created dashboards appear.
await expect(dashboardListPage.getDashboardRow(dashboard1.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(dashboardListPage.getDashboardRow(dashboard2.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Enable bulk select mode
// Enable bulk select mode (waits for the checkbox column to render)
await dashboardListPage.clickBulkSelectButton();
// Select both dashboards
// Select both dashboards (each call asserts the checkbox is checked)
await dashboardListPage.selectDashboardCheckbox(dashboard1.name);
await dashboardListPage.selectDashboardCheckbox(dashboard2.name);
// Set up API response intercept for export endpoint
const exportResponsePromise = waitForGet(page, ENDPOINTS.DASHBOARD_EXPORT);
// Set up API response intercept BEFORE the click that triggers it.
// Exports of multiple dashboards can take longer than 30s under load,
// so use SLOW_TEST instead of the default test-timeout-bound budget.
const exportResponsePromise = waitForGet(page, ENDPOINTS.DASHBOARD_EXPORT, {
timeout: TIMEOUT.SLOW_TEST,
});
// Click bulk export action
// Click bulk export action (waits for the action button to render)
await dashboardListPage.clickBulkAction('Export');
// Wait for export API response and validate zip contains both dashboards
@@ -293,12 +308,12 @@ test.describe('import dashboard', () => {
label: `Dashboard ${dashboardId}`,
});
// Refresh to confirm dashboard is no longer in the list
// Refresh to confirm dashboard is no longer in the list (deleted rows leave the DOM)
await dashboardListPage.goto();
await dashboardListPage.waitForTableLoad();
await expect(
dashboardListPage.getDashboardRow(dashboardName),
).not.toBeVisible();
await expect(dashboardListPage.getDashboardRow(dashboardName)).toHaveCount(
0,
);
// Click the import button
await dashboardListPage.clickImportButton();
@@ -350,7 +365,7 @@ test.describe('import dashboard', () => {
// Modal should close on success
await importModal.waitForHidden({ timeout: TIMEOUT.FILE_IMPORT });
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 });
@@ -358,10 +373,11 @@ test.describe('import dashboard', () => {
await dashboardListPage.goto();
await dashboardListPage.waitForTableLoad();
// Verify dashboard appears in list
await expect(
dashboardListPage.getDashboardRow(dashboardName),
).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-imported dashboard appears.
await expect(dashboardListPage.getDashboardRow(dashboardName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Track for cleanup: look up the reimported dashboard by title
const reimported = await getDashboardByName(page, dashboardName);

View File

@@ -107,8 +107,11 @@ test('should delete a dataset with confirmation', async ({
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
// Verify dataset is visible in list
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created dataset appears.
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Click delete action button
await datasetListPage.clickDeleteAction(datasetName);
@@ -126,14 +129,13 @@ test('should delete a dataset with confirmation', async ({
// Modal should close
await deleteModal.waitForHidden();
// Verify success toast appears with correct message
// Verify success toast appears with correct message.
const toast = new Toast(page);
const successToast = toast.getSuccess();
await expect(successToast).toBeVisible();
await expect(toast.getSuccess()).toBeVisible();
await expect(toast.getMessage()).toContainText('Deleted');
// Verify dataset is removed from list
await expect(datasetListPage.getDatasetRow(datasetName)).not.toBeVisible();
// Verify dataset is removed from list (deleted rows leave the DOM)
await expect(datasetListPage.getDatasetRow(datasetName)).toHaveCount(0);
// Verify via API that dataset no longer exists (404)
await expectDeleted(page, ENDPOINTS.DATASET, datasetId, {
@@ -155,10 +157,13 @@ test('should duplicate a dataset with new name', async ({
);
const duplicateName = `duplicate_${Date.now()}_${test.info().parallelIndex}`;
// Navigate to list and verify original dataset is visible
// Navigate to list and verify original dataset is visible.
// The list query is asynchronous; allow extra time on slow CI.
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible();
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Set up response intercept to capture duplicate dataset ID
const duplicateResponsePromise = waitForPost(
@@ -201,9 +206,14 @@ test('should duplicate a dataset with new name', async ({
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
// Verify both datasets exist in list
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible();
await expect(datasetListPage.getDatasetRow(duplicateName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// duplicate appears alongside the original.
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(datasetListPage.getDatasetRow(duplicateName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// API Verification: Fetch both datasets via detail API for consistent comparison
// (list API may return undefined for fields that detail API returns as null)
@@ -256,6 +266,11 @@ test('should export multiple datasets via bulk select action', async ({
datasetListPage,
testAssets,
}) => {
// Chains create×2 → refresh → bulk select → export. Matches the
// sibling bulk-delete test's budget so the export response wait below
// can exceed the 30s default without hitting the test timeout.
test.setTimeout(TIMEOUT.SLOW_TEST);
// Create 2 throwaway datasets for bulk export
const [dataset1, dataset2] = await Promise.all([
createTestDataset(page, testAssets, test.info(), {
@@ -270,9 +285,14 @@ test('should export multiple datasets via bulk select action', async ({
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
// Verify both datasets are visible in list
await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible();
await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created datasets appear.
await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Enable bulk select mode
await datasetListPage.clickBulkSelectButton();
@@ -281,8 +301,12 @@ test('should export multiple datasets via bulk select action', async ({
await datasetListPage.selectDatasetCheckbox(dataset1.name);
await datasetListPage.selectDatasetCheckbox(dataset2.name);
// Set up API response intercept for export endpoint
const exportResponsePromise = waitForGet(page, ENDPOINTS.DATASET_EXPORT);
// Set up API response intercept BEFORE the click that triggers it.
// Exports of multiple datasets can take longer than 30s under load,
// so use SLOW_TEST instead of the default test-timeout-bound budget.
const exportResponsePromise = waitForGet(page, ENDPOINTS.DATASET_EXPORT, {
timeout: TIMEOUT.SLOW_TEST,
});
// Click bulk export action
await datasetListPage.clickBulkAction('Export');
@@ -312,8 +336,11 @@ test('should edit dataset name via modal', async ({
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
// Verify dataset is visible in list
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created dataset appears.
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Click edit action to open modal
await datasetListPage.clickEditAction(datasetName);
@@ -348,7 +375,7 @@ test('should edit dataset name via modal', async ({
// Modal should close
await editModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 });
@@ -377,9 +404,14 @@ test('should bulk delete multiple datasets', async ({
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
// Verify both datasets are visible in list
await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible();
await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-created datasets appear.
await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Enable bulk select mode
await datasetListPage.clickBulkSelectButton();
@@ -404,13 +436,13 @@ test('should bulk delete multiple datasets', async ({
// Modal should close
await deleteModal.waitForHidden();
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible();
// Verify both datasets are removed from list
await expect(datasetListPage.getDatasetRow(dataset1.name)).not.toBeVisible();
await expect(datasetListPage.getDatasetRow(dataset2.name)).not.toBeVisible();
// Verify both datasets are removed from list (deleted rows leave the DOM)
await expect(datasetListPage.getDatasetRow(dataset1.name)).toHaveCount(0);
await expect(datasetListPage.getDatasetRow(dataset2.name)).toHaveCount(0);
// Verify via API that datasets no longer exist (404)
await expectDeleted(page, ENDPOINTS.DATASET, dataset1.id, {
@@ -455,10 +487,10 @@ test.describe('import dataset', () => {
label: `Dataset ${datasetId}`,
});
// Refresh to confirm dataset is no longer in the list
// Refresh to confirm dataset is no longer in the list (deleted rows leave the DOM)
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
await expect(datasetListPage.getDatasetRow(datasetName)).not.toBeVisible();
await expect(datasetListPage.getDatasetRow(datasetName)).toHaveCount(0);
// Click the import button
await datasetListPage.clickImportButton();
@@ -507,7 +539,7 @@ test.describe('import dataset', () => {
// Modal should close on success
await importModal.waitForHidden({ timeout: TIMEOUT.FILE_IMPORT });
// Verify success toast appears
// Verify success toast appears.
const toast = new Toast(page);
await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 });
@@ -515,8 +547,11 @@ test.describe('import dataset', () => {
await datasetListPage.goto();
await datasetListPage.waitForTableLoad();
// Verify dataset appears in list
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
// The list query is asynchronous; allow extra time on slow CI before the
// freshly-imported dataset appears.
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible({
timeout: TIMEOUT.API_RESPONSE,
});
// Track for cleanup: the dataset import API returns {"message": "OK"}
// with no ID, so look up the reimported dataset by name.

View File

@@ -17,16 +17,14 @@
* under the License.
*/
import { PureComponent } from 'react';
import { memo } from 'react';
import { TableRenderer } from './TableRenderers';
import type { ComponentProps } from 'react';
type PivotTableProps = ComponentProps<typeof TableRenderer>;
class PivotTable extends PureComponent<PivotTableProps> {
render() {
return <TableRenderer {...this.props} />;
}
function PivotTable(props: PivotTableProps) {
return <TableRenderer {...props} />;
}
export default PivotTable;
export default memo(PivotTable);

View File

@@ -30,7 +30,7 @@ import fetchMock from 'fetch-mock';
import { SaveDatasetModal } from 'src/SqlLab/components/SaveDatasetModal';
import { createDatasource } from 'src/SqlLab/actions/sqlLab';
import { user, testQuery, mockdatasets } from 'src/SqlLab/fixtures';
import { FeatureFlag } from '@superset-ui/core';
import { FeatureFlag, SupersetClient } from '@superset-ui/core';
const mockedProps = {
visible: true,
@@ -354,6 +354,131 @@ describe('SaveDatasetModal', () => {
});
});
// Drives the SaveDatasetModal "overwrite existing dataset" flow up to the
// point where the dataset PUT is issued. Assumes the modal is already
// rendered and jest fake timers are active (jest.runAllTimers is used to
// flush AsyncSelect's debounced fetches).
const setupOverwriteFlow = async () => {
  // Select the "Overwrite existing" radio
  await userEvent.click(
    screen.getByRole('radio', { name: /overwrite existing/i }),
  );
  // Open the select to load existing-dataset options
  await userEvent.click(
    screen.getByRole('combobox', { name: /existing dataset/i }),
  );
  // Advance timers to flush debounced fetches in AsyncSelect
  await act(async () => {
    jest.runAllTimers();
  });
  // Wait for the loading indicator to clear
  await waitFor(() => {
    const loading = screen.queryByText('Loading...');
    expect(loading === null || !loading.checkVisibility()).toBe(true);
  });
  // Pick an existing dataset (use the listbox item, not the input mirror)
  const options = await screen.findAllByText('coolest table 0');
  await userEvent.click(options[1]);
  // First overwrite click → confirmation screen
  await userEvent.click(screen.getByRole('button', { name: /overwrite/i }));
  // Wait for the confirmation screen to render
  await screen.findByText(/are you sure you want to overwrite this dataset/i);
  // Second overwrite click → triggers the PUT
  await userEvent.click(screen.getByRole('button', { name: /overwrite/i }));
};
test('sends template_params when overwriting a dataset with include template parameters checked', async () => {
  // Enable the EnableTemplateProcessing feature flag for this test.
  // @ts-expect-error
  global.featureFlags = {
    [FeatureFlag.EnableTemplateProcessing]: true,
  };
  // Spy on SupersetClient.put so the dataset PUT can be captured without
  // any network traffic.
  const putSpy = jest
    .spyOn(SupersetClient, 'put')
    .mockResolvedValue({ json: { result: { id: 0 } } } as any);
  const dummyDispatch = jest.fn().mockResolvedValue({});
  useDispatchMock.mockReturnValue(dummyDispatch);
  useSelectorMock.mockReturnValue({ ...user });
  // templateParams carries a real param plus the special _filters entry,
  // which must be stripped before saving.
  const propsWithTemplateParam = {
    ...mockedProps,
    datasource: {
      ...testQuery,
      templateParams: JSON.stringify({ my_param: 12, _filters: 'foo' }),
    },
  };
  render(<SaveDatasetModal {...propsWithTemplateParam} />, {
    useRedux: true,
  });
  // Check the "Include Template Parameters" checkbox
  await userEvent.click(screen.getByRole('checkbox'));
  await setupOverwriteFlow();
  // Wait until a PUT to the dataset endpoint has been captured (other PUTs
  // may fire too, so filter by endpoint).
  await waitFor(() => {
    expect(
      putSpy.mock.calls.some(([req]) =>
        req.endpoint?.includes('api/v1/dataset/'),
      ),
    ).toBe(true);
  });
  const datasetPutCall = putSpy.mock.calls.find(([req]) =>
    req.endpoint?.includes('api/v1/dataset/'),
  )!;
  const [req] = datasetPutCall;
  expect(req.endpoint).toContain('override_columns=true');
  const body = JSON.parse(req.body as string);
  // _filters should be stripped, but my_param should be preserved
  expect(body.template_params).toEqual(JSON.stringify({ my_param: 12 }));
  putSpy.mockRestore();
});
test('does not send template_params when overwriting a dataset with include template parameters unchecked', async () => {
  // Enable the EnableTemplateProcessing feature flag for this test.
  // @ts-expect-error
  global.featureFlags = {
    [FeatureFlag.EnableTemplateProcessing]: true,
  };
  // Capture the dataset PUT without any network traffic.
  const put = jest
    .spyOn(SupersetClient, 'put')
    .mockResolvedValue({ json: { result: { id: 0 } } } as any);
  const dispatchStub = jest.fn().mockResolvedValue({});
  useDispatchMock.mockReturnValue(dispatchStub);
  useSelectorMock.mockReturnValue({ ...user });

  // Matches only PUTs to the dataset endpoint.
  const isDatasetPut = ([req]: Parameters<typeof SupersetClient.put>) =>
    req.endpoint?.includes('api/v1/dataset/');

  render(
    <SaveDatasetModal
      {...{
        ...mockedProps,
        datasource: {
          ...testQuery,
          templateParams: JSON.stringify({ my_param: 12 }),
        },
      }}
    />,
    { useRedux: true },
  );

  // The "Include Template Parameters" checkbox is intentionally left
  // unchecked before driving the overwrite flow.
  await setupOverwriteFlow();

  await waitFor(() => {
    expect(put.mock.calls.some(isDatasetPut)).toBe(true);
  });
  const [request] = put.mock.calls.find(isDatasetPut)!;
  const payload = JSON.parse(request.body as string);
  // With the checkbox unchecked, template_params must be absent entirely.
  expect(payload.template_params).toBeUndefined();
  put.mockRestore();
});
test('clears dataset cache when creating new dataset', async () => {
const clearDatasetCache = jest.spyOn(
require('src/utils/cachedSupersetGet'),

View File

@@ -149,14 +149,25 @@ const Styles = styled.div`
}
`}
`;
const updateDataset = async (
dbId: number,
datasetId: number,
sql: string,
columns: Array<Record<string, any>>,
owners: [number],
overrideColumns: boolean,
) => {
// Request payload for `updateDataset`. `templateParams` is optional: when
// it is undefined, the `template_params` key is omitted from the PUT body
// entirely rather than being sent as null.
type UpdateDatasetPayload = {
  dbId: number;
  datasetId: number;
  sql: string;
  columns: Array<Record<string, any>>;
  owners: number[];
  overrideColumns: boolean;
  templateParams?: string;
};
const updateDataset = async ({
dbId,
datasetId,
sql,
columns,
owners,
overrideColumns,
templateParams,
}: UpdateDatasetPayload) => {
const endpoint = `api/v1/dataset/${datasetId}?override_columns=${overrideColumns}`;
const headers = { 'Content-Type': 'application/json' };
const body = JSON.stringify({
@@ -164,6 +175,7 @@ const updateDataset = async (
columns,
owners,
database_id: dbId,
...(templateParams !== undefined && { template_params: templateParams }),
});
const data: JsonResponse = await SupersetClient.put({
@@ -179,6 +191,26 @@ const updateDataset = async (
const UNTITLED = t('Untitled Dataset');
// The filters param is only used to test jinja templates.
// Remove the special filters entry from the templateParams
// before saving the dataset.
/**
 * Strip the special `_filters` entry from a JSON-encoded templateParams
 * string before the dataset is saved (`_filters` exists only to test jinja
 * templates). Returns the re-serialized JSON, or `undefined` when the input
 * is not a string or is not valid JSON.
 */
const sanitizeTemplateParams = (
  templateParams: string | object | null | undefined,
): string | undefined => {
  if (typeof templateParams !== 'string') {
    return undefined;
  }
  try {
    const parsed = JSON.parse(templateParams) as Record<string, unknown>;
    // Keep every entry except the reserved _filters key.
    const clean = Object.fromEntries(
      Object.entries(parsed).filter(([key]) => key !== '_filters'),
    );
    return JSON.stringify(clean);
  } catch (e) {
    // malformed templateParams, do not include it
    return undefined;
  }
};
export const SaveDatasetModal = ({
visible,
onHide,
@@ -232,22 +264,27 @@ export const SaveDatasetModal = ({
}
setLoading(true);
const templateParams = includeTemplateParameters
? sanitizeTemplateParams(datasource?.templateParams)
: undefined;
try {
const [, key] = await Promise.all([
updateDataset(
datasource?.dbId,
datasetToOverwrite?.datasetid,
datasource?.sql,
datasource?.columns?.map(
updateDataset({
dbId: datasource?.dbId,
datasetId: datasetToOverwrite?.datasetid,
sql: datasource?.sql,
columns: datasource?.columns?.map(
(d: { column_name: string; type: string; is_dttm: boolean }) => ({
column_name: d.column_name,
type: d.type,
is_dttm: d.is_dttm,
}),
),
datasetToOverwrite?.owners?.map((o: DatasetOwner) => o.id),
true,
),
owners: datasetToOverwrite?.owners?.map((o: DatasetOwner) => o.id),
overrideColumns: true,
templateParams,
}),
postFormData(datasetToOverwrite.datasetid, 'table', {
...formDataWithDefaults,
datasource: `${datasetToOverwrite.datasetid}__table`,
@@ -319,27 +356,9 @@ export const SaveDatasetModal = ({
setLoading(true);
const selectedColumns = datasource?.columns ?? [];
// The filters param is only used to test jinja templates.
// Remove the special filters entry from the templateParams
// before saving the dataset.
let templateParams;
if (
typeof datasource?.templateParams === 'string' &&
includeTemplateParameters
) {
try {
const p = JSON.parse(datasource.templateParams);
/* eslint-disable-next-line no-underscore-dangle */
if (p._filters) {
/* eslint-disable-next-line no-underscore-dangle */
delete p._filters;
}
templateParams = JSON.stringify(p);
} catch (e) {
// malformed templateParams, do not include it
templateParams = undefined;
}
}
const templateParams = includeTemplateParameters
? sanitizeTemplateParams(datasource?.templateParams)
: undefined;
dispatch(
createDatasource({

View File

@@ -251,7 +251,7 @@ describe('useTables hook', () => {
fetchMock.get(`glob:*/api/v1/database/${expectDbId}/schemas/*`, {
result: fakeSchemaApiResult,
});
const { result, waitFor } = renderHook(
const { result } = renderHook(
() =>
useTables({
dbId: expectDbId,

View File

@@ -22,6 +22,11 @@ import {
createListenerMiddleware,
StoreEnhancer,
} from '@reduxjs/toolkit';
import {
useDispatch,
useSelector,
type TypedUseSelectorHook,
} from 'react-redux';
import thunk from 'redux-thunk';
import { api } from 'src/hooks/apiResources/queryApi';
import messageToastReducer from 'src/components/MessageToasts/reducers';
@@ -177,3 +182,12 @@ export function setupStore({
export const store = setupStore();
export type RootState = ReturnType<typeof store.getState>;
// Typed Redux hooks. Prefer these over the raw `useDispatch` / `useSelector`
// from react-redux: `useAppDispatch` understands the store's middleware (so
// thunks resolve correctly), and `useAppSelector` infers `RootState` without
// callers having to annotate every selector. Required ahead of the
// react-redux v8+ bump, which tightens dispatch typing — see #39927.
export type AppDispatch = typeof store.dispatch;
export const useAppDispatch: () => AppDispatch = useDispatch;
export const useAppSelector: TypedUseSelectorHook<RootState> = useSelector;

View File

@@ -49,8 +49,9 @@ from superset.datasets.schemas import ImportV1DatasetSchema
from superset.extensions import feature_flag_manager
from superset.migrations.shared.native_filters import migrate_dashboard
from superset.models.core import Database
from superset.models.dashboard import dashboard_slices
from superset.models.dashboard import Dashboard, dashboard_slices
from superset.models.slice import Slice
from superset.models.sql_lab import SavedQuery
from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema
from superset.utils.decorators import on_error, transaction
@@ -89,6 +90,9 @@ class ImportAssetsCommand(BaseCommand):
)
self._configs: dict[str, Any] = {}
self.sparse = kwargs.get("sparse", False)
# Defaults to ``True`` for backwards compatibility: historically this
# command always overwrote existing assets.
self.overwrite: bool = kwargs.get("overwrite", True)
# pylint: disable=too-many-locals
@staticmethod
@@ -96,6 +100,7 @@ class ImportAssetsCommand(BaseCommand):
configs: dict[str, Any],
sparse: bool = False,
contents: Optional[dict[str, Any]] = None,
overwrite: bool = True,
) -> None:
contents = {} if contents is None else contents
# import databases first
@@ -116,20 +121,20 @@ class ImportAssetsCommand(BaseCommand):
for file_name, config in configs.items():
if file_name.startswith("databases/"):
database = import_database(config, overwrite=True)
database = import_database(config, overwrite=overwrite)
database_ids[str(database.uuid)] = database.id
# import saved queries
for file_name, config in configs.items():
if file_name.startswith("queries/"):
config["db_id"] = database_ids[config["database_uuid"]]
import_saved_query(config, overwrite=True)
import_saved_query(config, overwrite=overwrite)
# import datasets
for file_name, config in configs.items():
if file_name.startswith("datasets/"):
config["database_id"] = database_ids[config["database_uuid"]]
dataset = import_dataset(config, overwrite=True)
dataset = import_dataset(config, overwrite=overwrite)
dataset_info[str(dataset.uuid)] = {
"datasource_id": dataset.id,
"datasource_type": dataset.datasource_type,
@@ -142,7 +147,7 @@ class ImportAssetsCommand(BaseCommand):
if file_name.startswith("charts/"):
dataset_dict = dataset_info[config["dataset_uuid"]]
config = update_chart_config_dataset(config, dataset_dict)
chart = import_chart(config, overwrite=True)
chart = import_chart(config, overwrite=overwrite)
charts.append(chart)
chart_ids[str(chart.uuid)] = chart.id
@@ -157,7 +162,7 @@ class ImportAssetsCommand(BaseCommand):
for file_name, config in configs.items():
if file_name.startswith("dashboards/"):
config = update_id_refs(config, chart_ids, dataset_info)
dashboard = import_dashboard(config, overwrite=True)
dashboard = import_dashboard(config, overwrite=overwrite)
# set ref in the dashboard_slices table
dashboard_chart_ids: list[dict[str, int]] = []
@@ -206,7 +211,73 @@ class ImportAssetsCommand(BaseCommand):
)
def run(self) -> None:
self.validate()
self._import(self._configs, self.sparse, self.contents)
self._import(self._configs, self.sparse, self.contents, self.overwrite)
# Maps asset file prefixes to the model class used to look up UUIDs for
# the "already exists" validation check when ``overwrite`` is ``False``.
# NOTE: ``_bundle_entries_by_prefix`` assigns each bundle file to the first
# matching prefix, so these prefixes are treated as mutually exclusive.
_MODEL_BY_PREFIX: dict[str, Any] = {
    "databases/": Database,
    "datasets/": SqlaTable,
    "charts/": Slice,
    "dashboards/": Dashboard,
    "queries/": SavedQuery,
}
def _bundle_entries_by_prefix(self) -> dict[str, list[tuple[str, str]]]:
    """
    Group ``(file_name, uuid)`` pairs from the bundle by asset prefix.

    Every prefix in ``_MODEL_BY_PREFIX`` gets a (possibly empty) list.
    Configs without a ``uuid`` are skipped; each file is assigned to at
    most one prefix (the first match).
    """
    grouped: dict[str, list[tuple[str, str]]] = {
        prefix: [] for prefix in self._MODEL_BY_PREFIX
    }
    for file_name, config in self._configs.items():
        raw_uuid = config.get("uuid")
        if not raw_uuid:
            continue
        matching_prefix = next(
            (prefix for prefix in grouped if file_name.startswith(prefix)),
            None,
        )
        if matching_prefix is not None:
            grouped[matching_prefix].append((file_name, str(raw_uuid)))
    return grouped
def _prevent_overwrite_existing_assets(
    self, exceptions: list[ValidationError]
) -> None:
    """
    Guard against clobbering assets when ``overwrite`` is ``False``.

    Appends one ``ValidationError`` per bundle file whose UUID already
    exists in the database. Lookups are scoped to the UUIDs present in the
    import bundle (one ``IN`` query per prefix), so the cost scales with
    the bundle size rather than with the total number of stored assets.
    """
    if self.overwrite:
        # Historical behavior: overwriting is allowed, nothing to check.
        return
    for prefix, entries in self._bundle_entries_by_prefix().items():
        if not entries:
            continue
        model_cls = self._MODEL_BY_PREFIX[prefix]
        incoming = [uuid for _, uuid in entries]
        conflicting = {
            str(value)
            for (value,) in db.session.query(model_cls.uuid)
            .filter(model_cls.uuid.in_(incoming))
            .all()
        }
        if not conflicting:
            continue
        # One error per conflicting file, keyed by its bundle file name.
        exceptions.extend(
            ValidationError(
                {
                    file_name: (
                        f"{model_cls.__name__} already exists "
                        "and `overwrite=true` was not passed"
                    ),
                }
            )
            for file_name, uuid in entries
            if uuid in conflicting
        )
def validate(self) -> None:
exceptions: list[ValidationError] = []
@@ -229,6 +300,7 @@ class ImportAssetsCommand(BaseCommand):
self.ssh_tunnel_priv_key_passwords,
self.encrypted_extra_secrets,
)
self._prevent_overwrite_existing_assets(exceptions)
if exceptions:
raise CommandInvalidError(

View File

@@ -30,6 +30,7 @@ from superset.commands.importers.v1.assets import ImportAssetsCommand
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.extensions import event_logger
from superset.utils import json
from superset.utils.core import parse_boolean_string
from superset.views.base_api import BaseSupersetApi, requires_form_data, statsd_metrics
@@ -157,6 +158,12 @@ class ImportExportRestApi(BaseSupersetApi):
sparse:
description: allow sparse update of resources
type: boolean
overwrite:
description: >-
overwrite existing assets? Defaults to ``true`` for
backwards compatibility. When ``false``, the import
fails if any of the assets already exist.
type: boolean
responses:
200:
description: Assets import result
@@ -188,6 +195,9 @@ class ImportExportRestApi(BaseSupersetApi):
if not contents:
raise NoValidFilesFoundError()
sparse = request.form.get("sparse") == "true"
# Defaults to True for backwards compatibility: historically this
# endpoint always overwrote existing assets.
overwrite = parse_boolean_string(request.form.get("overwrite", "true"))
passwords = (
json.loads(request.form["passwords"])
@@ -218,6 +228,7 @@ class ImportExportRestApi(BaseSupersetApi):
command = ImportAssetsCommand(
contents,
sparse=sparse,
overwrite=overwrite,
passwords=passwords,
ssh_tunnel_passwords=ssh_tunnel_passwords,
ssh_tunnel_private_keys=ssh_tunnel_private_keys,

View File

@@ -19,6 +19,7 @@ import copy
from typing import Any, cast
import yaml
from marshmallow.exceptions import ValidationError
from pytest_mock import MockerFixture
from sqlalchemy.orm.session import Session
from sqlalchemy.sql import select
@@ -32,6 +33,18 @@ from tests.unit_tests.fixtures.assets_configs import (
datasets_config,
)
# Minimal ``queries/`` bundle fixture. The saved-query tests read its
# ``uuid`` to seed a conflicting SavedQuery row before validation runs.
saved_queries_config: dict[str, Any] = {
    "queries/examples/my_query.yaml": {
        "schema": "main",
        "label": "My saved query",
        "description": None,
        "sql": "SELECT 1",
        "uuid": "e3e4f1f0-5c9d-4a4c-a4e4-0000000000aa",
        "version": "1.0.0",
        "database_uuid": "a2dc77af-e654-49bb-b321-40f6b559a1ee",
    },
}
def test_import_new_assets(mocker: MockerFixture, session: Session) -> None:
"""
@@ -227,6 +240,309 @@ def test_import_assets_skips_tags_when_feature_disabled(
assert db.session.query(TaggedObject).count() == 0
def test_import_overwrite_defaults_to_true(session: Session) -> None:
    """
    ``ImportAssetsCommand.overwrite`` defaults to ``True`` for backwards
    compatibility — historically the command always overwrote existing assets.
    """
    from superset.commands.importers.v1.assets import ImportAssetsCommand

    # Omitting the kwarg keeps the historical overwrite-everything behavior.
    assert ImportAssetsCommand({}).overwrite is True
    # Passing it explicitly must be honored.
    assert ImportAssetsCommand({}, overwrite=False).overwrite is False
def test_import_threads_overwrite_flag(mocker: MockerFixture, session: Session) -> None:
    """
    ``overwrite`` must be threaded through to ``import_database``,
    ``import_saved_query``, ``import_dataset``, ``import_chart`` and
    ``import_dashboard``. Previously these were hard-coded to ``overwrite=True``
    which caused the API flag to be ignored.
    """
    from superset import security_manager
    from superset.commands.importers.v1 import assets as assets_module
    from superset.commands.importers.v1.assets import ImportAssetsCommand

    mocker.patch.object(security_manager, "can_access", return_value=True)

    # Stub every per-asset import helper so nothing is persisted; give the
    # database/dataset stubs the attributes ``_import`` reads to wire up
    # cross-references (uuid -> id maps, datasource metadata).
    mocked_db = mocker.patch.object(assets_module, "import_database")
    mocked_db.return_value.uuid = "a2dc77af-e654-49bb-b321-40f6b559a1ee"
    mocked_db.return_value.id = 1
    mocked_ds = mocker.patch.object(assets_module, "import_dataset")
    mocked_ds.return_value.uuid = "53d47c0c-c03d-47f0-b9ac-81225f808283"
    mocked_ds.return_value.id = 1
    mocked_ds.return_value.datasource_type = "table"
    mocked_ds.return_value.table_name = "video_game_sales"
    mocked_chart = mocker.patch.object(assets_module, "import_chart")
    mocked_chart.return_value.viz_type = "table"
    mocked_dash = mocker.patch.object(assets_module, "import_dashboard")
    # Neutralize the dashboard post-processing steps (chart-uuid lookup,
    # id-reference rewriting, native-filter migration, the raw session
    # execute used for the dashboard_slices insert).
    mocker.patch.object(assets_module, "find_chart_uuids", return_value=[])
    mocker.patch.object(assets_module, "update_id_refs", side_effect=lambda c, *_: c)
    mocker.patch.object(assets_module, "migrate_dashboard")
    mocker.patch("superset.db.session.execute")

    configs = {
        **copy.deepcopy(databases_config),
        **copy.deepcopy(datasets_config),
        **copy.deepcopy(charts_config_1),
        **copy.deepcopy(dashboards_config_1),
    }
    ImportAssetsCommand._import(configs, overwrite=False)

    # Every helper must have been invoked with the threaded-through flag.
    assert mocked_db.called
    for call in mocked_db.call_args_list:
        assert call.kwargs["overwrite"] is False
    for call in mocked_ds.call_args_list:
        assert call.kwargs["overwrite"] is False
    for call in mocked_chart.call_args_list:
        assert call.kwargs["overwrite"] is False
    for call in mocked_dash.call_args_list:
        assert call.kwargs["overwrite"] is False
def test_prevent_overwrite_flags_existing_assets(
    mocker: MockerFixture, session: Session
) -> None:
    """
    With ``overwrite=False``, ``_prevent_overwrite_existing_assets`` must
    surface a clear ``ValidationError`` for each asset whose UUID already
    exists in the database.
    """
    from superset import db, security_manager
    from superset.commands.importers.v1.assets import ImportAssetsCommand
    from superset.models.slice import Slice

    mocker.patch.object(security_manager, "can_access", return_value=True)
    engine = db.session.get_bind()
    Slice.metadata.create_all(engine)  # pylint: disable=no-member

    # seed the database with the fixture assets
    seed_configs = {
        **copy.deepcopy(databases_config),
        **copy.deepcopy(datasets_config),
        **copy.deepcopy(charts_config_1),
        **copy.deepcopy(dashboards_config_1),
    }
    ImportAssetsCommand._import(seed_configs)

    # Re-import the same bundle with overwrite disabled. Fresh deep copies
    # are used because ``_import`` mutates configs in place (db/dataset id
    # wiring).
    command = ImportAssetsCommand({}, overwrite=False)
    command._configs = {
        **copy.deepcopy(databases_config),
        **copy.deepcopy(datasets_config),
        **copy.deepcopy(charts_config_1),
        **copy.deepcopy(dashboards_config_1),
    }
    exceptions: list[ValidationError] = []
    command._prevent_overwrite_existing_assets(exceptions)

    # one exception for each of the seeded assets (db + datasets + charts + dashboards)
    expected_count = (
        len(databases_config)
        + len(datasets_config)
        + len(charts_config_1)
        + len(dashboards_config_1)
    )
    assert len(exceptions) == expected_count
    for exc in exceptions:
        assert isinstance(exc, ValidationError)
        # each error maps exactly one bundle file name to its message
        [(_, message)] = exc.messages.items()
        assert "already exists" in message
        assert "`overwrite=true` was not passed" in message
def test_prevent_overwrite_allows_new_assets(
    mocker: MockerFixture, session: Session
) -> None:
    """
    With ``overwrite=False`` and no conflicting UUIDs in the database, the
    validation step must not raise.
    """
    from superset import db, security_manager
    from superset.commands.importers.v1.assets import ImportAssetsCommand
    from superset.models.slice import Slice

    mocker.patch.object(security_manager, "can_access", return_value=True)
    Slice.metadata.create_all(db.session.get_bind())  # pylint: disable=no-member

    # Nothing has been imported beforehand, so every bundle UUID is new.
    command = ImportAssetsCommand({}, overwrite=False)
    command._configs = {
        **copy.deepcopy(databases_config),
        **copy.deepcopy(datasets_config),
        **copy.deepcopy(charts_config_1),
        **copy.deepcopy(dashboards_config_1),
    }

    collected: list[ValidationError] = []
    command._prevent_overwrite_existing_assets(collected)
    # No conflicts means no validation errors.
    assert not collected
def test_prevent_overwrite_noop_when_overwrite_true(
    mocker: MockerFixture, session: Session
) -> None:
    """
    With ``overwrite=True`` (the default) the "already exists" validation must
    be a no-op even when assets exist in the database — this preserves the
    historical behavior.
    """
    from superset import db, security_manager
    from superset.commands.importers.v1.assets import ImportAssetsCommand
    from superset.models.slice import Slice

    mocker.patch.object(security_manager, "can_access", return_value=True)
    engine = db.session.get_bind()
    Slice.metadata.create_all(engine)  # pylint: disable=no-member

    # seed the database so every bundle UUID would conflict
    seed_configs = {
        **copy.deepcopy(databases_config),
        **copy.deepcopy(datasets_config),
        **copy.deepcopy(charts_config_1),
        **copy.deepcopy(dashboards_config_1),
    }
    ImportAssetsCommand._import(seed_configs)

    command = ImportAssetsCommand({})  # overwrite defaults to True
    command._configs = copy.deepcopy(seed_configs)
    exceptions: list[ValidationError] = []
    command._prevent_overwrite_existing_assets(exceptions)
    # despite the conflicts, no errors: the check short-circuits on overwrite
    assert exceptions == []
def test_prevent_overwrite_flags_existing_saved_queries(
    mocker: MockerFixture, session: Session
) -> None:
    """
    Saved queries (``queries/`` prefix) must also be covered by the
    "already exists" validation when ``overwrite=False`` — otherwise
    ``import_saved_query`` silently returns existing rows and the endpoint
    would appear to succeed despite the conflict.
    """
    from superset import db, security_manager
    from superset.commands.importers.v1.assets import ImportAssetsCommand
    from superset.models.slice import Slice
    from superset.models.sql_lab import SavedQuery

    mocker.patch.object(security_manager, "can_access", return_value=True)
    engine = db.session.get_bind()
    Slice.metadata.create_all(engine)  # pylint: disable=no-member
    SavedQuery.metadata.create_all(engine)  # pylint: disable=no-member

    # seed a saved query whose UUID matches the saved_queries_config fixture
    saved_query_uuid = next(iter(saved_queries_config.values()))["uuid"]
    db.session.add(SavedQuery(uuid=saved_query_uuid, label="seeded"))
    db.session.flush()

    command = ImportAssetsCommand({}, overwrite=False)
    command._configs = copy.deepcopy(saved_queries_config)
    exceptions: list[ValidationError] = []
    command._prevent_overwrite_existing_assets(exceptions)

    # exactly one conflict: the seeded saved query
    assert len(exceptions) == 1
    [(file_name, message)] = exceptions[0].messages.items()
    assert file_name.startswith("queries/")
    assert "SavedQuery already exists" in message
def test_prevent_overwrite_partial_conflict(
    mocker: MockerFixture, session: Session
) -> None:
    """
    When only some of the incoming assets already exist, validation must flag
    exactly the conflicting ones and leave brand-new assets untouched.
    """
    from superset import db, security_manager
    from superset.commands.importers.v1.assets import ImportAssetsCommand
    from superset.models.slice import Slice

    mocker.patch.object(security_manager, "can_access", return_value=True)
    engine = db.session.get_bind()
    Slice.metadata.create_all(engine)  # pylint: disable=no-member

    # seed only databases + datasets; charts and dashboards stay new
    ImportAssetsCommand._import(
        {
            **copy.deepcopy(databases_config),
            **copy.deepcopy(datasets_config),
        }
    )

    command = ImportAssetsCommand({}, overwrite=False)
    command._configs = {
        **copy.deepcopy(databases_config),
        **copy.deepcopy(datasets_config),
        **copy.deepcopy(charts_config_1),
        **copy.deepcopy(dashboards_config_1),
    }
    exceptions: list[ValidationError] = []
    command._prevent_overwrite_existing_assets(exceptions)

    # each ValidationError keys on a single bundle file name; collect them
    flagged_files = {next(iter(exc.messages)) for exc in exceptions}
    # only the seeded (database + dataset) files may be flagged
    assert flagged_files == set(databases_config) | set(datasets_config)
def test_prevent_overwrite_queries_only_bundle_uuids(
    mocker: MockerFixture, session: Session
) -> None:
    """
    The validation must scope its UUID lookup to the UUIDs present in the
    import bundle (one ``WHERE uuid IN (...)`` query per prefix that has
    incoming entries) and skip prefixes with no entries entirely. Otherwise
    every import with ``overwrite=false`` would scan all asset tables in
    full, regardless of bundle size.
    """
    from superset import db, security_manager
    from superset.commands.importers.v1.assets import ImportAssetsCommand
    from superset.connectors.sqla.models import SqlaTable
    from superset.models.core import Database
    from superset.models.dashboard import Dashboard
    from superset.models.slice import Slice
    from superset.models.sql_lab import SavedQuery

    mocker.patch.object(security_manager, "can_access", return_value=True)
    engine = db.session.get_bind()
    Slice.metadata.create_all(engine)  # pylint: disable=no-member
    SavedQuery.metadata.create_all(engine)  # pylint: disable=no-member

    # bundle only contains a database — no datasets/charts/dashboards/queries
    bundle = copy.deepcopy(databases_config)

    # record every ORM query issued while the validation runs
    spy = mocker.spy(db.session, "query")
    command = ImportAssetsCommand({}, overwrite=False)
    command._configs = bundle
    exceptions: list[ValidationError] = []
    command._prevent_overwrite_existing_assets(exceptions)

    # exactly one UUID query — for the only prefix with bundle entries — and
    # it targets the Database UUID column. Empty-bundle prefixes (datasets/
    # charts/dashboards/queries) must not be queried at all, otherwise this
    # validation degrades to a full-table scan per asset type.
    queried_columns = [
        call.args[0]
        for call in spy.call_args_list
        if call.args and getattr(call.args[0], "key", None) == "uuid"
    ]
    assert len(queried_columns) == 1
    assert queried_columns[0].class_ is Database
    queried_models = {col.class_ for col in queried_columns}
    for model_cls in (SqlaTable, Slice, Dashboard, SavedQuery):
        assert model_cls not in queried_models

    # no row matches in an empty table, so no validation errors are raised
    assert exceptions == []
def test_import_removes_dashboard_charts(
mocker: MockerFixture, session: Session
) -> None:

View File

@@ -48,7 +48,9 @@ def test_export_assets(
mocked_export_result = [
(
"metadata.yaml",
lambda: "version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n", # noqa: E501
lambda: (
"version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n"
), # noqa: E501
),
("databases/example.yaml", lambda: "<DATABASE CONTENTS>"),
]
@@ -109,6 +111,7 @@ def test_import_assets(
ImportAssetsCommand.assert_called_with(
mocked_contents,
sparse=False,
overwrite=True,
passwords=passwords,
ssh_tunnel_passwords=None,
ssh_tunnel_private_keys=None,
@@ -160,6 +163,7 @@ def test_import_assets_with_encrypted_extra_secrets(
ImportAssetsCommand.assert_called_with(
mocked_contents,
sparse=False,
overwrite=True,
passwords=None,
ssh_tunnel_passwords=None,
ssh_tunnel_private_keys=None,
@@ -168,6 +172,54 @@ def test_import_assets_with_encrypted_extra_secrets(
)
def test_import_assets_overwrite_false(
    mocker: MockerFixture,
    client: Any,
    full_api_access: None,
) -> None:
    """
    Passing ``overwrite=false`` on the form must be forwarded to
    ``ImportAssetsCommand``. Previously the flag was ignored and assets were
    always overwritten.
    """
    mocked_contents = {
        "metadata.yaml": (
            "version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n"
        ),
        "databases/example.yaml": "<DATABASE CONTENTS>",
    }

    # patch the command class so the endpoint's argument forwarding can be
    # asserted without touching the database
    ImportAssetsCommand = mocker.patch("superset.importexport.api.ImportAssetsCommand")  # noqa: N806

    # build an in-memory zip bundle the endpoint can unpack
    root = Path("assets_export")
    buf = BytesIO()
    with ZipFile(buf, "w") as bundle:
        for path, contents in mocked_contents.items():
            with bundle.open(str(root / path), "w") as fp:
                fp.write(contents.encode())
    buf.seek(0)

    form_data = {
        "bundle": (buf, "assets_export.zip"),
        "overwrite": "false",
    }
    response = client.post(
        "/api/v1/assets/import/", data=form_data, content_type="multipart/form-data"
    )
    assert response.status_code == 200
    # the form flag must arrive at the command as overwrite=False
    ImportAssetsCommand.assert_called_with(
        mocked_contents,
        sparse=False,
        overwrite=False,
        passwords=None,
        ssh_tunnel_passwords=None,
        ssh_tunnel_private_keys=None,
        ssh_tunnel_priv_key_passwords=None,
        encrypted_extra_secrets=None,
    )
def test_import_assets_not_zip(
mocker: MockerFixture,
client: Any,