diff --git a/.github/workflows/bashlib.sh b/.github/workflows/bashlib.sh index 91a131d4562..76f44d28f1b 100644 --- a/.github/workflows/bashlib.sh +++ b/.github/workflows/bashlib.sh @@ -127,6 +127,20 @@ playwright_testdata() { superset load_test_users superset load_examples superset init + # Enable DML on the examples database so Playwright tests can create/drop + # temporary tables via SQL Lab without depending on external data sources. + superset shell <<'PYEOF' +import sys +from superset.extensions import db +from superset.models.core import Database +examples_db = db.session.query(Database).filter_by(database_name='examples').first() +if not examples_db: + sys.exit('ERROR: examples database not found. load_examples may have failed.') + +examples_db.allow_dml = True +db.session.commit() +print('Enabled allow_dml on examples database') +PYEOF say "::endgroup::" } diff --git a/superset-frontend/playwright/components/modals/ImportDatasetModal.ts b/superset-frontend/playwright/components/modals/ImportDatasetModal.ts index 1399cc53547..269a0f42ebb 100644 --- a/superset-frontend/playwright/components/modals/ImportDatasetModal.ts +++ b/superset-frontend/playwright/components/modals/ImportDatasetModal.ts @@ -39,6 +39,24 @@ export class ImportDatasetModal extends Modal { .setInputFiles(filePath); } + /** + * Upload a file buffer to the import modal (no temp file needed) + * @param buffer - File contents as a Buffer + * @param fileName - Name to use for the uploaded file + */ + async uploadFileBuffer( + buffer: Buffer, + fileName: string = 'dataset_export.zip', + ): Promise<void> { + await this.page + .locator(ImportDatasetModal.SELECTORS.FILE_INPUT) + .setInputFiles({ + name: fileName, + mimeType: 'application/zip', + buffer, + }); + } + /** * Fill the overwrite confirmation input (only needed if dataset exists) */ diff --git a/superset-frontend/playwright/fixtures/dataset_export.zip b/superset-frontend/playwright/fixtures/dataset_export.zip deleted file mode 100644 index 
5acf5396269..00000000000 Binary files a/superset-frontend/playwright/fixtures/dataset_export.zip and /dev/null differ diff --git a/superset-frontend/playwright/helpers/api/assertions.ts b/superset-frontend/playwright/helpers/api/assertions.ts index a6fdb1937fe..7fb7b1c1787 100644 --- a/superset-frontend/playwright/helpers/api/assertions.ts +++ b/superset-frontend/playwright/helpers/api/assertions.ts @@ -17,9 +17,10 @@ * under the License. */ -import type { Response, APIResponse } from '@playwright/test'; +import type { Page, Response, APIResponse } from '@playwright/test'; import { expect } from '@playwright/test'; import * as unzipper from 'unzipper'; +import { apiGet } from './requests'; /** * Common interface for response types with status() method. @@ -61,6 +62,35 @@ export function expectStatusOneOf( return response; } +/** + * Poll an API endpoint until it returns 404, confirming a resource was deleted. + * Shared across chart, dashboard, and dataset list tests. + * @param page - Playwright page instance (provides authentication context) + * @param endpoint - API endpoint path (e.g. 'api/v1/dataset/') + * @param id - Resource ID to check + * @param options - Optional timeout (default 10000ms) and label for error messages + */ +export async function expectDeleted( + page: Page, + endpoint: string, + id: number, + options?: { timeout?: number; label?: string }, +): Promise<void> { + const timeout = options?.timeout ?? 10000; + const label = options?.label ?? `Resource ${id}`; + await expect + .poll( + async () => { + const response = await apiGet(page, `${endpoint}${id}`, { + failOnStatusCode: false, + }); + return response.status(); + }, + { timeout, message: `${label} should return 404 after delete` }, + ) + .toBe(404); +} + /** * Extract the resource ID from a JSON response body. * Handles both `{ result: { id } }` and `{ id }` shapes. 
diff --git a/superset-frontend/playwright/helpers/api/dataset.ts b/superset-frontend/playwright/helpers/api/dataset.ts index 2d3175de770..a1d2924c202 100644 --- a/superset-frontend/playwright/helpers/api/dataset.ts +++ b/superset-frontend/playwright/helpers/api/dataset.ts @@ -203,6 +203,21 @@ export async function apiDeleteDataset( return apiDelete(page, `${ENDPOINTS.DATASET}${datasetId}`, options); } +/** + * Export datasets as a zip file via the API. + * Uses Rison encoding for the query parameter (required by the endpoint). + * @param page - Playwright page instance (provides authentication context) + * @param datasetIds - Array of dataset IDs to export + * @returns API response containing the export zip + */ +export async function apiExportDatasets( + page: Page, + datasetIds: number[], +): Promise<APIResponse> { + const query = rison.encode(datasetIds); + return apiGet(page, `${ENDPOINTS.DATASET_EXPORT}?q=${query}`); +} + /** * Duplicate a dataset via the API * @param page - Playwright page instance (provides authentication context) diff --git a/superset-frontend/playwright/helpers/api/sqllab.ts b/superset-frontend/playwright/helpers/api/sqllab.ts new file mode 100644 index 00000000000..f0342590e37 --- /dev/null +++ b/superset-frontend/playwright/helpers/api/sqllab.ts @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Page, APIResponse } from '@playwright/test'; +import { apiPost, ApiRequestOptions } from './requests'; + +const ENDPOINTS = { + SQLLAB_EXECUTE: 'api/v1/sqllab/execute/', +} as const; + +/** + * Execute a SQL query via SQL Lab API. + * Requires `allow_dml=True` on the target database for DDL/DML statements. + * @param page - Playwright page instance (provides authentication context) + * @param databaseId - ID of the database to execute against + * @param sql - SQL statement to execute + * @param schema - Optional schema context for the query + * @returns API response from SQL Lab execution + */ +export async function apiExecuteSql( + page: Page, + databaseId: number, + sql: string, + schema?: string, + options?: ApiRequestOptions, +): Promise<APIResponse> { + return apiPost( + page, + ENDPOINTS.SQLLAB_EXECUTE, + { + database_id: databaseId, + sql, + schema: schema ?? 
null, + }, + options, + ); +} diff --git a/superset-frontend/playwright/pages/CreateDatasetPage.ts b/superset-frontend/playwright/pages/CreateDatasetPage.ts index ff129c7364c..e7cf750a01d 100644 --- a/superset-frontend/playwright/pages/CreateDatasetPage.ts +++ b/superset-frontend/playwright/pages/CreateDatasetPage.ts @@ -32,7 +32,7 @@ export class CreateDatasetPage { */ private static readonly SELECTORS = { DATABASE: '[data-test="select-database"]', - SCHEMA: '[data-test="Select schema or type to search schemas"]', + SCHEMA: '[data-test="Select schema"]', TABLE: '[data-test="Select table or type to search tables"]', } as const; diff --git a/superset-frontend/playwright/tests/chart/chart-list.spec.ts b/superset-frontend/playwright/tests/chart/chart-list.spec.ts index 57f7b2dd2ea..b7aad6ffda8 100644 --- a/superset-frontend/playwright/tests/chart/chart-list.spec.ts +++ b/superset-frontend/playwright/tests/chart/chart-list.spec.ts @@ -28,6 +28,7 @@ import { apiGetChart, ENDPOINTS } from '../../helpers/api/chart'; import { createTestChart } from './chart-test-helpers'; import { waitForGet, waitForPut } from '../../helpers/api/intercepts'; import { + expectDeleted, expectStatusOneOf, expectValidExportZip, } from '../../helpers/api/assertions'; @@ -88,17 +89,9 @@ test('should delete a chart with confirmation', async ({ await expect(chartListPage.getChartRow(chartName)).not.toBeVisible(); // Backend verification: API returns 404 - await expect - .poll( - async () => { - const response = await apiGetChart(page, chartId, { - failOnStatusCode: false, - }); - return response.status(); - }, - { timeout: 10000, message: `Chart ${chartId} should return 404` }, - ) - .toBe(404); + await expectDeleted(page, ENDPOINTS.CHART, chartId, { + label: `Chart ${chartId}`, + }); }); test('should edit chart name via properties modal', async ({ @@ -246,17 +239,9 @@ test('should bulk delete multiple charts', async ({ // Backend verification: Both return 404 for (const chart of [chart1, chart2]) 
{ - await expect - .poll( - async () => { - const response = await apiGetChart(page, chart.id, { - failOnStatusCode: false, - }); - return response.status(); - }, - { timeout: 10000, message: `Chart ${chart.id} should return 404` }, - ) - .toBe(404); + await expectDeleted(page, ENDPOINTS.CHART, chart.id, { + label: `Chart ${chart.id}`, + }); } }); diff --git a/superset-frontend/playwright/tests/dashboard/dashboard-list.spec.ts b/superset-frontend/playwright/tests/dashboard/dashboard-list.spec.ts index a305b65ffa0..1def93acecb 100644 --- a/superset-frontend/playwright/tests/dashboard/dashboard-list.spec.ts +++ b/superset-frontend/playwright/tests/dashboard/dashboard-list.spec.ts @@ -25,7 +25,6 @@ import { } from '../../components/modals'; import { Toast } from '../../components/core'; import { - apiGetDashboard, apiDeleteDashboard, apiExportDashboards, getDashboardByName, @@ -34,6 +33,7 @@ import { import { createTestDashboard } from './dashboard-test-helpers'; import { waitForGet, waitForPost } from '../../helpers/api/intercepts'; import { + expectDeleted, expectStatusOneOf, expectValidExportZip, } from '../../helpers/api/assertions'; @@ -97,17 +97,9 @@ test('should delete a dashboard with confirmation', async ({ ).not.toBeVisible(); // Backend verification: API returns 404 - await expect - .poll( - async () => { - const response = await apiGetDashboard(page, dashboardId, { - failOnStatusCode: false, - }); - return response.status(); - }, - { timeout: 10000, message: `Dashboard ${dashboardId} should return 404` }, - ) - .toBe(404); + await expectDeleted(page, ENDPOINTS.DASHBOARD, dashboardId, { + label: `Dashboard ${dashboardId}`, + }); }); test('should export a dashboard as a zip file', async ({ @@ -210,20 +202,9 @@ test('should bulk delete multiple dashboards', async ({ // Backend verification: Both return 404 for (const dashboard of [dashboard1, dashboard2]) { - await expect - .poll( - async () => { - const response = await apiGetDashboard(page, dashboard.id, { 
- failOnStatusCode: false, - }); - return response.status(); - }, - { - timeout: 10000, - message: `Dashboard ${dashboard.id} should return 404`, - }, - ) - .toBe(404); + await expectDeleted(page, ENDPOINTS.DASHBOARD, dashboard.id, { + label: `Dashboard ${dashboard.id}`, + }); } }); @@ -308,20 +289,9 @@ test.describe('import dashboard', () => { await apiDeleteDashboard(page, dashboardId); // Verify it's gone - await expect - .poll( - async () => { - const response = await apiGetDashboard(page, dashboardId, { - failOnStatusCode: false, - }); - return response.status(); - }, - { - timeout: 10000, - message: `Dashboard ${dashboardId} should return 404 after delete`, - }, - ) - .toBe(404); + await expectDeleted(page, ENDPOINTS.DASHBOARD, dashboardId, { + label: `Dashboard ${dashboardId}`, + }); // Refresh to confirm dashboard is no longer in the list await dashboardListPage.goto(); diff --git a/superset-frontend/playwright/tests/dataset/create-dataset.spec.ts b/superset-frontend/playwright/tests/dataset/create-dataset.spec.ts index a1a69e54a93..7dcfa81dd61 100644 --- a/superset-frontend/playwright/tests/dataset/create-dataset.spec.ts +++ b/superset-frontend/playwright/tests/dataset/create-dataset.spec.ts @@ -27,193 +27,190 @@ import { ChartCreationPage } from '../../pages/ChartCreationPage'; import { ENDPOINTS } from '../../helpers/api/dataset'; import { waitForPost } from '../../helpers/api/intercepts'; import { expectStatusOneOf } from '../../helpers/api/assertions'; -import { apiPostDatabase } from '../../helpers/api/database'; +import { getDatabaseByName } from '../../helpers/api/database'; +import { apiExecuteSql } from '../../helpers/api/sqllab'; -interface GsheetsSetupResult { - sheetName: string; - dbName: string; +interface ExamplesSetupResult { + tableName: string; + dbId: number; createDatasetPage: CreateDatasetPage; } /** - * Sets up gsheets database and navigates to create dataset page. 
- * Skips test if gsheets connector unavailable (test.skip() throws, so no return). - * @param testInfo - Test info for parallelIndex to avoid name collisions in parallel runs - * @returns Setup result with names and page object + * Creates a temporary table in the examples database via SQL Lab, + * then navigates to the create dataset wizard with it pre-selected. + * + * Requires `allow_dml=True` on the examples database (configured in CI setup). + * + * @param page - Playwright page instance + * @param testAssets - Test assets tracker for cleanup + * @param testInfo - Test info for parallelIndex to avoid name collisions + * @returns Setup result with table name, database ID, and page object */ -async function setupGsheetsDataset( +async function setupExamplesDataset( page: Page, - testAssets: TestAssets, + _testAssets: TestAssets, testInfo: TestInfo, -): Promise<GsheetsSetupResult> { - // Public Google Sheet for testing (published to web, no auth required). - // This is a Netflix dataset that is publicly accessible via the Google Visualization API. - // NOTE: This sheet is hosted on an external Google account and is not created by the test itself. - // If this sheet is deleted, its ID changes, or its sharing settings are restricted, - // these tests will start failing when they attempt to create a database pointing at it. - // In that case, create or select a new publicly readable test sheet, update `sheetUrl` - // to use its URL, and update this comment to describe who owns/maintains that sheet - // and the expected access controls (e.g., "anyone with the link can view"). 
- const sheetUrl = - 'https://docs.google.com/spreadsheets/d/19XNqckHGKGGPh83JGFdFGP4Bw9gdXeujq5EoIGwttdM/edit#gid=347941303'; - // Include parallelIndex to avoid collisions when tests run in parallel +): Promise<ExamplesSetupResult> { + // Look up the examples database (always available in CI via load_examples) + const examplesDb = await getDatabaseByName(page, 'examples'); + if (!examplesDb) { + throw new Error( + 'Examples database not found. Ensure "superset load_examples" has run.', + ); + } + const dbId = examplesDb.id; + + // Create a uniquely-named temporary table via SQL Lab const uniqueSuffix = `${Date.now()}_${testInfo.parallelIndex}`; - const sheetName = `test_netflix_${uniqueSuffix}`; - const dbName = `test_gsheets_db_${uniqueSuffix}`; + const tableName = `test_pw_${uniqueSuffix}`; - // Create a Google Sheets database via API - // The catalog must be in `extra` as JSON with engine_params.catalog format - const catalogDict = { [sheetName]: sheetUrl }; - const createDbRes = await apiPostDatabase(page, { - database_name: dbName, - engine: 'gsheets', - sqlalchemy_uri: 'gsheets://', - configuration_method: 'dynamic_form', - expose_in_sqllab: true, - extra: JSON.stringify({ - engine_params: { - catalog: catalogDict, - }, - }), - }); - - // Check if gsheets connector is available - if (!createDbRes.ok()) { - const errorBody = await createDbRes.json(); - const errorText = JSON.stringify(errorBody); - // Skip test if gsheets connector not installed - if ( - errorText.includes('gsheets') || - errorText.includes('No such DB engine') - ) { - await test.info().attach('skip-reason', { - body: `Google Sheets connector unavailable: ${errorText}`, - contentType: 'text/plain', - }); - test.skip(); // throws, no return needed - } - throw new Error(`Failed to create gsheets database: ${errorText}`); + // CI examples DB is always PostgreSQL, so 'public' is the correct schema. 
+ const createTableRes = await apiExecuteSql( + page, + dbId, + `CREATE TABLE ${tableName} AS SELECT 1 AS id, 'test' AS name`, + 'public', + ); + if (!createTableRes.ok()) { + const errorBody = await createTableRes.json().catch(() => ({})); + throw new Error( + `Failed to create temp table "${tableName}": ${JSON.stringify(errorBody)}`, + ); } - const createDbBody = await createDbRes.json(); - const dbId = createDbBody.result?.id ?? createDbBody.id; - if (!dbId) { - throw new Error('Database creation did not return an ID'); - } - testAssets.trackDatabase(dbId); - // Navigate to create dataset page const createDatasetPage = new CreateDatasetPage(page); await createDatasetPage.goto(); await createDatasetPage.waitForPageLoad(); - // Select the Google Sheets database - await createDatasetPage.selectDatabase(dbName); + // Select the examples database, public schema, and temp table. + // Schema is 'public' because the CI examples DB is always PostgreSQL. + await createDatasetPage.selectDatabase('examples'); + await createDatasetPage.selectSchema('public'); + await createDatasetPage.selectTable(tableName); - // Try to select the sheet - if not found due to timeout, skip - try { - await createDatasetPage.selectTable(sheetName); - } catch (error) { - // Only skip on TimeoutError (sheet not loaded); re-throw everything else - if (!(error instanceof Error) || error.name !== 'TimeoutError') { - throw error; - } - await test.info().attach('skip-reason', { - body: `Table "${sheetName}" not found in dropdown after timeout.`, - contentType: 'text/plain', - }); - test.skip(); // throws, no return needed - } - - return { sheetName, dbName, createDatasetPage }; + return { tableName, dbId, createDatasetPage }; } -test('should create a dataset via wizard', async ({ page, testAssets }) => { - const { sheetName, createDatasetPage } = await setupGsheetsDataset( +/** + * Drop a temporary table created during test setup. 
+ * Uses failOnStatusCode: false so cleanup doesn't throw if the table was already removed. + */ +async function dropTempTable( + page: Page, + dbId: number, + tableName: string, +): Promise<void> { + // Schema matches 'public' used in setupExamplesDataset (CI examples DB is PostgreSQL). + await apiExecuteSql( page, - testAssets, - test.info(), + dbId, + `DROP TABLE IF EXISTS ${tableName}`, + 'public', + { failOnStatusCode: false }, ); +} - +// Both tests create a temp table and use the dataset wizard, so they must run serially. +// Uses test.describe only because Playwright's serial mode API requires it - +// (Deviation from "avoid describe" guideline is necessary for functional reasons) +test.describe('create dataset wizard', () => { + test.describe.configure({ mode: 'serial' }); + + test('should create a dataset via wizard', async ({ page, testAssets }) => { + const { tableName, dbId, createDatasetPage } = await setupExamplesDataset( + page, + testAssets, + test.info(), + ); + + // Set up response intercept to capture new dataset ID + const createResponsePromise = waitForPost(page, ENDPOINTS.DATASET, { + pathMatch: true, + }); + + // Click "Create and explore dataset" button + await createDatasetPage.clickCreateAndExploreDataset(); + + // Wait for dataset creation and capture ID for cleanup + const createResponse = expectStatusOneOf( + await createResponsePromise, + [200, 201], + ); + const createBody = await createResponse.json(); + const newDatasetId = createBody.result?.id ?? 
createBody.id; + + if (newDatasetId) { + testAssets.trackDataset(newDatasetId); + } + + // Verify we navigated to Chart Creation page with dataset pre-selected + await page.waitForURL(/.*\/chart\/add.*/); + const chartCreationPage = new ChartCreationPage(page); + await chartCreationPage.waitForPageLoad(); + + // Verify the dataset is pre-selected + await chartCreationPage.expectDatasetSelected(tableName); + + // Select a visualization type and create chart + await chartCreationPage.selectVizType('Table'); + + // Click "Create new chart" to go to Explore + await chartCreationPage.clickCreateNewChart(); + + // Verify we navigated to Explore page + await page.waitForURL(/.*\/explore\/.*/); + const explorePage = new ExplorePage(page); + await explorePage.waitForPageLoad(); + + // Verify the dataset name is shown in Explore + const loadedDatasetName = await explorePage.getDatasetName(); + expect(loadedDatasetName).toContain(tableName); + + // Clean up temp table (dataset cleanup handled by testAssets) + await dropTempTable(page, dbId, tableName); }); - // Click "Create and explore dataset" button - await createDatasetPage.clickCreateAndExploreDataset(); - - // Wait for dataset creation and capture ID for cleanup - const createResponse = expectStatusOneOf( - await createResponsePromise, - [200, 201], - ); - const createBody = await createResponse.json(); - const newDatasetId = createBody.result?.id ?? 
createBody.id; - - if (newDatasetId) { - testAssets.trackDataset(newDatasetId); - } - - // Verify we navigated to Chart Creation page with dataset pre-selected - await page.waitForURL(/.*\/chart\/add.*/); - const chartCreationPage = new ChartCreationPage(page); - await chartCreationPage.waitForPageLoad(); - - // Verify the dataset is pre-selected - await chartCreationPage.expectDatasetSelected(sheetName); - - // Select a visualization type and create chart - await chartCreationPage.selectVizType('Table'); - - // Click "Create new chart" to go to Explore - await chartCreationPage.clickCreateNewChart(); - - // Verify we navigated to Explore page - await page.waitForURL(/.*\/explore\/.*/); - const explorePage = new ExplorePage(page); - await explorePage.waitForPageLoad(); - - // Verify the dataset name is shown in Explore - const loadedDatasetName = await explorePage.getDatasetName(); - expect(loadedDatasetName).toContain(sheetName); -}); - -test('should create a dataset without exploring', async ({ - page, - testAssets, -}) => { - const { sheetName, createDatasetPage } = await setupGsheetsDataset( + test('should create a dataset without exploring', async ({ page, testAssets, - test.info(), - ); + }) => { + const { tableName, dbId, createDatasetPage } = await setupExamplesDataset( + page, + testAssets, + test.info(), + ); - // Set up response intercept to capture dataset ID - const createResponsePromise = waitForPost(page, ENDPOINTS.DATASET, { - pathMatch: true, + // Set up response intercept to capture dataset ID + const createResponsePromise = waitForPost(page, ENDPOINTS.DATASET, { + pathMatch: true, + }); + + // Click "Create dataset" (not explore) + await createDatasetPage.clickCreateDataset(); + + // Capture dataset ID from response for cleanup + const createResponse = expectStatusOneOf( + await createResponsePromise, + [200, 201], + ); + const createBody = await createResponse.json(); + const datasetId = createBody.result?.id ?? 
createBody.id; + if (datasetId) { + testAssets.trackDataset(datasetId); + } + + // Verify redirect to dataset list (not chart creation) + // Note: "Create dataset" action does not show a toast + await page.waitForURL(/.*tablemodelview\/list.*/); + + // Wait for table load, verify row visible + const datasetListPage = new DatasetListPage(page); + await datasetListPage.waitForTableLoad(); + await expect(datasetListPage.getDatasetRow(tableName)).toBeVisible(); + + // Clean up temp table (dataset cleanup handled by testAssets) + await dropTempTable(page, dbId, tableName); }); - - // Click "Create dataset" (not explore) - await createDatasetPage.clickCreateDataset(); - - // Capture dataset ID from response for cleanup - const createResponse = expectStatusOneOf( - await createResponsePromise, - [200, 201], - ); - const createBody = await createResponse.json(); - const datasetId = createBody.result?.id ?? createBody.id; - if (datasetId) { - testAssets.trackDataset(datasetId); - } - - // Verify redirect to dataset list (not chart creation) - // Note: "Create dataset" action does not show a toast - await page.waitForURL(/.*tablemodelview\/list.*/); - - // Wait for table load, verify row visible - const datasetListPage = new DatasetListPage(page); - await datasetListPage.waitForTableLoad(); - await expect(datasetListPage.getDatasetRow(sheetName)).toBeVisible(); }); diff --git a/superset-frontend/playwright/tests/dataset/dataset-list.spec.ts b/superset-frontend/playwright/tests/dataset/dataset-list.spec.ts index d9b781507ea..e0b281f6232 100644 --- a/superset-frontend/playwright/tests/dataset/dataset-list.spec.ts +++ b/superset-frontend/playwright/tests/dataset/dataset-list.spec.ts @@ -18,7 +18,6 @@ */ import { testWithAssets, expect } from '../../helpers/fixtures'; -import path from 'path'; import { DatasetListPage } from '../../pages/DatasetListPage'; import { ExplorePage } from '../../pages/ExplorePage'; import { @@ -31,6 +30,7 @@ import { import { Toast } from 
'../../components/core'; import { apiDeleteDataset, + apiExportDatasets, apiGetDataset, apiPostVirtualDataset, getDatasetByName, @@ -43,6 +43,7 @@ import { waitForPut, } from '../../helpers/api/intercepts'; import { + expectDeleted, expectStatusOneOf, expectValidExportZip, } from '../../helpers/api/assertions'; @@ -135,17 +136,9 @@ test('should delete a dataset with confirmation', async ({ await expect(datasetListPage.getDatasetRow(datasetName)).not.toBeVisible(); // Verify via API that dataset no longer exists (404) - await expect - .poll( - async () => { - const response = await apiGetDataset(page, datasetId, { - failOnStatusCode: false, - }); - return response.status(); - }, - { timeout: 10000, message: `Dataset ${datasetId} should return 404` }, - ) - .toBe(404); + await expectDeleted(page, ENDPOINTS.DATASET, datasetId, { + label: `Dataset ${datasetId}`, + }); }); test('should duplicate a dataset with new name', async ({ @@ -420,34 +413,17 @@ test('should bulk delete multiple datasets', async ({ await expect(datasetListPage.getDatasetRow(dataset2.name)).not.toBeVisible(); // Verify via API that datasets no longer exist (404) - // Use polling with explicit timeout since deletes may be async - await expect - .poll( - async () => { - const response = await apiGetDataset(page, dataset1.id, { - failOnStatusCode: false, - }); - return response.status(); - }, - { timeout: 10000, message: `Dataset ${dataset1.id} should return 404` }, - ) - .toBe(404); - await expect - .poll( - async () => { - const response = await apiGetDataset(page, dataset2.id, { - failOnStatusCode: false, - }); - return response.status(); - }, - { timeout: 10000, message: `Dataset ${dataset2.id} should return 404` }, - ) - .toBe(404); + await expectDeleted(page, ENDPOINTS.DATASET, dataset1.id, { + label: `Dataset ${dataset1.id}`, + }); + await expectDeleted(page, ENDPOINTS.DATASET, dataset2.id, { + label: `Dataset ${dataset2.id}`, + }); }); -// Import test uses a fixed dataset name from the zip 
fixture. +// Import test uses export-then-reimport approach (no static fixture needed). // Uses test.describe only because Playwright's serial mode API requires it - -// this prevents race conditions when parallel workers import the same fixture. +// this prevents race conditions when parallel workers import the same dataset. // (Deviation from "avoid describe" guideline is necessary for functional reasons) test.describe('import dataset', () => { test.describe.configure({ mode: 'serial' }); @@ -456,22 +432,33 @@ test.describe('import dataset', () => { datasetListPage, testAssets, }) => { - // Dataset name from fixture (test_netflix_1768502050965) - // Note: Fixture contains a Google Sheets dataset backed by shillelagh[gsheetsapi], - // which is a base dependency — import failure fails the test hard (no skip). - const importedDatasetName = 'test_netflix_1768502050965'; - const fixturePath = path.resolve( - __dirname, - '../../fixtures/dataset_export.zip', + test.setTimeout(60_000); + + // Create a dataset, export it via API, then delete it, then reimport via UI + const { id: datasetId, name: datasetName } = await createTestDataset( + page, + testAssets, + test.info(), + { prefix: 'test_import' }, ); - // Cleanup: Delete any existing dataset with the same name from previous runs - const existingDataset = await getDatasetByName(page, importedDatasetName); - if (existingDataset) { - await apiDeleteDataset(page, existingDataset.id, { - failOnStatusCode: false, - }); - } + // Export the dataset via API to get a zip buffer + const exportResponse = await apiExportDatasets(page, [datasetId]); + expect(exportResponse.ok()).toBe(true); + const exportBuffer = await exportResponse.body(); + + // Delete the dataset so reimport creates it fresh + await apiDeleteDataset(page, datasetId); + + // Verify it's gone + await expectDeleted(page, ENDPOINTS.DATASET, datasetId, { + label: `Dataset ${datasetId}`, + }); + + // Refresh to confirm dataset is no longer in the list + await 
datasetListPage.goto(); + await datasetListPage.waitForTableLoad(); + await expect(datasetListPage.getDatasetRow(datasetName)).not.toBeVisible(); // Click the import button await datasetListPage.clickImportButton(); @@ -480,11 +467,10 @@ test.describe('import dataset', () => { const importModal = new ImportDatasetModal(page); await importModal.waitForReady(); - // Upload the fixture zip file - await importModal.uploadFile(fixturePath); + // Upload the exported zip via buffer (no temp file needed) + await importModal.uploadFileBuffer(exportBuffer); // Set up response intercept to catch the import POST - // Use pathMatch to avoid false matches if URL lacks trailing slash let importResponsePromise = waitForPost(page, ENDPOINTS.DATASET_IMPORT, { pathMatch: true, }); @@ -496,35 +482,27 @@ test.describe('import dataset', () => { let importResponse = await importResponsePromise; // Handle overwrite confirmation if dataset already exists - // First response may be 409/422 indicating overwrite is required - this is expected + // First response may be 409/422 indicating overwrite is required const overwriteInput = importModal.getOverwriteInput(); await overwriteInput .waitFor({ state: 'visible', timeout: 3000 }) .catch(error => { - // Only ignore TimeoutError (input not visible); re-throw other errors if (!(error instanceof Error) || error.name !== 'TimeoutError') { throw error; } }); if (await overwriteInput.isVisible()) { - // Set up new intercept for the actual import after overwrite confirmation importResponsePromise = waitForPost(page, ENDPOINTS.DATASET_IMPORT, { pathMatch: true, }); await importModal.fillOverwriteConfirmation(); await importModal.clickImport(); - // Wait for the second (final) import response importResponse = await importResponsePromise; } - // Fail hard if dataset import fails. - // The fixture contains a gsheets dataset; shillelagh[gsheetsapi] is a base - // dependency (pyproject.toml), so the engine is always available in CI. 
- if (!importResponse.ok()) { - const errorBody = await importResponse.json().catch(() => ({})); - throw new Error(`Import failed: ${JSON.stringify(errorBody)}`); - } + // Verify import succeeded + expectStatusOneOf(importResponse, [200]); // Modal should close on success await importModal.waitForHidden({ timeout: TIMEOUT.FILE_IMPORT }); @@ -533,19 +511,19 @@ test.describe('import dataset', () => { const toast = new Toast(page); await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 }); - // Refresh the page to see the imported dataset + // Refresh to see the imported dataset await datasetListPage.goto(); await datasetListPage.waitForTableLoad(); // Verify dataset appears in list - await expect( - datasetListPage.getDatasetRow(importedDatasetName), - ).toBeVisible(); + await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible(); - // Get dataset ID for cleanup - const importedDataset = await getDatasetByName(page, importedDatasetName); - expect(importedDataset).not.toBeNull(); - testAssets.trackDataset(importedDataset!.id); + // Track for cleanup: the dataset import API returns {"message": "OK"} + // with no ID, so look up the reimported dataset by name. + const reimported = await getDatasetByName(page, datasetName); + if (reimported) { + testAssets.trackDataset(reimported.id); + } }); });