mirror of
https://github.com/apache/superset.git
synced 2026-04-19 16:14:52 +00:00
test(playwright): additional dataset list playwright tests (#36684)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
@@ -0,0 +1,219 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { test, expect } from '../../../helpers/fixtures/testAssets';
|
||||
import type { TestAssets } from '../../../helpers/fixtures/testAssets';
|
||||
import type { Page, TestInfo } from '@playwright/test';
|
||||
import { ExplorePage } from '../../../pages/ExplorePage';
|
||||
import { CreateDatasetPage } from '../../../pages/CreateDatasetPage';
|
||||
import { DatasetListPage } from '../../../pages/DatasetListPage';
|
||||
import { ChartCreationPage } from '../../../pages/ChartCreationPage';
|
||||
import { ENDPOINTS } from '../../../helpers/api/dataset';
|
||||
import { waitForPost } from '../../../helpers/api/intercepts';
|
||||
import { expectStatusOneOf } from '../../../helpers/api/assertions';
|
||||
import { apiPostDatabase } from '../../../helpers/api/database';
|
||||
|
||||
/**
 * Values produced by setupGsheetsDataset() and consumed by the tests below.
 */
interface GsheetsSetupResult {
  // Sheet name registered in the gsheets catalog (unique per run/worker).
  sheetName: string;
  // Name of the database created for the sheet (unique per run/worker).
  dbName: string;
  // Page object already navigated to the create-dataset wizard.
  createDatasetPage: CreateDatasetPage;
}
|
||||
|
||||
/**
|
||||
* Sets up gsheets database and navigates to create dataset page.
|
||||
* Skips test if gsheets connector unavailable (test.skip() throws, so no return).
|
||||
* @param testInfo - Test info for parallelIndex to avoid name collisions in parallel runs
|
||||
* @returns Setup result with names and page object
|
||||
*/
|
||||
async function setupGsheetsDataset(
|
||||
page: Page,
|
||||
testAssets: TestAssets,
|
||||
testInfo: TestInfo,
|
||||
): Promise<GsheetsSetupResult> {
|
||||
// Public Google Sheet for testing (published to web, no auth required).
|
||||
// This is a Netflix dataset that is publicly accessible via the Google Visualization API.
|
||||
// NOTE: This sheet is hosted on an external Google account and is not created by the test itself.
|
||||
// If this sheet is deleted, its ID changes, or its sharing settings are restricted,
|
||||
// these tests will start failing when they attempt to create a database pointing at it.
|
||||
// In that case, create or select a new publicly readable test sheet, update `sheetUrl`
|
||||
// to use its URL, and update this comment to describe who owns/maintains that sheet
|
||||
// and the expected access controls (e.g., "anyone with the link can view").
|
||||
const sheetUrl =
|
||||
'https://docs.google.com/spreadsheets/d/19XNqckHGKGGPh83JGFdFGP4Bw9gdXeujq5EoIGwttdM/edit#gid=347941303';
|
||||
// Include parallelIndex to avoid collisions when tests run in parallel
|
||||
const uniqueSuffix = `${Date.now()}_${testInfo.parallelIndex}`;
|
||||
const sheetName = `test_netflix_${uniqueSuffix}`;
|
||||
const dbName = `test_gsheets_db_${uniqueSuffix}`;
|
||||
|
||||
// Create a Google Sheets database via API
|
||||
// The catalog must be in `extra` as JSON with engine_params.catalog format
|
||||
const catalogDict = { [sheetName]: sheetUrl };
|
||||
const createDbRes = await apiPostDatabase(page, {
|
||||
database_name: dbName,
|
||||
engine: 'gsheets',
|
||||
sqlalchemy_uri: 'gsheets://',
|
||||
configuration_method: 'dynamic_form',
|
||||
expose_in_sqllab: true,
|
||||
extra: JSON.stringify({
|
||||
engine_params: {
|
||||
catalog: catalogDict,
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
// Check if gsheets connector is available
|
||||
if (!createDbRes.ok()) {
|
||||
const errorBody = await createDbRes.json();
|
||||
const errorText = JSON.stringify(errorBody);
|
||||
// Skip test if gsheets connector not installed
|
||||
if (
|
||||
errorText.includes('gsheets') ||
|
||||
errorText.includes('No such DB engine')
|
||||
) {
|
||||
await test.info().attach('skip-reason', {
|
||||
body: `Google Sheets connector unavailable: ${errorText}`,
|
||||
contentType: 'text/plain',
|
||||
});
|
||||
test.skip(); // throws, no return needed
|
||||
}
|
||||
throw new Error(`Failed to create gsheets database: ${errorText}`);
|
||||
}
|
||||
|
||||
const createDbBody = await createDbRes.json();
|
||||
const dbId = createDbBody.result?.id ?? createDbBody.id;
|
||||
if (!dbId) {
|
||||
throw new Error('Database creation did not return an ID');
|
||||
}
|
||||
testAssets.trackDatabase(dbId);
|
||||
|
||||
// Navigate to create dataset page
|
||||
const createDatasetPage = new CreateDatasetPage(page);
|
||||
await createDatasetPage.goto();
|
||||
await createDatasetPage.waitForPageLoad();
|
||||
|
||||
// Select the Google Sheets database
|
||||
await createDatasetPage.selectDatabase(dbName);
|
||||
|
||||
// Try to select the sheet - if not found due to timeout, skip
|
||||
try {
|
||||
await createDatasetPage.selectTable(sheetName);
|
||||
} catch (error) {
|
||||
// Only skip on TimeoutError (sheet not loaded); re-throw everything else
|
||||
if (!(error instanceof Error) || error.name !== 'TimeoutError') {
|
||||
throw error;
|
||||
}
|
||||
await test.info().attach('skip-reason', {
|
||||
body: `Table "${sheetName}" not found in dropdown after timeout.`,
|
||||
contentType: 'text/plain',
|
||||
});
|
||||
test.skip(); // throws, no return needed
|
||||
}
|
||||
|
||||
return { sheetName, dbName, createDatasetPage };
|
||||
}
|
||||
|
||||
test('should create a dataset via wizard', async ({ page, testAssets }) => {
|
||||
const { sheetName, createDatasetPage } = await setupGsheetsDataset(
|
||||
page,
|
||||
testAssets,
|
||||
test.info(),
|
||||
);
|
||||
|
||||
// Set up response intercept to capture new dataset ID
|
||||
const createResponsePromise = waitForPost(page, ENDPOINTS.DATASET, {
|
||||
pathMatch: true,
|
||||
});
|
||||
|
||||
// Click "Create and explore dataset" button
|
||||
await createDatasetPage.clickCreateAndExploreDataset();
|
||||
|
||||
// Wait for dataset creation and capture ID for cleanup
|
||||
const createResponse = expectStatusOneOf(
|
||||
await createResponsePromise,
|
||||
[200, 201],
|
||||
);
|
||||
const createBody = await createResponse.json();
|
||||
const newDatasetId = createBody.result?.id ?? createBody.id;
|
||||
|
||||
if (newDatasetId) {
|
||||
testAssets.trackDataset(newDatasetId);
|
||||
}
|
||||
|
||||
// Verify we navigated to Chart Creation page with dataset pre-selected
|
||||
await page.waitForURL(/.*\/chart\/add.*/);
|
||||
const chartCreationPage = new ChartCreationPage(page);
|
||||
await chartCreationPage.waitForPageLoad();
|
||||
|
||||
// Verify the dataset is pre-selected
|
||||
await chartCreationPage.expectDatasetSelected(sheetName);
|
||||
|
||||
// Select a visualization type and create chart
|
||||
await chartCreationPage.selectVizType('Table');
|
||||
|
||||
// Click "Create new chart" to go to Explore
|
||||
await chartCreationPage.clickCreateNewChart();
|
||||
|
||||
// Verify we navigated to Explore page
|
||||
await page.waitForURL(/.*\/explore\/.*/);
|
||||
const explorePage = new ExplorePage(page);
|
||||
await explorePage.waitForPageLoad();
|
||||
|
||||
// Verify the dataset name is shown in Explore
|
||||
const loadedDatasetName = await explorePage.getDatasetName();
|
||||
expect(loadedDatasetName).toContain(sheetName);
|
||||
});
|
||||
|
||||
test('should create a dataset without exploring', async ({
|
||||
page,
|
||||
testAssets,
|
||||
}) => {
|
||||
const { sheetName, createDatasetPage } = await setupGsheetsDataset(
|
||||
page,
|
||||
testAssets,
|
||||
test.info(),
|
||||
);
|
||||
|
||||
// Set up response intercept to capture dataset ID
|
||||
const createResponsePromise = waitForPost(page, ENDPOINTS.DATASET, {
|
||||
pathMatch: true,
|
||||
});
|
||||
|
||||
// Click "Create dataset" (not explore)
|
||||
await createDatasetPage.clickCreateDataset();
|
||||
|
||||
// Capture dataset ID from response for cleanup
|
||||
const createResponse = expectStatusOneOf(
|
||||
await createResponsePromise,
|
||||
[200, 201],
|
||||
);
|
||||
const createBody = await createResponse.json();
|
||||
const datasetId = createBody.result?.id ?? createBody.id;
|
||||
if (datasetId) {
|
||||
testAssets.trackDataset(datasetId);
|
||||
}
|
||||
|
||||
// Verify redirect to dataset list (not chart creation)
|
||||
// Note: "Create dataset" action does not show a toast
|
||||
await page.waitForURL(/.*tablemodelview\/list.*/);
|
||||
|
||||
// Wait for table load, verify row visible
|
||||
const datasetListPage = new DatasetListPage(page);
|
||||
await datasetListPage.waitForTableLoad();
|
||||
await expect(datasetListPage.getDatasetRow(sheetName)).toBeVisible();
|
||||
});
|
||||
@@ -17,76 +17,91 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { test, expect } from '@playwright/test';
|
||||
import {
|
||||
test as testWithAssets,
|
||||
expect,
|
||||
} from '../../../helpers/fixtures/testAssets';
|
||||
import type { Response } from '@playwright/test';
|
||||
import path from 'path';
|
||||
import * as unzipper from 'unzipper';
|
||||
import { DatasetListPage } from '../../../pages/DatasetListPage';
|
||||
import { ExplorePage } from '../../../pages/ExplorePage';
|
||||
import { ConfirmDialog } from '../../../components/modals/ConfirmDialog';
|
||||
import { DeleteConfirmationModal } from '../../../components/modals/DeleteConfirmationModal';
|
||||
import { ImportDatasetModal } from '../../../components/modals/ImportDatasetModal';
|
||||
import { DuplicateDatasetModal } from '../../../components/modals/DuplicateDatasetModal';
|
||||
import { EditDatasetModal } from '../../../components/modals/EditDatasetModal';
|
||||
import { Toast } from '../../../components/core/Toast';
|
||||
import {
|
||||
apiDeleteDataset,
|
||||
apiGetDataset,
|
||||
apiPostVirtualDataset,
|
||||
getDatasetByName,
|
||||
createTestVirtualDataset,
|
||||
ENDPOINTS,
|
||||
} from '../../../helpers/api/dataset';
|
||||
import { createTestDataset } from './dataset-test-helpers';
|
||||
import {
|
||||
waitForGet,
|
||||
waitForPost,
|
||||
waitForPut,
|
||||
} from '../../../helpers/api/intercepts';
|
||||
import { expectStatusOneOf } from '../../../helpers/api/assertions';
|
||||
import { TIMEOUT } from '../../../utils/constants';
|
||||
|
||||
/**
|
||||
* Test data constants
|
||||
* PHYSICAL_DATASET: A physical dataset from examples (for navigation tests)
|
||||
* Tests that need virtual datasets (duplicate/delete) create their own hermetic data
|
||||
* Extend testWithAssets with datasetListPage navigation (beforeEach equivalent).
|
||||
*/
|
||||
/** Fixed dataset names for tests that rely on pre-loaded example data. */
const TEST_DATASETS = {
  /** Physical dataset for basic navigation tests */
  PHYSICAL_DATASET: 'birth_names',
} as const;
|
||||
|
||||
/**
|
||||
* Dataset List E2E Tests
|
||||
*
|
||||
* Uses flat test() structure per project convention (matches login.spec.ts).
|
||||
* Shared state and hooks are at file scope.
|
||||
*/
|
||||
|
||||
// File-scope state (reset in beforeEach)
|
||||
let datasetListPage: DatasetListPage;
|
||||
let explorePage: ExplorePage;
|
||||
let testResources: { datasetIds: number[] } = { datasetIds: [] };
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
datasetListPage = new DatasetListPage(page);
|
||||
explorePage = new ExplorePage(page);
|
||||
testResources = { datasetIds: [] }; // Reset for each test
|
||||
|
||||
// Navigate to dataset list page
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
const test = testWithAssets.extend<{ datasetListPage: DatasetListPage }>({
|
||||
datasetListPage: async ({ page }, use) => {
|
||||
const datasetListPage = new DatasetListPage(page);
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
await use(datasetListPage);
|
||||
},
|
||||
});
|
||||
|
||||
test.afterEach(async ({ page }) => {
|
||||
// Cleanup any resources created during the test
|
||||
const promises = [];
|
||||
for (const datasetId of testResources.datasetIds) {
|
||||
promises.push(
|
||||
apiDeleteDataset(page, datasetId, {
|
||||
failOnStatusCode: false,
|
||||
}).catch(error => {
|
||||
// Log cleanup failures to avoid silent resource leaks
|
||||
console.warn(
|
||||
`[Cleanup] Failed to delete dataset ${datasetId}:`,
|
||||
String(error),
|
||||
);
|
||||
}),
|
||||
/**
|
||||
* Helper to validate an export zip response.
|
||||
* Verifies headers, parses zip contents, and validates expected structure.
|
||||
*/
|
||||
async function expectValidExportZip(
|
||||
response: Response,
|
||||
options: { minDatasetCount?: number; checkContentDisposition?: boolean } = {},
|
||||
): Promise<void> {
|
||||
const { minDatasetCount = 1, checkContentDisposition = false } = options;
|
||||
|
||||
// Verify headers
|
||||
expect(response.headers()['content-type']).toContain('application/zip');
|
||||
if (checkContentDisposition) {
|
||||
expect(response.headers()['content-disposition']).toMatch(
|
||||
/filename=.*dataset_export.*\.zip/,
|
||||
);
|
||||
}
|
||||
await Promise.all(promises);
|
||||
});
|
||||
|
||||
// Parse and validate zip contents
|
||||
const body = await response.body();
|
||||
expect(body.length).toBeGreaterThan(0);
|
||||
|
||||
const entries: string[] = [];
|
||||
const directory = await unzipper.Open.buffer(body);
|
||||
directory.files.forEach(file => entries.push(file.path));
|
||||
|
||||
// Validate structure
|
||||
const datasetYamlFiles = entries.filter(
|
||||
entry => entry.includes('datasets/') && entry.endsWith('.yaml'),
|
||||
);
|
||||
expect(datasetYamlFiles.length).toBeGreaterThanOrEqual(minDatasetCount);
|
||||
expect(entries.some(entry => entry.endsWith('metadata.yaml'))).toBe(true);
|
||||
}
|
||||
|
||||
test('should navigate to Explore when dataset name is clicked', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
}) => {
|
||||
// Use existing physical dataset (loaded in CI via --load-examples)
|
||||
const datasetName = TEST_DATASETS.PHYSICAL_DATASET;
|
||||
const explorePage = new ExplorePage(page);
|
||||
|
||||
// Use existing example dataset (hermetic - loaded in CI via --load-examples)
|
||||
const datasetName = 'members_channels_2';
|
||||
const dataset = await getDatasetByName(page, datasetName);
|
||||
expect(dataset).not.toBeNull();
|
||||
|
||||
@@ -108,16 +123,20 @@ test('should navigate to Explore when dataset name is clicked', async ({
|
||||
await expect(explorePage.getVizSwitcher()).toContainText('Table');
|
||||
});
|
||||
|
||||
test('should delete a dataset with confirmation', async ({ page }) => {
|
||||
// Create a virtual dataset for this test (hermetic - no dependency on examples)
|
||||
const datasetName = `test_delete_${Date.now()}`;
|
||||
const datasetId = await createTestVirtualDataset(page, datasetName);
|
||||
expect(datasetId).not.toBeNull();
|
||||
test('should delete a dataset with confirmation', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Create throwaway dataset for deletion
|
||||
const { id: datasetId, name: datasetName } = await createTestDataset(
|
||||
page,
|
||||
testAssets,
|
||||
test.info(),
|
||||
{ prefix: 'test_delete' },
|
||||
);
|
||||
|
||||
// Track for cleanup in case test fails partway through
|
||||
testResources = { datasetIds: [datasetId!] };
|
||||
|
||||
// Refresh page to see new dataset
|
||||
// Refresh to see the new dataset
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
@@ -148,31 +167,44 @@ test('should delete a dataset with confirmation', async ({ page }) => {
|
||||
|
||||
// Verify dataset is removed from list
|
||||
await expect(datasetListPage.getDatasetRow(datasetName)).not.toBeVisible();
|
||||
|
||||
// Verify via API that dataset no longer exists (404)
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const response = await apiGetDataset(page, datasetId, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
return response.status();
|
||||
},
|
||||
{ timeout: 10000, message: `Dataset ${datasetId} should return 404` },
|
||||
)
|
||||
.toBe(404);
|
||||
});
|
||||
|
||||
test('should duplicate a dataset with new name', async ({ page }) => {
|
||||
// Create a virtual dataset for this test (hermetic - no dependency on examples)
|
||||
const originalName = `test_original_${Date.now()}`;
|
||||
const originalId = await createTestVirtualDataset(page, originalName);
|
||||
expect(originalId).not.toBeNull();
|
||||
test('should duplicate a dataset with new name', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Create a virtual dataset first (duplicate UI only works for virtual datasets)
|
||||
const { id: originalId, name: originalName } = await createTestDataset(
|
||||
page,
|
||||
testAssets,
|
||||
test.info(),
|
||||
{ prefix: 'test_duplicate_source' },
|
||||
);
|
||||
const duplicateName = `duplicate_${Date.now()}_${test.info().parallelIndex}`;
|
||||
|
||||
// Track original for cleanup
|
||||
testResources = { datasetIds: [originalId!] };
|
||||
|
||||
const duplicateName = `duplicate_${originalName}`;
|
||||
|
||||
// Refresh page to see new dataset
|
||||
// Navigate to list and verify original dataset is visible
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify original dataset is visible in list
|
||||
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible();
|
||||
|
||||
// Set up response intercept to capture duplicate dataset ID
|
||||
const duplicateResponsePromise = page.waitForResponse(
|
||||
response =>
|
||||
response.url().includes(`${ENDPOINTS.DATASET}duplicate`) &&
|
||||
response.status() === 201,
|
||||
const duplicateResponsePromise = waitForPost(
|
||||
page,
|
||||
ENDPOINTS.DATASET_DUPLICATE,
|
||||
);
|
||||
|
||||
// Click duplicate action button
|
||||
@@ -188,13 +220,17 @@ test('should duplicate a dataset with new name', async ({ page }) => {
|
||||
// Click the Duplicate button
|
||||
await duplicateModal.clickDuplicate();
|
||||
|
||||
// Get the duplicate dataset ID from response
|
||||
const duplicateResponse = await duplicateResponsePromise;
|
||||
// Get the duplicate dataset ID from response (handle both response shapes)
|
||||
const duplicateResponse = expectStatusOneOf(
|
||||
await duplicateResponsePromise,
|
||||
[200, 201],
|
||||
);
|
||||
const duplicateData = await duplicateResponse.json();
|
||||
const duplicateId = duplicateData.id;
|
||||
const duplicateId = duplicateData.result?.id ?? duplicateData.id;
|
||||
expect(duplicateId, 'Duplicate API should return dataset id').toBeTruthy();
|
||||
|
||||
// Track both original and duplicate for cleanup
|
||||
testResources = { datasetIds: [originalId!, duplicateId] };
|
||||
// Track duplicate for cleanup (original is already tracked by createTestDataset)
|
||||
testAssets.trackDataset(duplicateId);
|
||||
|
||||
// Modal should close
|
||||
await duplicateModal.waitForHidden();
|
||||
@@ -210,17 +246,437 @@ test('should duplicate a dataset with new name', async ({ page }) => {
|
||||
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible();
|
||||
await expect(datasetListPage.getDatasetRow(duplicateName)).toBeVisible();
|
||||
|
||||
// API Verification: Compare original and duplicate datasets
|
||||
const originalResponseData = await apiGetDataset(page, originalId!);
|
||||
const originalDataFull = await originalResponseData.json();
|
||||
const duplicateResponseData = await apiGetDataset(page, duplicateId);
|
||||
const duplicateDataFull = await duplicateResponseData.json();
|
||||
// API Verification: Fetch both datasets via detail API for consistent comparison
|
||||
// (list API may return undefined for fields that detail API returns as null)
|
||||
const [originalDetailRes, duplicateDetailRes] = await Promise.all([
|
||||
apiGetDataset(page, originalId),
|
||||
apiGetDataset(page, duplicateId),
|
||||
]);
|
||||
const originalDetail = (await originalDetailRes.json()).result;
|
||||
const duplicateDetail = (await duplicateDetailRes.json()).result;
|
||||
|
||||
// Verify key properties were copied correctly
|
||||
expect(duplicateDataFull.result.sql).toBe(originalDataFull.result.sql);
|
||||
expect(duplicateDataFull.result.database.id).toBe(
|
||||
originalDataFull.result.database.id,
|
||||
);
|
||||
expect(duplicateDetail.sql).toBe(originalDetail.sql);
|
||||
expect(duplicateDetail.database.id).toBe(originalDetail.database.id);
|
||||
expect(duplicateDetail.schema).toBe(originalDetail.schema);
|
||||
// Name should be different (the duplicate name)
|
||||
expect(duplicateDataFull.result.table_name).toBe(duplicateName);
|
||||
expect(duplicateDetail.table_name).toBe(duplicateName);
|
||||
});
|
||||
|
||||
test('should export a dataset as a zip file', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
}) => {
|
||||
// Use existing example dataset
|
||||
const datasetName = 'members_channels_2';
|
||||
const dataset = await getDatasetByName(page, datasetName);
|
||||
expect(dataset).not.toBeNull();
|
||||
|
||||
// Verify dataset is visible in list
|
||||
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
|
||||
|
||||
// Set up API response intercept for export endpoint
|
||||
// Note: We intercept the API response instead of relying on download events because
|
||||
// Superset uses blob downloads (createObjectURL) which don't trigger Playwright's
|
||||
// download event consistently, especially in app-prefix configurations.
|
||||
const exportResponsePromise = waitForGet(page, ENDPOINTS.DATASET_EXPORT);
|
||||
|
||||
// Click export action button
|
||||
await datasetListPage.clickExportAction(datasetName);
|
||||
|
||||
// Wait for export API response and validate zip contents
|
||||
const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
|
||||
await expectValidExportZip(exportResponse, { checkContentDisposition: true });
|
||||
});
|
||||
|
||||
test('should export multiple datasets via bulk select action', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Create 2 throwaway datasets for bulk export
|
||||
const [dataset1, dataset2] = await Promise.all([
|
||||
createTestDataset(page, testAssets, test.info(), {
|
||||
prefix: 'bulk_export_1',
|
||||
}),
|
||||
createTestDataset(page, testAssets, test.info(), {
|
||||
prefix: 'bulk_export_2',
|
||||
}),
|
||||
]);
|
||||
|
||||
// Refresh to see new datasets
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify both datasets are visible in list
|
||||
await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible();
|
||||
await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible();
|
||||
|
||||
// Enable bulk select mode
|
||||
await datasetListPage.clickBulkSelectButton();
|
||||
|
||||
// Select both datasets
|
||||
await datasetListPage.selectDatasetCheckbox(dataset1.name);
|
||||
await datasetListPage.selectDatasetCheckbox(dataset2.name);
|
||||
|
||||
// Set up API response intercept for export endpoint
|
||||
const exportResponsePromise = waitForGet(page, ENDPOINTS.DATASET_EXPORT);
|
||||
|
||||
// Click bulk export action
|
||||
await datasetListPage.clickBulkAction('Export');
|
||||
|
||||
// Wait for export API response and validate zip contains multiple datasets
|
||||
const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
|
||||
await expectValidExportZip(exportResponse, { minDatasetCount: 2 });
|
||||
});
|
||||
|
||||
test('should edit dataset name via modal', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Create throwaway dataset for editing
|
||||
const { id: datasetId, name: datasetName } = await createTestDataset(
|
||||
page,
|
||||
testAssets,
|
||||
test.info(),
|
||||
{ prefix: 'test_edit' },
|
||||
);
|
||||
|
||||
// Refresh to see new dataset
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify dataset is visible in list
|
||||
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
|
||||
|
||||
// Click edit action to open modal
|
||||
await datasetListPage.clickEditAction(datasetName);
|
||||
|
||||
// Wait for edit modal to be ready
|
||||
const editModal = new EditDatasetModal(page);
|
||||
await editModal.waitForReady();
|
||||
|
||||
// Enable edit mode by clicking the lock icon
|
||||
await editModal.enableEditMode();
|
||||
|
||||
// Edit the dataset name
|
||||
const newName = `test_renamed_${Date.now()}`;
|
||||
await editModal.fillName(newName);
|
||||
|
||||
// Set up response intercept for save
|
||||
const saveResponsePromise = waitForPut(
|
||||
page,
|
||||
`${ENDPOINTS.DATASET}${datasetId}`,
|
||||
);
|
||||
|
||||
// Click Save button
|
||||
await editModal.clickSave();
|
||||
|
||||
// Handle the "Confirm save" dialog that may appear for datasets with sync columns enabled
|
||||
const confirmDialog = new ConfirmDialog(page);
|
||||
await confirmDialog.clickOk({ timeout: TIMEOUT.CONFIRM_DIALOG });
|
||||
|
||||
// Wait for save to complete and verify success
|
||||
expectStatusOneOf(await saveResponsePromise, [200, 201]);
|
||||
|
||||
// Modal should close
|
||||
await editModal.waitForHidden();
|
||||
|
||||
// Verify success toast appears
|
||||
const toast = new Toast(page);
|
||||
await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 });
|
||||
|
||||
// Verify via API that name was saved
|
||||
const updatedDatasetRes = await apiGetDataset(page, datasetId);
|
||||
const updatedDataset = (await updatedDatasetRes.json()).result;
|
||||
expect(updatedDataset.table_name).toBe(newName);
|
||||
});
|
||||
|
||||
test('should bulk delete multiple datasets', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Create 2 throwaway datasets for bulk delete
|
||||
const [dataset1, dataset2] = await Promise.all([
|
||||
createTestDataset(page, testAssets, test.info(), {
|
||||
prefix: 'bulk_delete_1',
|
||||
}),
|
||||
createTestDataset(page, testAssets, test.info(), {
|
||||
prefix: 'bulk_delete_2',
|
||||
}),
|
||||
]);
|
||||
|
||||
// Refresh to see new datasets
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify both datasets are visible in list
|
||||
await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible();
|
||||
await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible();
|
||||
|
||||
// Enable bulk select mode
|
||||
await datasetListPage.clickBulkSelectButton();
|
||||
|
||||
// Select both datasets
|
||||
await datasetListPage.selectDatasetCheckbox(dataset1.name);
|
||||
await datasetListPage.selectDatasetCheckbox(dataset2.name);
|
||||
|
||||
// Click bulk delete action
|
||||
await datasetListPage.clickBulkAction('Delete');
|
||||
|
||||
// Delete confirmation modal should appear
|
||||
const deleteModal = new DeleteConfirmationModal(page);
|
||||
await deleteModal.waitForVisible();
|
||||
|
||||
// Type "DELETE" to confirm
|
||||
await deleteModal.fillConfirmationInput('DELETE');
|
||||
|
||||
// Click the Delete button
|
||||
await deleteModal.clickDelete();
|
||||
|
||||
// Modal should close
|
||||
await deleteModal.waitForHidden();
|
||||
|
||||
// Verify success toast appears
|
||||
const toast = new Toast(page);
|
||||
await expect(toast.getSuccess()).toBeVisible();
|
||||
|
||||
// Verify both datasets are removed from list
|
||||
await expect(datasetListPage.getDatasetRow(dataset1.name)).not.toBeVisible();
|
||||
await expect(datasetListPage.getDatasetRow(dataset2.name)).not.toBeVisible();
|
||||
|
||||
// Verify via API that datasets no longer exist (404)
|
||||
// Use polling with explicit timeout since deletes may be async
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const response = await apiGetDataset(page, dataset1.id, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
return response.status();
|
||||
},
|
||||
{ timeout: 10000, message: `Dataset ${dataset1.id} should return 404` },
|
||||
)
|
||||
.toBe(404);
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const response = await apiGetDataset(page, dataset2.id, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
return response.status();
|
||||
},
|
||||
{ timeout: 10000, message: `Dataset ${dataset2.id} should return 404` },
|
||||
)
|
||||
.toBe(404);
|
||||
});
|
||||
|
||||
// Import test uses a fixed dataset name from the zip fixture.
|
||||
// Uses test.describe only because Playwright's serial mode API requires it -
|
||||
// this prevents race conditions when parallel workers import the same fixture.
|
||||
// (Deviation from "avoid describe" guideline is necessary for functional reasons)
|
||||
test.describe('import dataset', () => {
|
||||
test.describe.configure({ mode: 'serial' });
|
||||
test('should import a dataset from a zip file', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Dataset name from fixture (test_netflix_1768502050965)
|
||||
// Note: Fixture contains a Google Sheets dataset - test will skip if gsheets connector unavailable
|
||||
const importedDatasetName = 'test_netflix_1768502050965';
|
||||
const fixturePath = path.resolve(
|
||||
__dirname,
|
||||
'../../../fixtures/dataset_export.zip',
|
||||
);
|
||||
|
||||
// Cleanup: Delete any existing dataset with the same name from previous runs
|
||||
const existingDataset = await getDatasetByName(page, importedDatasetName);
|
||||
if (existingDataset) {
|
||||
await apiDeleteDataset(page, existingDataset.id, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
}
|
||||
|
||||
// Click the import button
|
||||
await datasetListPage.clickImportButton();
|
||||
|
||||
// Wait for import modal to be ready
|
||||
const importModal = new ImportDatasetModal(page);
|
||||
await importModal.waitForReady();
|
||||
|
||||
// Upload the fixture zip file
|
||||
await importModal.uploadFile(fixturePath);
|
||||
|
||||
// Set up response intercept to catch the import POST
|
||||
// Use pathMatch to avoid false matches if URL lacks trailing slash
|
||||
let importResponsePromise = waitForPost(page, ENDPOINTS.DATASET_IMPORT, {
|
||||
pathMatch: true,
|
||||
});
|
||||
|
||||
// Click Import button
|
||||
await importModal.clickImport();
|
||||
|
||||
// Wait for first import response
|
||||
let importResponse = await importResponsePromise;
|
||||
|
||||
// Handle overwrite confirmation if dataset already exists
|
||||
// First response may be 409/422 indicating overwrite is required - this is expected
|
||||
const overwriteInput = importModal.getOverwriteInput();
|
||||
await overwriteInput
|
||||
.waitFor({ state: 'visible', timeout: 3000 })
|
||||
.catch(error => {
|
||||
// Only ignore TimeoutError (input not visible); re-throw other errors
|
||||
if (!(error instanceof Error) || error.name !== 'TimeoutError') {
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
if (await overwriteInput.isVisible()) {
|
||||
// Set up new intercept for the actual import after overwrite confirmation
|
||||
importResponsePromise = waitForPost(page, ENDPOINTS.DATASET_IMPORT, {
|
||||
pathMatch: true,
|
||||
});
|
||||
await importModal.fillOverwriteConfirmation();
|
||||
await importModal.clickImport();
|
||||
// Wait for the second (final) import response
|
||||
importResponse = await importResponsePromise;
|
||||
}
|
||||
|
||||
// Check final import response for gsheets connector errors
|
||||
if (!importResponse.ok()) {
|
||||
const errorBody = await importResponse.json().catch(() => ({}));
|
||||
const errorText = JSON.stringify(errorBody);
|
||||
// Skip test if gsheets connector not installed
|
||||
if (
|
||||
errorText.includes('gsheets') ||
|
||||
errorText.includes('No such DB engine') ||
|
||||
errorText.includes('Could not load database driver')
|
||||
) {
|
||||
await test.info().attach('skip-reason', {
|
||||
body: `Import failed due to missing gsheets connector: ${errorText}`,
|
||||
contentType: 'text/plain',
|
||||
});
|
||||
test.skip();
|
||||
return;
|
||||
}
|
||||
// Re-throw other errors
|
||||
throw new Error(`Import failed: ${errorText}`);
|
||||
}
|
||||
|
||||
// Modal should close on success
|
||||
await importModal.waitForHidden({ timeout: TIMEOUT.FILE_IMPORT });
|
||||
|
||||
// Verify success toast appears
|
||||
const toast = new Toast(page);
|
||||
await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 });
|
||||
|
||||
// Refresh the page to see the imported dataset
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify dataset appears in list
|
||||
await expect(
|
||||
datasetListPage.getDatasetRow(importedDatasetName),
|
||||
).toBeVisible();
|
||||
|
||||
// Get dataset ID for cleanup
|
||||
const importedDataset = await getDatasetByName(page, importedDatasetName);
|
||||
expect(importedDataset).not.toBeNull();
|
||||
testAssets.trackDataset(importedDataset!.id);
|
||||
});
|
||||
});
|
||||
|
||||
test('should edit column date format via modal', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Create virtual dataset with a date column for testing
|
||||
// Using SQL to create a dataset with 'ds' column avoids duplication issues
|
||||
const datasetName = `test_date_format_${Date.now()}_${test.info().parallelIndex}`;
|
||||
const baseDataset = await getDatasetByName(page, 'members_channels_2');
|
||||
expect(baseDataset, 'members_channels_2 dataset must exist').not.toBeNull();
|
||||
|
||||
const createResponse = await apiPostVirtualDataset(page, {
|
||||
database: baseDataset!.database.id,
|
||||
schema: baseDataset!.schema ?? null,
|
||||
table_name: datasetName,
|
||||
sql: "SELECT CAST('2024-01-01' AS DATE) as ds, 'test' as name",
|
||||
});
|
||||
expectStatusOneOf(createResponse, [200, 201]);
|
||||
const createBody = await createResponse.json();
|
||||
const datasetId = createBody.result?.id ?? createBody.id;
|
||||
expect(datasetId, 'Virtual dataset creation should return id').toBeTruthy();
|
||||
testAssets.trackDataset(datasetId);
|
||||
|
||||
// Navigate to dataset list, click edit action
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
await datasetListPage.clickEditAction(datasetName);
|
||||
|
||||
// Enable edit mode, navigate to Columns tab
|
||||
const editModal = new EditDatasetModal(page);
|
||||
await editModal.waitForReady();
|
||||
await editModal.enableEditMode();
|
||||
await editModal.clickColumnsTab();
|
||||
|
||||
// Expand 'ds' column row and fill date format (scoped to row)
|
||||
const dateFormat = '%Y-%m-%d';
|
||||
await editModal.fillColumnDateFormat('ds', dateFormat);
|
||||
|
||||
// Save and handle confirmation dialog conditionally
|
||||
await editModal.clickSave();
|
||||
await new ConfirmDialog(page).clickOk({ timeout: TIMEOUT.CONFIRM_DIALOG });
|
||||
await editModal.waitForHidden();
|
||||
|
||||
// Verify via API
|
||||
const updatedRes = await apiGetDataset(page, datasetId);
|
||||
const columns = (await updatedRes.json()).result.columns;
|
||||
const dsColumn = columns.find(
|
||||
(c: { column_name: string }) => c.column_name === 'ds',
|
||||
);
|
||||
expect(dsColumn, 'ds column should exist in dataset').toBeDefined();
|
||||
expect(dsColumn.python_date_format).toBe(dateFormat);
|
||||
});
|
||||
|
||||
test('should edit dataset description via modal', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
testAssets,
|
||||
}) => {
|
||||
// Create throwaway dataset for editing description
|
||||
const { id: datasetId, name: datasetName } = await createTestDataset(
|
||||
page,
|
||||
testAssets,
|
||||
test.info(),
|
||||
{ prefix: 'test_description' },
|
||||
);
|
||||
|
||||
// Navigate to dataset list, click edit action
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
await datasetListPage.clickEditAction(datasetName);
|
||||
|
||||
// Enable edit mode, navigate to Settings tab
|
||||
const editModal = new EditDatasetModal(page);
|
||||
await editModal.waitForReady();
|
||||
await editModal.enableEditMode();
|
||||
await editModal.clickSettingsTab();
|
||||
|
||||
// Fill description field
|
||||
const description = `Test description ${Date.now()}`;
|
||||
await editModal.fillDescription(description);
|
||||
|
||||
// Save and handle confirmation dialog conditionally
|
||||
await editModal.clickSave();
|
||||
await new ConfirmDialog(page).clickOk({ timeout: TIMEOUT.CONFIRM_DIALOG });
|
||||
await editModal.waitForHidden();
|
||||
|
||||
// Verify via API
|
||||
const updatedRes = await apiGetDataset(page, datasetId);
|
||||
const result = (await updatedRes.json()).result;
|
||||
expect(result.description).toBe(description);
|
||||
});
|
||||
|
||||
@@ -0,0 +1,67 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import type { Page, TestInfo } from '@playwright/test';
|
||||
import type { TestAssets } from '../../../helpers/fixtures/testAssets';
|
||||
import { createTestVirtualDataset } from '../../../helpers/api/dataset';
|
||||
|
||||
/** Identifier and generated name of a dataset created by {@link createTestDataset}. */
interface TestDatasetResult {
  /** Dataset id as returned by the dataset-creation helper. */
  id: number;
  /** Generated unique name (prefix + timestamp + worker parallel index). */
  name: string;
}
|
||||
|
||||
/** Optional knobs for {@link createTestDataset}. */
interface CreateTestDatasetOptions {
  /** Prefix for generated name (default: 'test') */
  prefix?: string;
}
|
||||
|
||||
/**
|
||||
* Creates a test virtual dataset.
|
||||
* Uses createTestVirtualDataset() to create a simple virtual dataset for testing.
|
||||
*
|
||||
* Note: The dataset duplicate API only works with virtual datasets. This helper
|
||||
* creates virtual datasets directly to avoid that limitation.
|
||||
*
|
||||
* @example
|
||||
* // Basic usage
|
||||
* const { id, name } = await createTestDataset(page, testAssets, test.info());
|
||||
*
|
||||
* @example
|
||||
* // Custom prefix
|
||||
* const { id, name } = await createTestDataset(page, testAssets, test.info(), {
|
||||
* prefix: 'test_delete',
|
||||
* });
|
||||
*/
|
||||
export async function createTestDataset(
|
||||
page: Page,
|
||||
testAssets: TestAssets,
|
||||
testInfo: TestInfo,
|
||||
options?: CreateTestDatasetOptions,
|
||||
): Promise<TestDatasetResult> {
|
||||
const prefix = options?.prefix ?? 'test';
|
||||
const name = `${prefix}_${Date.now()}_${testInfo.parallelIndex}`;
|
||||
|
||||
const id = await createTestVirtualDataset(page, name);
|
||||
if (!id) {
|
||||
throw new Error(`Failed to create test dataset: ${name}`);
|
||||
}
|
||||
testAssets.trackDataset(id);
|
||||
|
||||
return { id, name };
|
||||
}
|
||||
Reference in New Issue
Block a user