mirror of
https://github.com/apache/superset.git
synced 2026-05-03 06:54:19 +00:00
Compare commits
36 Commits
semantic-l
...
feat-conve
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b08b8e41e8 | ||
|
|
89b0ca0905 | ||
|
|
7f057b1ef8 | ||
|
|
8eb510f21a | ||
|
|
022cd3c2e4 | ||
|
|
6210bfa8b5 | ||
|
|
f726f26a16 | ||
|
|
3fae07398d | ||
|
|
078a57dc2d | ||
|
|
13513c35c4 | ||
|
|
46404692d1 | ||
|
|
8e3164a4f2 | ||
|
|
825b7edcc0 | ||
|
|
8952e80ffd | ||
|
|
e5ecf755fd | ||
|
|
c4369f907f | ||
|
|
f51f1cdd89 | ||
|
|
d65f5ac719 | ||
|
|
67e365b2e3 | ||
|
|
71cabc33e0 | ||
|
|
468cf9a686 | ||
|
|
382928b1e7 | ||
|
|
e830742f36 | ||
|
|
65e71dc429 | ||
|
|
a567408a63 | ||
|
|
f8538d104d | ||
|
|
16eb3229c9 | ||
|
|
e673ffbf6f | ||
|
|
b3e8fea741 | ||
|
|
aa75b805f7 | ||
|
|
bad24486b8 | ||
|
|
551f8968c8 | ||
|
|
f6e7d7bff5 | ||
|
|
51c3e2133d | ||
|
|
4cbdff6338 | ||
|
|
c39a2e89ab |
13
.github/workflows/bashlib.sh
vendored
13
.github/workflows/bashlib.sh
vendored
@@ -117,6 +117,19 @@ testdata() {
|
||||
say "::endgroup::"
|
||||
}
|
||||
|
||||
# Load example data and test users for Playwright CI runs.
# Mirrors the Cypress `testdata` helper above but targets the Playwright jobs.
playwright_testdata() {
  cd "$GITHUB_WORKSPACE"
  say "::group::Load all examples for Playwright tests"
  # must specify PYTHONPATH to make `tests.superset_test_config` importable
  export PYTHONPATH="$GITHUB_WORKSPACE"
  # Install superset itself so the `superset` CLI below is available
  pip install -e .
  # Bring the metadata database up to the latest migration
  superset db upgrade
  # Create the admin/test accounts that the Playwright suites log in with
  superset load_test_users
  # Load the bundled example dashboards/datasets used as fixtures
  superset load_examples
  superset init
  say "::endgroup::"
}
|
||||
|
||||
celery-worker() {
|
||||
cd "$GITHUB_WORKSPACE"
|
||||
say "::group::Start Celery worker"
|
||||
|
||||
2
.github/workflows/superset-e2e.yml
vendored
2
.github/workflows/superset-e2e.yml
vendored
@@ -223,7 +223,7 @@ jobs:
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: testdata
|
||||
run: playwright_testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v5
|
||||
|
||||
2
.github/workflows/superset-playwright.yml
vendored
2
.github/workflows/superset-playwright.yml
vendored
@@ -97,7 +97,7 @@ jobs:
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: testdata
|
||||
run: playwright_testdata
|
||||
- name: Setup Node.js
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
uses: actions/setup-node@v5
|
||||
|
||||
3
superset-frontend/.gitignore
vendored
3
superset-frontend/.gitignore
vendored
@@ -1,6 +1,9 @@
|
||||
coverage/*
|
||||
cypress/screenshots
|
||||
cypress/videos
|
||||
playwright/.auth
|
||||
playwright-report/
|
||||
test-results/
|
||||
src/temp
|
||||
.temp_cache/
|
||||
.tsbuildinfo
|
||||
|
||||
@@ -33,6 +33,9 @@ export default defineConfig({
|
||||
? undefined
|
||||
: '**/experimental/**',
|
||||
|
||||
// Global setup - authenticate once before all tests
|
||||
globalSetup: './playwright/global-setup.ts',
|
||||
|
||||
// Timeout settings
|
||||
timeout: 30000,
|
||||
expect: { timeout: 8000 },
|
||||
@@ -77,10 +80,32 @@ export default defineConfig({
|
||||
|
||||
projects: [
|
||||
{
|
||||
// Default project - uses global authentication for speed
|
||||
// E2E tests login once via global-setup.ts and reuse auth state
|
||||
// Explicitly ignore auth tests (they run in chromium-unauth project)
|
||||
// Also respect the global experimental testIgnore setting
|
||||
name: 'chromium',
|
||||
testIgnore: [
|
||||
'**/tests/auth/**/*.spec.ts',
|
||||
...(process.env.INCLUDE_EXPERIMENTAL ? [] : ['**/experimental/**']),
|
||||
],
|
||||
use: {
|
||||
browserName: 'chromium',
|
||||
testIdAttribute: 'data-test',
|
||||
// Reuse authentication state from global setup (fast E2E tests)
|
||||
storageState: 'playwright/.auth/user.json',
|
||||
},
|
||||
},
|
||||
{
|
||||
// Separate project for unauthenticated tests (login, signup, etc.)
|
||||
// These tests use beforeEach for per-test navigation - no global auth
|
||||
// This hybrid approach: simple auth tests, fast E2E tests
|
||||
name: 'chromium-unauth',
|
||||
testMatch: '**/tests/auth/**/*.spec.ts',
|
||||
use: {
|
||||
browserName: 'chromium',
|
||||
testIdAttribute: 'data-test',
|
||||
// No storageState = clean browser with no cached cookies
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
105
superset-frontend/playwright/components/core/Modal.ts
Normal file
105
superset-frontend/playwright/components/core/Modal.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Locator, Page } from '@playwright/test';
|
||||
|
||||
/**
|
||||
* Base Modal component for Ant Design modals.
|
||||
* Provides minimal primitives - extend this for specific modal types.
|
||||
* Add methods to this class only when multiple modal types need them (YAGNI).
|
||||
*
|
||||
* @example
|
||||
* class DeleteConfirmationModal extends Modal {
|
||||
* async clickDelete(): Promise<void> {
|
||||
* await this.footer.locator('button', { hasText: 'Delete' }).click();
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
export class Modal {
|
||||
protected readonly page: Page;
|
||||
protected readonly modalSelector: string;
|
||||
|
||||
// Ant Design modal structure selectors (shared by all modal types)
|
||||
protected static readonly BASE_SELECTORS = {
|
||||
FOOTER: '.ant-modal-footer',
|
||||
BODY: '.ant-modal-body',
|
||||
};
|
||||
|
||||
constructor(page: Page, modalSelector = '[role="dialog"]') {
|
||||
this.page = page;
|
||||
this.modalSelector = modalSelector;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the modal element locator
|
||||
*/
|
||||
get element(): Locator {
|
||||
return this.page.locator(this.modalSelector);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the modal footer locator (contains action buttons)
|
||||
*/
|
||||
get footer(): Locator {
|
||||
return this.element.locator(Modal.BASE_SELECTORS.FOOTER);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the modal body locator (contains content)
|
||||
*/
|
||||
get body(): Locator {
|
||||
return this.element.locator(Modal.BASE_SELECTORS.BODY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a footer button by text content (private helper)
|
||||
* @param buttonText - The text content of the button
|
||||
*/
|
||||
private getFooterButton(buttonText: string): Locator {
|
||||
return this.footer.locator('button', { hasText: buttonText });
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks a footer button by text content
|
||||
* @param buttonText - The text content of the button to click
|
||||
* @param options - Optional click options
|
||||
*/
|
||||
protected async clickFooterButton(
|
||||
buttonText: string,
|
||||
options?: { timeout?: number; force?: boolean; delay?: number },
|
||||
): Promise<void> {
|
||||
await this.getFooterButton(buttonText).click(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for the modal to become visible
|
||||
* @param options - Optional wait options
|
||||
*/
|
||||
async waitForVisible(options?: { timeout?: number }): Promise<void> {
|
||||
await this.element.waitFor({ state: 'visible', ...options });
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for the modal to be hidden
|
||||
* @param options - Optional wait options
|
||||
*/
|
||||
async waitForHidden(options?: { timeout?: number }): Promise<void> {
|
||||
await this.element.waitFor({ state: 'hidden', ...options });
|
||||
}
|
||||
}
|
||||
85
superset-frontend/playwright/components/core/Table.ts
Normal file
85
superset-frontend/playwright/components/core/Table.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Locator, Page } from '@playwright/test';
|
||||
|
||||
/**
|
||||
* Table component for Superset ListView tables.
|
||||
*/
|
||||
export class Table {
|
||||
private readonly page: Page;
|
||||
private readonly tableSelector: string;
|
||||
|
||||
private static readonly SELECTORS = {
|
||||
TABLE_ROW: '[data-test="table-row"]',
|
||||
};
|
||||
|
||||
constructor(page: Page, tableSelector = '[data-test="listview-table"]') {
|
||||
this.page = page;
|
||||
this.tableSelector = tableSelector;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the table element locator
|
||||
*/
|
||||
get element(): Locator {
|
||||
return this.page.locator(this.tableSelector);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a table row by exact text match in the first cell (dataset name column).
|
||||
* Uses exact match to avoid substring collisions (e.g., 'members_channels_2' vs 'duplicate_members_channels_2_123').
|
||||
* @param rowText - Exact text to find in the row's first cell
|
||||
*/
|
||||
getRow(rowText: string): Locator {
|
||||
return this.element
|
||||
.locator(Table.SELECTORS.TABLE_ROW)
|
||||
.filter({
|
||||
has: this.page.getByRole('cell', { name: rowText, exact: true }),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks a link within a specific row
|
||||
* @param rowText - Text to identify the row
|
||||
* @param linkSelector - Selector for the link within the row
|
||||
*/
|
||||
async clickRowLink(rowText: string, linkSelector: string): Promise<void> {
|
||||
const row = this.getRow(rowText);
|
||||
await row.locator(linkSelector).click();
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for the table to be visible
|
||||
* @param options - Optional wait options
|
||||
*/
|
||||
async waitForVisible(options?: { timeout?: number }): Promise<void> {
|
||||
await this.element.waitFor({ state: 'visible', ...options });
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks an action button in a row by selector
|
||||
* @param rowText - Text to identify the row
|
||||
* @param selector - CSS selector for the action element
|
||||
*/
|
||||
async clickRowAction(rowText: string, selector: string): Promise<void> {
|
||||
const row = this.getRow(rowText);
|
||||
await row.locator(selector).first().click();
|
||||
}
|
||||
}
|
||||
105
superset-frontend/playwright/components/core/Toast.ts
Normal file
105
superset-frontend/playwright/components/core/Toast.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, Locator } from '@playwright/test';
|
||||
|
||||
export type ToastType = 'success' | 'danger' | 'warning' | 'info';
|
||||
|
||||
const SELECTORS = {
|
||||
CONTAINER: '[data-test="toast-container"][role="alert"]',
|
||||
CONTENT: '.toast__content',
|
||||
CLOSE_BUTTON: '[data-test="close-button"]',
|
||||
} as const;
|
||||
|
||||
/**
|
||||
* Toast notification component
|
||||
* Handles success, danger, warning, and info toasts
|
||||
*/
|
||||
export class Toast {
|
||||
private page: Page;
|
||||
private container: Locator;
|
||||
|
||||
constructor(page: Page) {
|
||||
this.page = page;
|
||||
this.container = page.locator(SELECTORS.CONTAINER);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the toast container locator
|
||||
*/
|
||||
get(): Locator {
|
||||
return this.container;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the toast message text
|
||||
*/
|
||||
getMessage(): Locator {
|
||||
return this.container.locator(SELECTORS.CONTENT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for a toast to appear
|
||||
*/
|
||||
async waitForVisible(): Promise<void> {
|
||||
await this.container.waitFor({ state: 'visible' });
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for toast to disappear
|
||||
*/
|
||||
async waitForHidden(): Promise<void> {
|
||||
await this.container.waitFor({ state: 'hidden' });
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a success toast
|
||||
*/
|
||||
getSuccess(): Locator {
|
||||
return this.page.locator(`${SELECTORS.CONTAINER}.toast--success`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a danger/error toast
|
||||
*/
|
||||
getDanger(): Locator {
|
||||
return this.page.locator(`${SELECTORS.CONTAINER}.toast--danger`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a warning toast
|
||||
*/
|
||||
getWarning(): Locator {
|
||||
return this.page.locator(`${SELECTORS.CONTAINER}.toast--warning`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an info toast
|
||||
*/
|
||||
getInfo(): Locator {
|
||||
return this.page.locator(`${SELECTORS.CONTAINER}.toast--info`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the toast by clicking the close button
|
||||
*/
|
||||
async close(): Promise<void> {
|
||||
await this.container.locator(SELECTORS.CLOSE_BUTTON).click();
|
||||
}
|
||||
}
|
||||
@@ -21,3 +21,5 @@
|
||||
export { Button } from './Button';
|
||||
export { Form } from './Form';
|
||||
export { Input } from './Input';
|
||||
export { Modal } from './Modal';
|
||||
export { Table } from './Table';
|
||||
|
||||
@@ -0,0 +1,75 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Modal, Input } from '../core';
|
||||
|
||||
/**
|
||||
* Delete confirmation modal that requires typing "DELETE" to confirm.
|
||||
* Used throughout Superset for destructive delete operations.
|
||||
*
|
||||
* Provides primitives for tests to compose deletion flows.
|
||||
*/
|
||||
export class DeleteConfirmationModal extends Modal {
|
||||
private static readonly SELECTORS = {
|
||||
CONFIRMATION_INPUT: 'input[type="text"]',
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets the confirmation input component
|
||||
*/
|
||||
private get confirmationInput(): Input {
|
||||
return new Input(
|
||||
this.page,
|
||||
this.body.locator(DeleteConfirmationModal.SELECTORS.CONFIRMATION_INPUT),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills the confirmation input with the specified text.
|
||||
*
|
||||
* @param confirmationText - The text to type
|
||||
* @param options - Optional fill options (timeout, force)
|
||||
*
|
||||
* @example
|
||||
* const deleteModal = new DeleteConfirmationModal(page);
|
||||
* await deleteModal.waitForVisible();
|
||||
* await deleteModal.fillConfirmationInput('DELETE');
|
||||
* await deleteModal.clickDelete();
|
||||
* await deleteModal.waitForHidden();
|
||||
*/
|
||||
async fillConfirmationInput(
|
||||
confirmationText: string,
|
||||
options?: { timeout?: number; force?: boolean },
|
||||
): Promise<void> {
|
||||
await this.confirmationInput.fill(confirmationText, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the Delete button in the footer
|
||||
*
|
||||
* @param options - Optional click options (timeout, force, delay)
|
||||
*/
|
||||
async clickDelete(options?: {
|
||||
timeout?: number;
|
||||
force?: boolean;
|
||||
delay?: number;
|
||||
}): Promise<void> {
|
||||
await this.clickFooterButton('Delete', options);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,73 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Modal, Input } from '../core';
|
||||
|
||||
/**
|
||||
* Duplicate dataset modal that requires entering a new dataset name.
|
||||
* Used for duplicating virtual datasets with custom SQL.
|
||||
*/
|
||||
export class DuplicateDatasetModal extends Modal {
|
||||
private static readonly SELECTORS = {
|
||||
NAME_INPUT: '[data-test="duplicate-modal-input"]',
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets the new dataset name input component
|
||||
*/
|
||||
private get nameInput(): Input {
|
||||
return new Input(
|
||||
this.page,
|
||||
this.body.locator(DuplicateDatasetModal.SELECTORS.NAME_INPUT),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills the new dataset name input
|
||||
*
|
||||
* @param datasetName - The new name for the duplicated dataset
|
||||
* @param options - Optional fill options (timeout, force)
|
||||
*
|
||||
* @example
|
||||
* const duplicateModal = new DuplicateDatasetModal(page);
|
||||
* await duplicateModal.waitForVisible();
|
||||
* await duplicateModal.fillDatasetName('my_dataset_copy');
|
||||
* await duplicateModal.clickDuplicate();
|
||||
* await duplicateModal.waitForHidden();
|
||||
*/
|
||||
async fillDatasetName(
|
||||
datasetName: string,
|
||||
options?: { timeout?: number; force?: boolean },
|
||||
): Promise<void> {
|
||||
await this.nameInput.fill(datasetName, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the Duplicate button in the footer
|
||||
*
|
||||
* @param options - Optional click options (timeout, force, delay)
|
||||
*/
|
||||
async clickDuplicate(options?: {
|
||||
timeout?: number;
|
||||
force?: boolean;
|
||||
delay?: number;
|
||||
}): Promise<void> {
|
||||
await this.clickFooterButton('Duplicate', options);
|
||||
}
|
||||
}
|
||||
22
superset-frontend/playwright/components/modals/index.ts
Normal file
22
superset-frontend/playwright/components/modals/index.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
// Specific modal implementations
|
||||
export { DeleteConfirmationModal } from './DeleteConfirmationModal';
|
||||
export { DuplicateDatasetModal } from './DuplicateDatasetModal';
|
||||
87
superset-frontend/playwright/global-setup.ts
Normal file
87
superset-frontend/playwright/global-setup.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
chromium,
|
||||
FullConfig,
|
||||
Browser,
|
||||
BrowserContext,
|
||||
} from '@playwright/test';
|
||||
import { mkdir } from 'fs/promises';
|
||||
import { dirname } from 'path';
|
||||
import { AuthPage } from './pages/AuthPage';
|
||||
|
||||
/**
 * Global setup function that runs once before all tests.
 * Authenticates as admin user and saves the authentication state
 * to be reused by tests in the 'chromium' project (E2E tests).
 *
 * Auth tests (chromium-unauth project) don't use this - they login
 * per-test via beforeEach for isolation and simplicity.
 *
 * @param config - Resolved Playwright configuration (used only for baseURL)
 * @throws If the browser fails to launch or the login flow fails;
 *         either error aborts the whole test run.
 */
async function globalSetup(config: FullConfig) {
  // Get baseURL with fallback to default
  // FullConfig.use doesn't exist in the type - baseURL is only in projects[0].use
  const baseURL = config.projects[0]?.use?.baseURL || 'http://localhost:8088';

  console.log('[Global Setup] Authenticating as admin user...');

  // Track browser/context so the finally block can close whatever was opened
  let browser: Browser | null = null;
  let context: BrowserContext | null = null;

  try {
    // Launch browser
    browser = await chromium.launch();
  } catch (error) {
    // Launch failure gets its own message: it points at the environment,
    // not at the application under test
    console.error('[Global Setup] Failed to launch browser:', error);
    throw new Error('Browser launch failed - check Playwright installation');
  }

  try {
    context = await browser.newContext({ baseURL });
    const page = await context.newPage();

    // Use AuthPage to handle login logic (DRY principle)
    const authPage = new AuthPage(page);
    await authPage.goto();
    await authPage.waitForLoginForm();
    // NOTE(review): credentials appear to match the CI users created by
    // `superset load_test_users` — confirm before reusing outside CI
    await authPage.loginWithCredentials('admin', 'general');
    await authPage.waitForLoginSuccess();

    // Save authentication state for all tests to reuse
    const authStatePath = 'playwright/.auth/user.json';
    // Create the .auth directory if missing so storageState can write the file
    await mkdir(dirname(authStatePath), { recursive: true });
    await context.storageState({
      path: authStatePath,
    });

    console.log(
      '[Global Setup] Authentication successful - state saved to playwright/.auth/user.json',
    );
  } catch (error) {
    // Log with context, then rethrow so Playwright aborts the run
    console.error('[Global Setup] Authentication failed:', error);
    throw error;
  } finally {
    // Ensure cleanup even if auth fails
    if (context) await context.close();
    if (browser) await browser.close();
  }
}

export default globalSetup;
|
||||
@@ -0,0 +1,88 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page } from '@playwright/test';
|
||||
import { apiPostDatabase } from './database';
|
||||
|
||||
// Public Google Sheets URL for testing
|
||||
const NETFLIX_TITLES_SHEET =
|
||||
'https://docs.google.com/spreadsheets/d/19XNqckHGKGGPh83JGFdFGP4Bw9gdXeujq5EoIGwttdM/edit#gid=347941303';
|
||||
|
||||
/**
|
||||
* Create a Google Sheets database connection for testing
|
||||
* Uses a public Netflix titles dataset by default
|
||||
* @param page - Playwright page instance
|
||||
* @param databaseName - Name for the database connection
|
||||
* @param tableName - Name for the table/dataset
|
||||
* @returns Database ID from the created database
|
||||
*/
|
||||
export async function createGsheetsDatabase(
|
||||
page: Page,
|
||||
databaseName: string,
|
||||
tableName: string,
|
||||
): Promise<number> {
|
||||
const requestBody = {
|
||||
database_name: databaseName,
|
||||
engine: 'gsheets',
|
||||
configuration_method: 'dynamic_form',
|
||||
engine_information: {
|
||||
disable_ssh_tunneling: true,
|
||||
supports_dynamic_catalog: false,
|
||||
supports_file_upload: true,
|
||||
supports_oauth2: true,
|
||||
},
|
||||
driver: 'apsw',
|
||||
sqlalchemy_uri_placeholder: 'gsheets://',
|
||||
extra: JSON.stringify({
|
||||
allows_virtual_table_explore: true,
|
||||
engine_params: {
|
||||
catalog: {
|
||||
[tableName]: NETFLIX_TITLES_SHEET,
|
||||
},
|
||||
},
|
||||
}),
|
||||
expose_in_sqllab: true,
|
||||
catalog: [
|
||||
{
|
||||
name: tableName,
|
||||
value: NETFLIX_TITLES_SHEET,
|
||||
},
|
||||
],
|
||||
parameters: {
|
||||
service_account_info: '',
|
||||
catalog: {
|
||||
[tableName]: NETFLIX_TITLES_SHEET,
|
||||
},
|
||||
},
|
||||
masked_encrypted_extra: '{}',
|
||||
impersonate_user: true,
|
||||
};
|
||||
|
||||
const response = await apiPostDatabase(page, requestBody);
|
||||
|
||||
if (!response.ok()) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(
|
||||
`Failed to create database: ${response.status()} ${response.statusText()}\n${errorText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const body = await response.json();
|
||||
return body.id;
|
||||
}
|
||||
79
superset-frontend/playwright/helpers/api/database.ts
Normal file
79
superset-frontend/playwright/helpers/api/database.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, APIResponse } from '@playwright/test';
|
||||
import { apiPost, apiDelete, ApiRequestOptions } from './requests';
|
||||
|
||||
// REST endpoints used by the helpers below (relative to the app base URL)
const ENDPOINTS = {
  DATABASE: 'api/v1/database/',
} as const;

/**
 * TypeScript interface for database creation API payload
 * Provides compile-time safety for required fields
 */
export interface DatabaseCreatePayload {
  database_name: string;
  engine: string;
  configuration_method?: string;
  engine_information?: {
    disable_ssh_tunneling?: boolean;
    supports_dynamic_catalog?: boolean;
    supports_file_upload?: boolean;
    supports_oauth2?: boolean;
  };
  driver?: string;
  // JSON-encoded string (see createGsheetsDatabase), not a nested object
  extra?: string;
  sqlalchemy_uri_placeholder?: string;
  expose_in_sqllab?: boolean;
  catalog?: Array<{ name: string; value: string }>;
  parameters?: {
    service_account_info?: string;
    catalog?: Record<string, string>;
  };
  masked_encrypted_extra?: string;
  impersonate_user?: boolean;
}

/**
 * POST request to create a database connection
 * @param page - Playwright page instance (provides authentication context)
 * @param requestBody - Database configuration object with type safety
 * @returns API response from database creation
 */
// NOTE(review): unlike apiDeleteDatabase this takes no ApiRequestOptions —
// confirm whether apiPost supports options and align if so
export async function apiPostDatabase(
  page: Page,
  requestBody: DatabaseCreatePayload,
): Promise<APIResponse> {
  return apiPost(page, ENDPOINTS.DATABASE, requestBody);
}

/**
 * DELETE request to remove a database connection
 * @param page - Playwright page instance (provides authentication context)
 * @param databaseId - ID of the database to delete
 * @param options - Optional request options forwarded to apiDelete
 * @returns API response from database deletion
 */
export async function apiDeleteDatabase(
  page: Page,
  databaseId: number,
  options?: ApiRequestOptions,
): Promise<APIResponse> {
  return apiDelete(page, `${ENDPOINTS.DATABASE}${databaseId}`, options);
}
|
||||
@@ -0,0 +1,76 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page } from '@playwright/test';
|
||||
import { createGsheetsDatabase } from './database.factories';
|
||||
import { apiPostDataset } from './dataset';
|
||||
import { apiDeleteDatabase } from './database';
|
||||
|
||||
/**
|
||||
* Create a test dataset with Google Sheets database
|
||||
* Creates both the database connection and dataset in one call
|
||||
* @param page - Playwright page instance
|
||||
* @param datasetName - Name for the dataset/table
|
||||
* @returns Object containing database ID and dataset ID
|
||||
*/
|
||||
export async function createTestDataset(
|
||||
page: Page,
|
||||
datasetName: string,
|
||||
): Promise<{ dbId: number; datasetId: number }> {
|
||||
// Step 1: Create Google Sheets database with catalog entry
|
||||
// The tableName in the catalog must match the table_name used when creating the dataset
|
||||
const dbName = `test_db_${Date.now()}`;
|
||||
const tableName = datasetName; // Use same name for catalog entry and dataset table_name
|
||||
const dbId = await createGsheetsDatabase(page, dbName, tableName);
|
||||
|
||||
// Step 2: Create dataset using the database
|
||||
// Wrap in try/finally to ensure database cleanup on failure
|
||||
try {
|
||||
// For Google Sheets, table_name must reference the catalog entry name
|
||||
// catalog: null is required to avoid OAuth validation issues
|
||||
const datasetRequestBody = {
|
||||
database: dbId,
|
||||
catalog: null,
|
||||
schema: 'main',
|
||||
table_name: tableName, // Must match the catalog entry name
|
||||
};
|
||||
|
||||
const response = await apiPostDataset(page, datasetRequestBody);
|
||||
|
||||
if (!response.ok()) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(
|
||||
`Failed to create dataset: ${response.status()} ${response.statusText()}\n${errorText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const body = await response.json();
|
||||
const datasetId = body.id;
|
||||
|
||||
return { dbId, datasetId };
|
||||
} catch (error) {
|
||||
// Clean up the orphaned database before rethrowing
|
||||
await apiDeleteDatabase(page, dbId, { failOnStatusCode: false }).catch(
|
||||
() => {
|
||||
// Silently ignore cleanup errors - the original error is more important
|
||||
},
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
112
superset-frontend/playwright/helpers/api/dataset.ts
Normal file
112
superset-frontend/playwright/helpers/api/dataset.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, APIResponse } from '@playwright/test';
|
||||
import { apiGet, apiPost, apiDelete, ApiRequestOptions } from './requests';
|
||||
|
||||
const ENDPOINTS = {
|
||||
DATASET: 'api/v1/dataset/',
|
||||
} as const;
|
||||
|
||||
/**
|
||||
* TypeScript interface for dataset creation API payload
|
||||
* Provides compile-time safety for required fields
|
||||
*/
|
||||
export interface DatasetCreatePayload {
|
||||
database: number;
|
||||
catalog: string | null;
|
||||
schema: string;
|
||||
table_name: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* POST request to create a dataset
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param requestBody - Dataset configuration object (database, schema, table_name)
|
||||
* @returns API response from dataset creation
|
||||
*/
|
||||
export async function apiPostDataset(
|
||||
page: Page,
|
||||
requestBody: DatasetCreatePayload,
|
||||
): Promise<APIResponse> {
|
||||
return apiPost(page, ENDPOINTS.DATASET, requestBody);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a dataset by its table name
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param tableName - The table_name to search for
|
||||
* @returns Object with id and data if found, null if not found
|
||||
*/
|
||||
export async function getDatasetByName(
|
||||
page: Page,
|
||||
tableName: string,
|
||||
): Promise<{ id: number; data: any } | null> {
|
||||
// Use Superset's filter API to search by table_name
|
||||
const filter = {
|
||||
filters: [
|
||||
{
|
||||
col: 'table_name',
|
||||
opr: 'eq',
|
||||
value: tableName,
|
||||
},
|
||||
],
|
||||
};
|
||||
const queryParam = encodeURIComponent(JSON.stringify(filter));
|
||||
const response = await apiGet(page, `${ENDPOINTS.DATASET}?q=${queryParam}`);
|
||||
|
||||
if (!response.ok()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const body = await response.json();
|
||||
if (body.result && body.result.length > 0) {
|
||||
return { id: body.result[0].id, data: body.result[0] };
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* GET request to fetch a dataset's details
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param datasetId - ID of the dataset to fetch
|
||||
* @returns API response with dataset details
|
||||
*/
|
||||
export async function apiGetDataset(
|
||||
page: Page,
|
||||
datasetId: number,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
return apiGet(page, `${ENDPOINTS.DATASET}${datasetId}`, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE request to remove a dataset
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param datasetId - ID of the dataset to delete
|
||||
* @returns API response from dataset deletion
|
||||
*/
|
||||
export async function apiDeleteDataset(
|
||||
page: Page,
|
||||
datasetId: number,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
return apiDelete(page, `${ENDPOINTS.DATASET}${datasetId}`, options);
|
||||
}
|
||||
178
superset-frontend/playwright/helpers/api/requests.ts
Normal file
178
superset-frontend/playwright/helpers/api/requests.ts
Normal file
@@ -0,0 +1,178 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, APIResponse } from '@playwright/test';
|
||||
|
||||
export interface ApiRequestOptions {
|
||||
headers?: Record<string, string>;
|
||||
params?: Record<string, string>;
|
||||
failOnStatusCode?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get base URL for Referer header
|
||||
* Reads from environment variable configured in playwright.config.ts
|
||||
* Preserves full base URL including path prefix (e.g., /app/prefix)
|
||||
*/
|
||||
function getBaseUrl(_page: Page): string {
|
||||
// Use environment variable which includes path prefix if configured
|
||||
// This matches playwright.config.ts baseURL setting exactly
|
||||
return process.env.PLAYWRIGHT_BASE_URL || 'http://localhost:8088';
|
||||
}
|
||||
|
||||
/**
|
||||
* Get CSRF token from the API endpoint
|
||||
* Superset provides a CSRF token via api/v1/security/csrf_token/
|
||||
* The session cookie is automatically included by page.request
|
||||
*/
|
||||
async function getCsrfToken(page: Page): Promise<string> {
|
||||
try {
|
||||
const response = await page.request.get('api/v1/security/csrf_token/', {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
|
||||
if (!response.ok()) {
|
||||
console.warn('[CSRF] Failed to fetch CSRF token:', response.status());
|
||||
return '';
|
||||
}
|
||||
|
||||
const json = await response.json();
|
||||
return json.result || '';
|
||||
} catch (error) {
|
||||
console.warn('[CSRF] Error fetching CSRF token:', error);
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build headers for mutation requests (POST, PUT, PATCH, DELETE)
|
||||
* Includes CSRF token and Referer for Flask-WTF CSRFProtect
|
||||
*/
|
||||
async function buildHeaders(
|
||||
page: Page,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<Record<string, string>> {
|
||||
const csrfToken = await getCsrfToken(page);
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
...options?.headers,
|
||||
};
|
||||
|
||||
// Include CSRF token and Referer for Flask-WTF CSRFProtect
|
||||
if (csrfToken) {
|
||||
headers['X-CSRFToken'] = csrfToken;
|
||||
headers['Referer'] = getBaseUrl(page);
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a GET request
|
||||
* Uses page.request to automatically include browser authentication
|
||||
*/
|
||||
export async function apiGet(
|
||||
page: Page,
|
||||
url: string,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
return page.request.get(url, {
|
||||
headers: options?.headers,
|
||||
params: options?.params,
|
||||
failOnStatusCode: options?.failOnStatusCode ?? true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a POST request
|
||||
* Uses page.request to automatically include browser authentication
|
||||
*/
|
||||
export async function apiPost(
|
||||
page: Page,
|
||||
url: string,
|
||||
data?: unknown,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
const headers = await buildHeaders(page, options);
|
||||
|
||||
return page.request.post(url, {
|
||||
data,
|
||||
headers,
|
||||
params: options?.params,
|
||||
failOnStatusCode: options?.failOnStatusCode ?? true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a PUT request
|
||||
* Uses page.request to automatically include browser authentication
|
||||
*/
|
||||
export async function apiPut(
|
||||
page: Page,
|
||||
url: string,
|
||||
data?: unknown,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
const headers = await buildHeaders(page, options);
|
||||
|
||||
return page.request.put(url, {
|
||||
data,
|
||||
headers,
|
||||
params: options?.params,
|
||||
failOnStatusCode: options?.failOnStatusCode ?? true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a PATCH request
|
||||
* Uses page.request to automatically include browser authentication
|
||||
*/
|
||||
export async function apiPatch(
|
||||
page: Page,
|
||||
url: string,
|
||||
data?: unknown,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
const headers = await buildHeaders(page, options);
|
||||
|
||||
return page.request.patch(url, {
|
||||
data,
|
||||
headers,
|
||||
params: options?.params,
|
||||
failOnStatusCode: options?.failOnStatusCode ?? true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a DELETE request
|
||||
* Uses page.request to automatically include browser authentication
|
||||
*/
|
||||
export async function apiDelete(
|
||||
page: Page,
|
||||
url: string,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
const headers = await buildHeaders(page, options);
|
||||
|
||||
return page.request.delete(url, {
|
||||
headers,
|
||||
params: options?.params,
|
||||
failOnStatusCode: options?.failOnStatusCode ?? true,
|
||||
});
|
||||
}
|
||||
@@ -17,9 +17,10 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, Response } from '@playwright/test';
|
||||
import { Page, Response, Cookie } from '@playwright/test';
|
||||
import { Form } from '../components/core';
|
||||
import { URL } from '../utils/urls';
|
||||
import { TIMEOUT } from '../utils/constants';
|
||||
|
||||
export class AuthPage {
|
||||
private readonly page: Page;
|
||||
@@ -56,7 +57,7 @@ export class AuthPage {
|
||||
* Wait for login form to be visible
|
||||
*/
|
||||
async waitForLoginForm(): Promise<void> {
|
||||
await this.loginForm.waitForVisible({ timeout: 5000 });
|
||||
await this.loginForm.waitForVisible({ timeout: TIMEOUT.FORM_LOAD });
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -83,6 +84,54 @@ export class AuthPage {
|
||||
await loginButton.click();
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for successful login by verifying the login response and session cookie.
|
||||
* Call this after loginWithCredentials to ensure authentication completed.
|
||||
*
|
||||
* This does NOT assume a specific landing page (which is configurable).
|
||||
* Instead it:
|
||||
* 1. Checks if session cookie already exists (guards against race condition)
|
||||
* 2. Waits for POST /login/ response with redirect status
|
||||
* 3. Polls for session cookie to appear
|
||||
*
|
||||
* @param options - Optional wait options
|
||||
*/
|
||||
async waitForLoginSuccess(options?: { timeout?: number }): Promise<void> {
|
||||
const timeout = options?.timeout || TIMEOUT.PAGE_LOAD;
|
||||
const startTime = Date.now();
|
||||
|
||||
// 1. Guard: Check if session cookie already exists (race condition protection)
|
||||
const existingCookie = await this.getSessionCookie();
|
||||
if (existingCookie?.value) {
|
||||
// Already authenticated - login completed before we started waiting
|
||||
return;
|
||||
}
|
||||
|
||||
// 2. Wait for POST /login/ response
|
||||
const loginResponse = await this.waitForLoginRequest();
|
||||
|
||||
// 3. Verify it's a redirect (3xx status code indicates successful login)
|
||||
const status = loginResponse.status();
|
||||
if (status < 300 || status >= 400) {
|
||||
throw new Error(`Login failed: expected redirect (3xx), got ${status}`);
|
||||
}
|
||||
|
||||
// 4. Poll for session cookie to appear (may take a moment after redirect)
|
||||
const pollInterval = TIMEOUT.API_POLL_INTERVAL;
|
||||
while (Date.now() - startTime < timeout) {
|
||||
const sessionCookie = await this.getSessionCookie();
|
||||
if (sessionCookie && sessionCookie.value) {
|
||||
// Success - session cookie has landed
|
||||
return;
|
||||
}
|
||||
await this.page.waitForTimeout(pollInterval);
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Login timeout: session cookie did not appear within ${timeout}ms`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current page URL
|
||||
*/
|
||||
@@ -93,9 +142,9 @@ export class AuthPage {
|
||||
/**
|
||||
* Get the session cookie specifically
|
||||
*/
|
||||
async getSessionCookie(): Promise<{ name: string; value: string } | null> {
|
||||
async getSessionCookie(): Promise<Cookie | null> {
|
||||
const cookies = await this.page.context().cookies();
|
||||
return cookies.find((c: any) => c.name === 'session') || null;
|
||||
return cookies.find(c => c.name === 'session') || null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -106,7 +155,7 @@ export class AuthPage {
|
||||
selector => this.page.locator(selector).isVisible(),
|
||||
);
|
||||
const visibilityResults = await Promise.all(visibilityPromises);
|
||||
return visibilityResults.some((isVisible: any) => isVisible);
|
||||
return visibilityResults.some(isVisible => isVisible);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -114,7 +163,7 @@ export class AuthPage {
|
||||
*/
|
||||
async waitForLoginRequest(): Promise<Response> {
|
||||
return this.page.waitForResponse(
|
||||
(response: any) =>
|
||||
response =>
|
||||
response.url().includes('/login/') &&
|
||||
response.request().method() === 'POST',
|
||||
);
|
||||
|
||||
115
superset-frontend/playwright/pages/DatasetListPage.ts
Normal file
115
superset-frontend/playwright/pages/DatasetListPage.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, Locator } from '@playwright/test';
|
||||
import { Table } from '../components/core';
|
||||
import { URL } from '../utils/urls';
|
||||
|
||||
/**
|
||||
* Dataset List Page object.
|
||||
*/
|
||||
export class DatasetListPage {
|
||||
private readonly page: Page;
|
||||
private readonly table: Table;
|
||||
|
||||
private static readonly SELECTORS = {
|
||||
DATASET_LINK: '[data-test="internal-link"]',
|
||||
DELETE_ACTION: '.action-button svg[data-icon="delete"]',
|
||||
EXPORT_ACTION: '.action-button svg[data-icon="upload"]',
|
||||
DUPLICATE_ACTION: '.action-button svg[data-icon="copy"]',
|
||||
} as const;
|
||||
|
||||
constructor(page: Page) {
|
||||
this.page = page;
|
||||
this.table = new Table(page);
|
||||
}
|
||||
|
||||
/**
|
||||
* Navigate to the dataset list page
|
||||
*/
|
||||
async goto(): Promise<void> {
|
||||
await this.page.goto(URL.DATASET_LIST);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for the table to load
|
||||
* @param options - Optional wait options
|
||||
*/
|
||||
async waitForTableLoad(options?: { timeout?: number }): Promise<void> {
|
||||
await this.table.waitForVisible(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a dataset row locator by name.
|
||||
* Returns a Locator that tests can use with expect().toBeVisible(), etc.
|
||||
*
|
||||
* @param datasetName - The name of the dataset
|
||||
* @returns Locator for the dataset row
|
||||
*
|
||||
* @example
|
||||
* await expect(datasetListPage.getDatasetRow('birth_names')).toBeVisible();
|
||||
*/
|
||||
getDatasetRow(datasetName: string): Locator {
|
||||
return this.table.getRow(datasetName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks on a dataset name to navigate to Explore
|
||||
* @param datasetName - The name of the dataset to click
|
||||
*/
|
||||
async clickDatasetName(datasetName: string): Promise<void> {
|
||||
await this.table.clickRowLink(
|
||||
datasetName,
|
||||
DatasetListPage.SELECTORS.DATASET_LINK,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the delete action button for a dataset
|
||||
* @param datasetName - The name of the dataset to delete
|
||||
*/
|
||||
async clickDeleteAction(datasetName: string): Promise<void> {
|
||||
await this.table.clickRowAction(
|
||||
datasetName,
|
||||
DatasetListPage.SELECTORS.DELETE_ACTION,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the export action button for a dataset
|
||||
* @param datasetName - The name of the dataset to export
|
||||
*/
|
||||
async clickExportAction(datasetName: string): Promise<void> {
|
||||
await this.table.clickRowAction(
|
||||
datasetName,
|
||||
DatasetListPage.SELECTORS.EXPORT_ACTION,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the duplicate action button for a dataset (virtual datasets only)
|
||||
* @param datasetName - The name of the dataset to duplicate
|
||||
*/
|
||||
async clickDuplicateAction(datasetName: string): Promise<void> {
|
||||
await this.table.clickRowAction(
|
||||
datasetName,
|
||||
DatasetListPage.SELECTORS.DUPLICATE_ACTION,
|
||||
);
|
||||
}
|
||||
}
|
||||
88
superset-frontend/playwright/pages/ExplorePage.ts
Normal file
88
superset-frontend/playwright/pages/ExplorePage.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, Locator } from '@playwright/test';
|
||||
import { TIMEOUT } from '../utils/constants';
|
||||
|
||||
/**
|
||||
* Explore Page object
|
||||
*/
|
||||
export class ExplorePage {
|
||||
private readonly page: Page;
|
||||
|
||||
private static readonly SELECTORS = {
|
||||
DATASOURCE_CONTROL: '[data-test="datasource-control"]',
|
||||
VIZ_SWITCHER: '[data-test="fast-viz-switcher"]',
|
||||
} as const;
|
||||
|
||||
constructor(page: Page) {
|
||||
this.page = page;
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for the Explore page to load.
|
||||
* Validates URL contains /explore/ and datasource control is visible.
|
||||
*
|
||||
* @param options - Optional wait options
|
||||
*/
|
||||
async waitForPageLoad(options?: { timeout?: number }): Promise<void> {
|
||||
const timeout = options?.timeout || TIMEOUT.PAGE_LOAD;
|
||||
|
||||
await this.page.waitForURL('**/explore/**', { timeout });
|
||||
|
||||
await this.page.waitForSelector(ExplorePage.SELECTORS.DATASOURCE_CONTROL, {
|
||||
state: 'visible',
|
||||
timeout,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the datasource control locator.
|
||||
* Returns a Locator that tests can use with expect() or to read text.
|
||||
*
|
||||
* @returns Locator for the datasource control
|
||||
*
|
||||
* @example
|
||||
* const name = await explorePage.getDatasourceControl().textContent();
|
||||
*/
|
||||
getDatasourceControl(): Locator {
|
||||
return this.page.locator(ExplorePage.SELECTORS.DATASOURCE_CONTROL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the currently selected dataset name from the datasource control
|
||||
*/
|
||||
async getDatasetName(): Promise<string> {
|
||||
const text = await this.getDatasourceControl().textContent();
|
||||
return text?.trim() || '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the visualization switcher locator.
|
||||
* Returns a Locator that tests can use with expect().toBeVisible(), etc.
|
||||
*
|
||||
* @returns Locator for the viz switcher
|
||||
*
|
||||
* @example
|
||||
* await expect(explorePage.getVizSwitcher()).toBeVisible();
|
||||
*/
|
||||
getVizSwitcher(): Locator {
|
||||
return this.page.locator(ExplorePage.SELECTORS.VIZ_SWITCHER);
|
||||
}
|
||||
}
|
||||
@@ -20,69 +20,74 @@
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { AuthPage } from '../../pages/AuthPage';
|
||||
import { URL } from '../../utils/urls';
|
||||
import { TIMEOUT } from '../../utils/constants';
|
||||
|
||||
test.describe('Login view', () => {
|
||||
let authPage: AuthPage;
|
||||
/**
|
||||
* Auth/login tests use per-test navigation via beforeEach.
|
||||
* Each test starts fresh on the login page without global authentication.
|
||||
* This follows the Cypress pattern for auth testing - simple and isolated.
|
||||
*/
|
||||
|
||||
test.beforeEach(async ({ page }: any) => {
|
||||
authPage = new AuthPage(page);
|
||||
await authPage.goto();
|
||||
await authPage.waitForLoginForm();
|
||||
});
|
||||
|
||||
test('should redirect to login with incorrect username and password', async ({
|
||||
page,
|
||||
}: any) => {
|
||||
// Setup request interception before login attempt
|
||||
const loginRequestPromise = authPage.waitForLoginRequest();
|
||||
|
||||
// Attempt login with incorrect credentials
|
||||
await authPage.loginWithCredentials('admin', 'wrongpassword');
|
||||
|
||||
// Wait for login request and verify response
|
||||
const loginResponse = await loginRequestPromise;
|
||||
// Failed login returns 401 Unauthorized or 302 redirect to login
|
||||
expect([401, 302]).toContain(loginResponse.status());
|
||||
|
||||
// Wait for redirect to complete before checking URL
|
||||
await page.waitForURL((url: any) => url.pathname.endsWith('login/'), {
|
||||
timeout: 10000,
|
||||
});
|
||||
|
||||
// Verify we stay on login page
|
||||
const currentUrl = await authPage.getCurrentUrl();
|
||||
expect(currentUrl).toContain(URL.LOGIN);
|
||||
|
||||
// Verify error message is shown
|
||||
const hasError = await authPage.hasLoginError();
|
||||
expect(hasError).toBe(true);
|
||||
});
|
||||
|
||||
test('should login with correct username and password', async ({
|
||||
page,
|
||||
}: any) => {
|
||||
// Setup request interception before login attempt
|
||||
const loginRequestPromise = authPage.waitForLoginRequest();
|
||||
|
||||
// Login with correct credentials
|
||||
await authPage.loginWithCredentials('admin', 'general');
|
||||
|
||||
// Wait for login request and verify response
|
||||
const loginResponse = await loginRequestPromise;
|
||||
// Successful login returns 302 redirect
|
||||
expect(loginResponse.status()).toBe(302);
|
||||
|
||||
// Wait for successful redirect to welcome page
|
||||
await page.waitForURL(
|
||||
(url: any) => url.pathname.endsWith('superset/welcome/'),
|
||||
{
|
||||
timeout: 10000,
|
||||
},
|
||||
);
|
||||
|
||||
// Verify specific session cookie exists
|
||||
const sessionCookie = await authPage.getSessionCookie();
|
||||
expect(sessionCookie).not.toBeNull();
|
||||
expect(sessionCookie?.value).toBeTruthy();
|
||||
});
|
||||
test.beforeEach(async ({ page }) => {
|
||||
// Navigate to login page before each test (ensures clean state)
|
||||
const authPage = new AuthPage(page);
|
||||
await authPage.goto();
|
||||
await authPage.waitForLoginForm();
|
||||
});
|
||||
|
||||
test('should redirect to login with incorrect username and password', async ({
|
||||
page,
|
||||
}) => {
|
||||
// Create page object (already on login page from beforeEach)
|
||||
const authPage = new AuthPage(page);
|
||||
|
||||
// Setup request interception before login attempt
|
||||
const loginRequestPromise = authPage.waitForLoginRequest();
|
||||
|
||||
// Attempt login with incorrect credentials
|
||||
await authPage.loginWithCredentials('admin', 'wrongpassword');
|
||||
|
||||
// Wait for login request and verify response
|
||||
const loginResponse = await loginRequestPromise;
|
||||
// Failed login returns 401 Unauthorized or 302 redirect to login
|
||||
expect([401, 302]).toContain(loginResponse.status());
|
||||
|
||||
// Wait for redirect to complete before checking URL
|
||||
await page.waitForURL(url => url.pathname.endsWith(URL.LOGIN), {
|
||||
timeout: TIMEOUT.PAGE_LOAD,
|
||||
});
|
||||
|
||||
// Verify we stay on login page
|
||||
const currentUrl = await authPage.getCurrentUrl();
|
||||
expect(currentUrl).toContain(URL.LOGIN);
|
||||
|
||||
// Verify error message is shown
|
||||
const hasError = await authPage.hasLoginError();
|
||||
expect(hasError).toBe(true);
|
||||
});
|
||||
|
||||
test('should login with correct username and password', async ({ page }) => {
|
||||
// Create page object (already on login page from beforeEach)
|
||||
const authPage = new AuthPage(page);
|
||||
|
||||
// Setup request interception before login attempt
|
||||
const loginRequestPromise = authPage.waitForLoginRequest();
|
||||
|
||||
// Login with correct credentials
|
||||
await authPage.loginWithCredentials('admin', 'general');
|
||||
|
||||
// Wait for login request and verify response
|
||||
const loginResponse = await loginRequestPromise;
|
||||
// Successful login returns 302 redirect
|
||||
expect(loginResponse.status()).toBe(302);
|
||||
|
||||
// Wait for successful redirect to welcome page
|
||||
await page.waitForURL(url => url.pathname.endsWith(URL.WELCOME), {
|
||||
timeout: TIMEOUT.PAGE_LOAD,
|
||||
});
|
||||
|
||||
// Verify specific session cookie exists
|
||||
const sessionCookie = await authPage.getSessionCookie();
|
||||
expect(sessionCookie).not.toBeNull();
|
||||
expect(sessionCookie?.value).toBeTruthy();
|
||||
});
|
||||
|
||||
@@ -19,52 +19,98 @@ under the License.
|
||||
|
||||
# Experimental Playwright Tests
|
||||
|
||||
This directory contains Playwright tests that are still under development or validation.
|
||||
|
||||
## Purpose
|
||||
|
||||
Tests in this directory run in "shadow mode" with `continue-on-error: true` in CI:
|
||||
- Failures do NOT block PR merges
|
||||
- Allows tests to run in CI to validate stability before promotion
|
||||
- Provides visibility into test reliability over time
|
||||
This directory contains **experimental** Playwright E2E tests that are being developed and stabilized before becoming part of the required test suite.
|
||||
|
||||
## Promoting Tests to Stable
|
||||
## How Experimental Tests Work
|
||||
|
||||
Once a test has proven stable (no false positives/negatives over sufficient time):
|
||||
|
||||
1. Move the test file out of `experimental/` to the appropriate feature directory:
|
||||
```bash
|
||||
# From the repository root:
|
||||
git mv superset-frontend/playwright/tests/experimental/dashboard/test.spec.ts \
|
||||
superset-frontend/playwright/tests/dashboard/
|
||||
|
||||
# Or from the superset-frontend/ directory:
|
||||
git mv playwright/tests/experimental/dashboard/test.spec.ts \
|
||||
playwright/tests/dashboard/
|
||||
```
|
||||
|
||||
2. The test will automatically become required for merge
|
||||
|
||||
## Test Organization
|
||||
|
||||
Organize tests by feature area:
|
||||
- `auth/` - Authentication and authorization tests
|
||||
- `dashboard/` - Dashboard functionality tests
|
||||
- `explore/` - Chart builder tests
|
||||
- `sqllab/` - SQL Lab tests
|
||||
- etc.
|
||||
|
||||
## Running Tests
|
||||
### Running Tests
|
||||
|
||||
**By default (CI and local), experimental tests are EXCLUDED:**
|
||||
```bash
|
||||
# Run all experimental tests (requires INCLUDE_EXPERIMENTAL env var)
|
||||
INCLUDE_EXPERIMENTAL=true npm run playwright:test -- experimental/
|
||||
|
||||
# Run specific experimental test
|
||||
INCLUDE_EXPERIMENTAL=true npm run playwright:test -- experimental/dashboard/test.spec.ts
|
||||
|
||||
# Run in UI mode for debugging
|
||||
INCLUDE_EXPERIMENTAL=true npm run playwright:ui -- experimental/
|
||||
npm run playwright:test
|
||||
# Only runs stable tests (tests/auth/*)
|
||||
```
|
||||
|
||||
**Note**: The `INCLUDE_EXPERIMENTAL=true` environment variable is required because experimental tests are filtered out by default in `playwright.config.ts`. Without it, Playwright will report "No tests found".
|
||||
**To include experimental tests, set the environment variable:**
|
||||
```bash
|
||||
INCLUDE_EXPERIMENTAL=true npm run playwright:test
|
||||
# Runs all tests including experimental/
|
||||
```
|
||||
|
||||
### CI Behavior
|
||||
|
||||
- **Required CI jobs**: Experimental tests are excluded by default
|
||||
- Tests in `experimental/` do NOT block merges
|
||||
- Failures in `experimental/` do NOT fail the build
|
||||
|
||||
- **Experimental CI jobs** (optional): Use `TEST_PATH=experimental/`
|
||||
- `.github/workflows/bashlib.sh` sets `INCLUDE_EXPERIMENTAL=true` when `TEST_PATH` is provided
|
||||
- These jobs can use `continue-on-error: true` for shadow mode
|
||||
|
||||
### Configuration
|
||||
|
||||
The experimental pattern is configured in `playwright.config.ts`:
|
||||
|
||||
```typescript
|
||||
testIgnore: process.env.INCLUDE_EXPERIMENTAL
|
||||
? undefined
|
||||
: '**/experimental/**',
|
||||
```
|
||||
|
||||
This ensures:
|
||||
- Without `INCLUDE_EXPERIMENTAL`: Tests in `experimental/` are ignored
|
||||
- With `INCLUDE_EXPERIMENTAL=true`: All tests run, including experimental
|
||||
|
||||
## When to Use Experimental
|
||||
|
||||
Add tests to `experimental/` when:
|
||||
|
||||
1. **Testing new infrastructure** - New page objects, components, or patterns that need real-world validation
|
||||
2. **Flaky tests** - Tests that pass locally but have intermittent CI failures that need investigation
|
||||
3. **New test types** - E2E tests for new features that need to prove stability before becoming required
|
||||
4. **Prototyping** - Experimental approaches that may or may not become standard patterns
|
||||
|
||||
## Moving Tests to Stable
|
||||
|
||||
Once an experimental test has proven stable (consistent CI passes over time):
|
||||
|
||||
1. **Move the test file** from `experimental/` to the appropriate stable directory:
|
||||
```bash
|
||||
git mv tests/experimental/dataset/my-test.spec.ts tests/dataset/my-test.spec.ts
|
||||
```
|
||||
|
||||
2. **Commit the move** with a clear message:
|
||||
```bash
|
||||
git commit -m "test(playwright): promote my-test from experimental to stable"
|
||||
```
|
||||
|
||||
3. **Test will now be required** - It will run by default and block merges on failure
|
||||
|
||||
## Current Experimental Tests
|
||||
|
||||
### Dataset Tests
|
||||
|
||||
- **`dataset/dataset-list.spec.ts`** - Dataset list E2E tests
|
||||
- Status: Infrastructure complete, validating stability
|
||||
- Includes: Delete dataset test with API-based test data
|
||||
- Supporting infrastructure: API helpers, Modal components, page objects
|
||||
|
||||
## Infrastructure Location
|
||||
|
||||
**Important**: Supporting infrastructure (components, page objects, API helpers) should live in **stable locations**, NOT under `experimental/`:
|
||||
|
||||
✅ **Correct locations:**
|
||||
- `playwright/components/` - Components used by any tests
|
||||
- `playwright/pages/` - Page objects for any features
|
||||
- `playwright/helpers/api/` - API helpers for test data setup
|
||||
|
||||
❌ **Avoid:**
|
||||
- `playwright/tests/experimental/components/` - Makes it hard to share infrastructure
|
||||
|
||||
This keeps infrastructure reusable and avoids duplication when tests graduate from experimental to stable.
|
||||
|
||||
## Questions?
|
||||
|
||||
See [Superset Testing Documentation](https://superset.apache.org/docs/contributing/development#testing) or ask in the `#testing` Slack channel.
|
||||
|
||||
@@ -0,0 +1,222 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { test, expect, Page } from '@playwright/test';
|
||||
import { DatasetListPage } from '../../../pages/DatasetListPage';
|
||||
import { ExplorePage } from '../../../pages/ExplorePage';
|
||||
import { DeleteConfirmationModal } from '../../../components/modals/DeleteConfirmationModal';
|
||||
import { DuplicateDatasetModal } from '../../../components/modals/DuplicateDatasetModal';
|
||||
import { Toast } from '../../../components/core/Toast';
|
||||
import { createTestDataset } from '../../../helpers/api/dataset.factories';
|
||||
import {
|
||||
apiDeleteDataset,
|
||||
apiGetDataset,
|
||||
getDatasetByName,
|
||||
} from '../../../helpers/api/dataset';
|
||||
import { apiDeleteDatabase } from '../../../helpers/api/database';
|
||||
|
||||
test.describe('Dataset List', () => {
|
||||
let datasetListPage: DatasetListPage;
|
||||
let explorePage: ExplorePage;
|
||||
let testResources: { datasetIds: number[]; dbId?: number } = {
|
||||
datasetIds: [],
|
||||
};
|
||||
|
||||
test.beforeEach(async ({ page }) => {
|
||||
datasetListPage = new DatasetListPage(page);
|
||||
explorePage = new ExplorePage(page);
|
||||
testResources = { datasetIds: [] }; // Reset for each test
|
||||
|
||||
// Navigate to dataset list page
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
});
|
||||
|
||||
test.afterEach(async ({ page }) => {
|
||||
// Cleanup any resources created during the test
|
||||
await cleanupTestAssets(page, testResources);
|
||||
});
|
||||
|
||||
function cleanupTestAssets(
|
||||
page: Page,
|
||||
resources: { datasetIds: number[]; dbId?: number },
|
||||
) {
|
||||
const promises = [];
|
||||
|
||||
// Delete all datasets
|
||||
for (const datasetId of resources.datasetIds) {
|
||||
promises.push(
|
||||
apiDeleteDataset(page, datasetId, {
|
||||
failOnStatusCode: false,
|
||||
}).catch(() => {}),
|
||||
);
|
||||
}
|
||||
|
||||
// Delete database if exists
|
||||
if (resources.dbId) {
|
||||
promises.push(
|
||||
apiDeleteDatabase(page, resources.dbId, {
|
||||
failOnStatusCode: false,
|
||||
}).catch(() => {}),
|
||||
);
|
||||
}
|
||||
|
||||
return Promise.all(promises);
|
||||
}
|
||||
|
||||
test('should navigate to Explore when dataset name is clicked', async ({
|
||||
page,
|
||||
}) => {
|
||||
// Create test dataset (hermetic - no dependency on sample data)
|
||||
const datasetName = `test_nav_${Date.now()}`;
|
||||
const result = await createTestDataset(page, datasetName);
|
||||
testResources = { datasetIds: [result.datasetId], dbId: result.dbId };
|
||||
|
||||
// Refresh page to see new dataset
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify dataset is visible in list (uses page object + Playwright auto-wait)
|
||||
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
|
||||
|
||||
// Click on dataset name to navigate to Explore
|
||||
await datasetListPage.clickDatasetName(datasetName);
|
||||
|
||||
// Wait for Explore page to load (validates URL + datasource control)
|
||||
await explorePage.waitForPageLoad();
|
||||
|
||||
// Verify correct dataset is loaded in datasource control
|
||||
const loadedDatasetName = await explorePage.getDatasetName();
|
||||
expect(loadedDatasetName).toContain(datasetName);
|
||||
|
||||
// Verify visualization switcher shows default viz type (indicates full page load)
|
||||
await expect(explorePage.getVizSwitcher()).toBeVisible();
|
||||
await expect(explorePage.getVizSwitcher()).toContainText('Table');
|
||||
});
|
||||
|
||||
test('should delete a dataset with confirmation', async ({ page }) => {
|
||||
// Create test dataset (hermetic - creates own test data)
|
||||
const datasetName = `test_delete_${Date.now()}`;
|
||||
const result = await createTestDataset(page, datasetName);
|
||||
testResources = { datasetIds: [result.datasetId], dbId: result.dbId };
|
||||
|
||||
// Refresh page to see new dataset
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify dataset is visible in list
|
||||
await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
|
||||
|
||||
// Click delete action button
|
||||
await datasetListPage.clickDeleteAction(datasetName);
|
||||
|
||||
// Delete confirmation modal should appear
|
||||
const deleteModal = new DeleteConfirmationModal(page);
|
||||
await deleteModal.waitForVisible();
|
||||
|
||||
// Type "DELETE" to confirm
|
||||
await deleteModal.fillConfirmationInput('DELETE');
|
||||
|
||||
// Click the Delete button
|
||||
await deleteModal.clickDelete();
|
||||
|
||||
// Modal should close
|
||||
await deleteModal.waitForHidden();
|
||||
|
||||
// Verify success toast appears with correct message
|
||||
const toast = new Toast(page);
|
||||
const successToast = toast.getSuccess();
|
||||
await expect(successToast).toBeVisible();
|
||||
await expect(toast.getMessage()).toContainText('Deleted');
|
||||
|
||||
// Verify dataset is removed from list
|
||||
await expect(datasetListPage.getDatasetRow(datasetName)).not.toBeVisible();
|
||||
});
|
||||
|
||||
test('should duplicate a dataset with new name', async ({ page }) => {
|
||||
// Use virtual example dataset (members_channels_2)
|
||||
const originalName = 'members_channels_2';
|
||||
const duplicateName = `duplicate_${originalName}_${Date.now()}`;
|
||||
|
||||
// Get the dataset by name (ID varies by environment)
|
||||
const original = await getDatasetByName(page, originalName);
|
||||
expect(original).not.toBeNull();
|
||||
expect(original!.id).toBeGreaterThan(0);
|
||||
|
||||
// Verify original dataset is visible in list
|
||||
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible();
|
||||
|
||||
// Set up response intercept to capture duplicate dataset ID
|
||||
const duplicateResponsePromise = page.waitForResponse(
|
||||
response =>
|
||||
response.url().includes('/dataset/duplicate') &&
|
||||
response.status() === 200,
|
||||
);
|
||||
|
||||
// Click duplicate action button
|
||||
await datasetListPage.clickDuplicateAction(originalName);
|
||||
|
||||
// Duplicate modal should appear
|
||||
const duplicateModal = new DuplicateDatasetModal(page);
|
||||
await duplicateModal.waitForVisible();
|
||||
|
||||
// Fill in new dataset name
|
||||
await duplicateModal.fillDatasetName(duplicateName);
|
||||
|
||||
// Click the Duplicate button
|
||||
await duplicateModal.clickDuplicate();
|
||||
|
||||
// Get the duplicate dataset ID from response
|
||||
const duplicateResponse = await duplicateResponsePromise;
|
||||
const duplicateData = await duplicateResponse.json();
|
||||
const duplicateId = duplicateData.id;
|
||||
|
||||
// Track duplicate for cleanup (original is example data, don't delete it)
|
||||
testResources = { datasetIds: [duplicateId] };
|
||||
|
||||
// Modal should close
|
||||
await duplicateModal.waitForHidden();
|
||||
|
||||
// Verify success toast appears
|
||||
const toast = new Toast(page);
|
||||
const successToast = toast.getSuccess();
|
||||
await expect(successToast).toBeVisible();
|
||||
|
||||
// Refresh to see the duplicated dataset
|
||||
await datasetListPage.goto();
|
||||
await datasetListPage.waitForTableLoad();
|
||||
|
||||
// Verify both datasets exist in list
|
||||
await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible();
|
||||
await expect(datasetListPage.getDatasetRow(duplicateName)).toBeVisible();
|
||||
|
||||
// API Verification: Compare original and duplicate datasets
|
||||
const duplicateResponseData = await apiGetDataset(page, duplicateId);
|
||||
const duplicateDataFull = await duplicateResponseData.json();
|
||||
|
||||
// Verify key properties were copied correctly (original data already fetched)
|
||||
expect(duplicateDataFull.result.sql).toBe(original!.data.sql);
|
||||
expect(duplicateDataFull.result.database.id).toBe(
|
||||
original!.data.database.id,
|
||||
);
|
||||
expect(duplicateDataFull.result.schema).toBe(original!.data.schema);
|
||||
// Name should be different (the duplicate name)
|
||||
expect(duplicateDataFull.result.table_name).toBe(duplicateName);
|
||||
});
|
||||
});
|
||||
47
superset-frontend/playwright/utils/constants.ts
Normal file
47
superset-frontend/playwright/utils/constants.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Timeout constants for Playwright tests.
|
||||
* Only define timeouts that differ from Playwright defaults or are semantically important.
|
||||
*
|
||||
* Default Playwright timeouts (from playwright.config.ts):
|
||||
* - Test timeout: 30000ms (30s)
|
||||
* - Expect timeout: 8000ms (8s)
|
||||
*
|
||||
* Use these constants instead of magic numbers for better maintainability.
|
||||
*/
|
||||
|
||||
export const TIMEOUT = {
|
||||
/**
|
||||
* Page navigation and load timeouts
|
||||
*/
|
||||
PAGE_LOAD: 10000, // 10s for page transitions (login → welcome, dataset → explore)
|
||||
|
||||
/**
|
||||
* Form and UI element load timeouts
|
||||
*/
|
||||
FORM_LOAD: 5000, // 5s for forms to become visible (login form, modals)
|
||||
|
||||
/**
|
||||
* API polling intervals
|
||||
*/
|
||||
API_POLL_INTERVAL: 100, // 100ms between API polling attempts
|
||||
API_POLL_TIMEOUT: 5000, // 5s total timeout for API state changes
|
||||
} as const;
|
||||
@@ -18,6 +18,7 @@
|
||||
*/
|
||||
|
||||
export const URL = {
|
||||
DATASET_LIST: 'tablemodelview/list',
|
||||
LOGIN: 'login/',
|
||||
WELCOME: 'superset/welcome/',
|
||||
} as const;
|
||||
|
||||
@@ -0,0 +1,282 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { render, screen, waitFor } from 'spec/helpers/testing-library';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { ThemeProvider, supersetTheme } from '@apache-superset/core';
|
||||
import DuplicateDatasetModal from './DuplicateDatasetModal';
|
||||
|
||||
// Test-only fixture type that includes all fields from API responses
|
||||
// Matches VirtualDataset structure from DatasetList but defined locally for tests
|
||||
interface VirtualDatasetFixture {
|
||||
id: number;
|
||||
table_name: string;
|
||||
kind: string;
|
||||
schema: string;
|
||||
database: {
|
||||
id: string;
|
||||
database_name: string;
|
||||
};
|
||||
owners: Array<{ first_name: string; last_name: string; id: number }>;
|
||||
changed_by_name: string;
|
||||
changed_by: string;
|
||||
changed_on_delta_humanized: string;
|
||||
explore_url: string;
|
||||
extra: string;
|
||||
sql: string | null;
|
||||
}
|
||||
|
||||
// Test fixture with extra/sql fields that exist in actual API responses
|
||||
const mockDataset: VirtualDatasetFixture = {
|
||||
id: 1,
|
||||
table_name: 'original_dataset',
|
||||
kind: 'virtual',
|
||||
schema: 'public',
|
||||
database: {
|
||||
id: '1',
|
||||
database_name: 'PostgreSQL',
|
||||
},
|
||||
owners: [],
|
||||
changed_by_name: 'Admin',
|
||||
changed_by: 'Admin User',
|
||||
changed_on_delta_humanized: '1 day ago',
|
||||
explore_url: '/explore/?datasource=1__table',
|
||||
extra: '{}',
|
||||
sql: 'SELECT * FROM table',
|
||||
};
|
||||
|
||||
// Props shared by the wrapper and the render helper below.
type ModalTestProps = {
  dataset: VirtualDatasetFixture | null;
  onHide: jest.Mock;
  onDuplicate: jest.Mock;
};

// Mounts the modal under the Superset theme so themed styles resolve.
const Wrapper = ({ dataset, onHide, onDuplicate }: ModalTestProps) => (
  <ThemeProvider theme={supersetTheme}>
    <DuplicateDatasetModal
      dataset={dataset}
      onHide={onHide}
      onDuplicate={onDuplicate}
    />
  </ThemeProvider>
);

// Shorthand for rendering the themed modal in each test.
const renderModal = (
  dataset: VirtualDatasetFixture | null,
  onHide: jest.Mock,
  onDuplicate: jest.Mock,
) =>
  render(
    <Wrapper dataset={dataset} onHide={onHide} onDuplicate={onDuplicate} />,
  );
|
||||
|
||||
// Produces a fresh pair of spy callbacks for each test.
const makeHandlers = () => ({ onHide: jest.fn(), onDuplicate: jest.fn() });

test('modal opens when dataset is provided', async () => {
  const { onHide, onDuplicate } = makeHandlers();
  renderModal(mockDataset, onHide, onDuplicate);

  // Title, name input, and primary action should all be rendered.
  expect(await screen.findByText('Duplicate dataset')).toBeInTheDocument();
  expect(screen.getByTestId('duplicate-modal-input')).toBeInTheDocument();
  expect(
    screen.getByRole('button', { name: /duplicate/i }),
  ).toBeInTheDocument();
});

test('modal does not open when dataset is null', () => {
  const { onHide, onDuplicate } = makeHandlers();
  renderModal(null, onHide, onDuplicate);

  // No dataset means the modal stays closed.
  expect(screen.queryByText('Duplicate dataset')).not.toBeInTheDocument();
});

test('duplicate button disabled after clearing input', async () => {
  const { onHide, onDuplicate } = makeHandlers();
  renderModal(mockDataset, onHide, onDuplicate);

  const nameInput = await screen.findByTestId('duplicate-modal-input');

  // Fill then empty the field; an empty name must disable submission.
  await userEvent.type(nameInput, 'test');
  await userEvent.clear(nameInput);

  expect(screen.getByRole('button', { name: /duplicate/i })).toBeDisabled();
});

test('duplicate button enabled when name is entered', async () => {
  const { onHide, onDuplicate } = makeHandlers();
  renderModal(mockDataset, onHide, onDuplicate);

  const nameInput = await screen.findByTestId('duplicate-modal-input');
  await userEvent.type(nameInput, 'new_dataset_copy');

  // A non-empty name unlocks the primary action.
  const submitButton = await screen.findByRole('button', {
    name: /duplicate/i,
  });
  expect(submitButton).toBeEnabled();
});

test('clicking Duplicate calls onDuplicate with new name', async () => {
  const { onHide, onDuplicate } = makeHandlers();
  renderModal(mockDataset, onHide, onDuplicate);

  const nameInput = await screen.findByTestId('duplicate-modal-input');
  await userEvent.type(nameInput, 'new_dataset_copy');

  const submitButton = await screen.findByRole('button', {
    name: /duplicate/i,
  });
  await userEvent.click(submitButton);

  // The callback receives exactly the typed name.
  await waitFor(() => {
    expect(onDuplicate).toHaveBeenCalledWith('new_dataset_copy');
  });
});

test('pressing Enter key triggers duplicate action', async () => {
  const { onHide, onDuplicate } = makeHandlers();
  renderModal(mockDataset, onHide, onDuplicate);

  const nameInput = await screen.findByTestId('duplicate-modal-input');

  // Enter inside the input should submit via the onPressEnter handler.
  await userEvent.clear(nameInput);
  await userEvent.type(nameInput, 'new_dataset_copy{enter}');

  await waitFor(() => {
    expect(onDuplicate).toHaveBeenCalledWith('new_dataset_copy');
  });
});
|
||||
|
||||
test('modal closes when onHide is called', async () => {
  const onHide = jest.fn();
  const onDuplicate = jest.fn();
  const { rerender } = renderModal(mockDataset, onHide, onDuplicate);

  expect(await screen.findByText('Duplicate dataset')).toBeInTheDocument();

  // The parent closes the modal by passing a null dataset.
  rerender(
    <Wrapper dataset={null} onHide={onHide} onDuplicate={onDuplicate} />,
  );

  // Ant Design keeps the modal in the DOM but hides it.
  await waitFor(() => {
    expect(screen.queryByText('Duplicate dataset')).not.toBeVisible();
  });
});

test('cancel button clears input and closes modal', async () => {
  const onHide = jest.fn();
  const onDuplicate = jest.fn();
  const { rerender } = renderModal(mockDataset, onHide, onDuplicate);

  const nameInput = await screen.findByTestId('duplicate-modal-input');
  await userEvent.type(nameInput, 'test_name');
  expect(nameInput).toHaveValue('test_name');

  // Cancelling must notify the parent via onHide...
  const cancelButton = await screen.findByRole('button', { name: /cancel/i });
  await userEvent.click(cancelButton);
  expect(onHide).toHaveBeenCalled();

  // ...which then closes the modal by nulling the dataset.
  rerender(
    <Wrapper dataset={null} onHide={onHide} onDuplicate={onDuplicate} />,
  );
  await waitFor(() => {
    expect(screen.queryByText('Duplicate dataset')).not.toBeVisible();
  });

  // Reopening with the same dataset starts from an empty input.
  rerender(
    <Wrapper dataset={mockDataset} onHide={onHide} onDuplicate={onDuplicate} />,
  );
  const reopenedInput = await screen.findByTestId('duplicate-modal-input');
  expect(reopenedInput).toHaveValue('');
});

test('input field clears when new dataset is provided', async () => {
  const onHide = jest.fn();
  const onDuplicate = jest.fn();
  const { rerender } = renderModal(mockDataset, onHide, onDuplicate);

  const nameInput = await screen.findByTestId('duplicate-modal-input');
  await userEvent.type(nameInput, 'old_name');
  expect(nameInput).toHaveValue('old_name');

  // Switching to a different dataset must reset the typed name.
  const newDataset: VirtualDatasetFixture = {
    ...mockDataset,
    id: 2,
    table_name: 'different_dataset',
  };
  rerender(
    <Wrapper dataset={newDataset} onHide={onHide} onDuplicate={onDuplicate} />,
  );

  await waitFor(() => {
    expect(nameInput).toHaveValue('');
  });
});
|
||||
@@ -0,0 +1,315 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { renderHook } from '@testing-library/react-hooks';
|
||||
import { SupersetClient, JsonResponse } from '@superset-ui/core';
|
||||
import rison from 'rison';
|
||||
import useDatasetsList from './useDatasetLists';
|
||||
|
||||
const mockAddDangerToast = jest.fn();
|
||||
jest.mock('src/components/MessageToasts/actions', () => ({
|
||||
addDangerToast: (msg: string) => mockAddDangerToast(msg),
|
||||
}));
|
||||
|
||||
// Typed response helper to consolidate mocking boilerplate
|
||||
// Uses 'as unknown as JsonResponse' because we're intentionally mocking
|
||||
// only the json field without the full Response object for test simplicity
|
||||
const buildSupersetResponse = <T>(data: { count: number; result: T[] }) =>
|
||||
({
|
||||
json: data,
|
||||
}) as unknown as JsonResponse;
|
||||
|
||||
// Shared test fixtures
|
||||
const mockDb = {
|
||||
id: 1,
|
||||
database_name: 'test_db',
|
||||
owners: [1] as [number],
|
||||
};
|
||||
|
||||
const mockDatasets = [
|
||||
{ id: 1, table_name: 'table1', schema: 'public' },
|
||||
{ id: 2, table_name: 'table2', schema: 'public' },
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
test('useDatasetsList fetches first page of datasets successfully', async () => {
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockResolvedValue(
|
||||
buildSupersetResponse({ count: 2, result: mockDatasets }),
|
||||
);
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'public'),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.datasets).toEqual(mockDatasets));
|
||||
|
||||
expect(result.current.datasetNames).toEqual(['table1', 'table2']);
|
||||
expect(getSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('useDatasetsList fetches multiple pages (pagination) until count reached', async () => {
|
||||
const page1Data = [
|
||||
{ id: 1, table_name: 'table1', schema: 'public' },
|
||||
{ id: 2, table_name: 'table2', schema: 'public' },
|
||||
];
|
||||
const page2Data = [{ id: 3, table_name: 'table3', schema: 'public' }];
|
||||
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockResolvedValueOnce(
|
||||
buildSupersetResponse({ count: 3, result: page1Data }),
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
buildSupersetResponse({ count: 3, result: page2Data }),
|
||||
);
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'public'),
|
||||
);
|
||||
|
||||
await waitFor(() =>
|
||||
expect(result.current.datasets).toEqual([...page1Data, ...page2Data]),
|
||||
);
|
||||
|
||||
expect(result.current.datasetNames).toEqual(['table1', 'table2', 'table3']);
|
||||
expect(getSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
test('useDatasetsList extracts dataset names correctly', async () => {
|
||||
const datasets = [
|
||||
{ id: 1, table_name: 'users' },
|
||||
{ id: 2, table_name: 'orders' },
|
||||
{ id: 3, table_name: 'products' },
|
||||
];
|
||||
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockResolvedValue(buildSupersetResponse({ count: 3, result: datasets }));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'public'),
|
||||
);
|
||||
|
||||
await waitFor(() =>
|
||||
expect(result.current.datasetNames).toEqual([
|
||||
'users',
|
||||
'orders',
|
||||
'products',
|
||||
]),
|
||||
);
|
||||
|
||||
expect(getSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('useDatasetsList handles API 500 error gracefully', async () => {
|
||||
// Mock error on first call, then return empty result to break the loop
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockRejectedValueOnce(new Error('Internal Server Error'))
|
||||
.mockResolvedValueOnce(buildSupersetResponse({ count: 0, result: [] }));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'public'),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.datasets).toEqual([]));
|
||||
|
||||
expect(result.current.datasetNames).toEqual([]);
|
||||
expect(mockAddDangerToast).toHaveBeenCalledWith(
|
||||
'There was an error fetching dataset',
|
||||
);
|
||||
// Should be called twice - once for error, once to complete
|
||||
expect(getSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
test('useDatasetsList handles empty dataset response', async () => {
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockResolvedValue(buildSupersetResponse({ count: 0, result: [] }));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'public'),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.datasets).toEqual([]));
|
||||
|
||||
expect(result.current.datasetNames).toEqual([]);
|
||||
expect(getSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('useDatasetsList stops pagination when results reach count', async () => {
|
||||
// First page returns 2 items, second page returns empty (no more results)
|
||||
const datasets = [
|
||||
{ id: 1, table_name: 'table1' },
|
||||
{ id: 2, table_name: 'table2' },
|
||||
];
|
||||
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockResolvedValueOnce(
|
||||
buildSupersetResponse({ count: 2, result: datasets }),
|
||||
)
|
||||
.mockResolvedValueOnce(buildSupersetResponse({ count: 2, result: [] }));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'public'),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.datasets).toHaveLength(2));
|
||||
|
||||
expect(result.current.datasetNames).toEqual(['table1', 'table2']);
|
||||
// Should stop after results.length >= count
|
||||
expect(getSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('useDatasetsList resets datasets when schema changes', async () => {
|
||||
const publicDatasets = [
|
||||
{ id: 1, table_name: 'public_table1' },
|
||||
{ id: 2, table_name: 'public_table2' },
|
||||
];
|
||||
const privateDatasets = [{ id: 3, table_name: 'private_table1' }];
|
||||
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockResolvedValueOnce(
|
||||
buildSupersetResponse({ count: 2, result: publicDatasets }),
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
buildSupersetResponse({ count: 1, result: privateDatasets }),
|
||||
);
|
||||
|
||||
const { result, waitFor, rerender } = renderHook(
|
||||
({ db, schema }) => useDatasetsList(db, schema),
|
||||
{
|
||||
initialProps: { db: mockDb, schema: 'public' },
|
||||
},
|
||||
);
|
||||
|
||||
await waitFor(() =>
|
||||
expect(result.current.datasetNames).toEqual([
|
||||
'public_table1',
|
||||
'public_table2',
|
||||
]),
|
||||
);
|
||||
|
||||
// Change schema
|
||||
rerender({ db: mockDb, schema: 'private' });
|
||||
|
||||
// Should have new datasets from private schema
|
||||
await waitFor(() =>
|
||||
expect(result.current.datasetNames).toEqual(['private_table1']),
|
||||
);
|
||||
|
||||
expect(getSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
test('useDatasetsList handles network timeout gracefully', async () => {
|
||||
// Mock timeout/abort error (status: 0)
|
||||
const timeoutError = new Error('Network timeout');
|
||||
(timeoutError as any).status = 0;
|
||||
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockRejectedValueOnce(timeoutError)
|
||||
.mockResolvedValueOnce(buildSupersetResponse({ count: 0, result: [] }));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'public'),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.datasets).toEqual([]));
|
||||
|
||||
expect(result.current.datasetNames).toEqual([]);
|
||||
expect(mockAddDangerToast).toHaveBeenCalledWith(
|
||||
'There was an error fetching dataset',
|
||||
);
|
||||
expect(getSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
test('useDatasetsList skips fetching when schema is null or undefined', () => {
|
||||
const getSpy = jest.spyOn(SupersetClient, 'get');
|
||||
|
||||
// Test with null schema
|
||||
const { result: resultNull, rerender } = renderHook(
|
||||
({ db, schema }) => useDatasetsList(db, schema),
|
||||
{ initialProps: { db: mockDb, schema: null as unknown as string } },
|
||||
);
|
||||
|
||||
// Schema is null - should NOT call API
|
||||
expect(getSpy).not.toHaveBeenCalled();
|
||||
expect(resultNull.current.datasets).toEqual([]);
|
||||
expect(resultNull.current.datasetNames).toEqual([]);
|
||||
|
||||
// Change to undefined - still should NOT call API
|
||||
rerender({ db: mockDb, schema: undefined as unknown as string });
|
||||
expect(getSpy).not.toHaveBeenCalled();
|
||||
expect(resultNull.current.datasets).toEqual([]);
|
||||
expect(resultNull.current.datasetNames).toEqual([]);
|
||||
});
|
||||
|
||||
test('useDatasetsList encodes schemas with spaces and special characters in endpoint URL', async () => {
|
||||
const getSpy = jest
|
||||
.spyOn(SupersetClient, 'get')
|
||||
.mockResolvedValue(buildSupersetResponse({ count: 0, result: [] }));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetsList(mockDb, 'sales analytics'),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.datasets).toEqual([]));
|
||||
|
||||
// Verify API was called with encoded schema
|
||||
expect(getSpy).toHaveBeenCalledTimes(1);
|
||||
const callArg = getSpy.mock.calls[0]?.[0]?.endpoint;
|
||||
expect(callArg).toBeDefined();
|
||||
|
||||
// Verify the encoded schema is present in the URL (double-encoded by rison)
|
||||
// Schema 'sales analytics' -> encodeURIComponent -> 'sales%20analytics' -> rison.encode_uri -> 'sales%2520analytics'
|
||||
expect(callArg).toContain('sales%2520analytics');
|
||||
|
||||
// Decode rison to verify filter structure
|
||||
const risonParam = callArg!.split('?q=')[1];
|
||||
|
||||
interface RisonFilter {
|
||||
col: string;
|
||||
opr: string;
|
||||
value: string;
|
||||
}
|
||||
|
||||
interface RisonQuery {
|
||||
filters: RisonFilter[];
|
||||
}
|
||||
|
||||
const decoded = rison.decode(decodeURIComponent(risonParam)) as RisonQuery;
|
||||
|
||||
// After rison decoding, the schema should be the encoded version (encodeURIComponent output)
|
||||
expect(decoded.filters[1]).toEqual({
|
||||
col: 'schema',
|
||||
opr: 'eq',
|
||||
value: 'sales%20analytics', // This is what encodeURIComponent produces
|
||||
});
|
||||
});
|
||||
383
superset-frontend/src/hooks/apiResources/datasets.test.ts
Normal file
383
superset-frontend/src/hooks/apiResources/datasets.test.ts
Normal file
@@ -0,0 +1,383 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { renderHook } from '@testing-library/react-hooks';
|
||||
import { JsonResponse } from '@superset-ui/core';
|
||||
import { Dataset } from 'src/components/Chart/types';
|
||||
import {
|
||||
cachedSupersetGet,
|
||||
supersetGetCache,
|
||||
} from 'src/utils/cachedSupersetGet';
|
||||
import {
|
||||
getDatasetId,
|
||||
createVerboseMap,
|
||||
useDatasetDrillInfo,
|
||||
} from './datasets';
|
||||
|
||||
jest.mock('src/utils/cachedSupersetGet', () => ({
|
||||
cachedSupersetGet: jest.fn(),
|
||||
supersetGetCache: {
|
||||
delete: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Mock getExtensionsRegistry at module level - returns undefined by default
|
||||
const mockGetExtensionsRegistry = jest.fn(() => ({ get: () => undefined }));
|
||||
jest.mock('@superset-ui/core', () => ({
|
||||
...jest.requireActual('@superset-ui/core'),
|
||||
getExtensionsRegistry: () => mockGetExtensionsRegistry(),
|
||||
}));
|
||||
|
||||
const mockedCachedSupersetGet = jest.mocked(cachedSupersetGet);
|
||||
const mockedSupersetGetCacheDelete = jest.mocked(supersetGetCache.delete);
|
||||
|
||||
// Typed response helper to consolidate mocking boilerplate
|
||||
// Uses 'as unknown as JsonResponse' because we're intentionally mocking
|
||||
// only the json field without the full Response object for test simplicity
|
||||
const buildCachedResponse = <T>(result: T) =>
|
||||
({
|
||||
json: { result },
|
||||
}) as unknown as JsonResponse;
|
||||
|
||||
test('getDatasetId extracts numeric ID from string datasource ID', () => {
|
||||
expect(getDatasetId('123__table')).toBe(123);
|
||||
expect(getDatasetId('456__another_table')).toBe(456);
|
||||
});
|
||||
|
||||
test('getDatasetId handles numeric datasource ID', () => {
|
||||
expect(getDatasetId(789)).toBe(789);
|
||||
expect(getDatasetId(0)).toBe(0);
|
||||
});
|
||||
|
||||
test('createVerboseMap creates verbose_map from columns', () => {
|
||||
const dataset = {
|
||||
columns: [
|
||||
{ column_name: 'col1', verbose_name: 'Column 1' },
|
||||
{ column_name: 'col2', verbose_name: 'Column 2' },
|
||||
{ column_name: 'col3' }, // no verbose_name
|
||||
],
|
||||
metrics: [],
|
||||
} as Dataset;
|
||||
|
||||
const verboseMap = createVerboseMap(dataset);
|
||||
|
||||
expect(verboseMap).toEqual({
|
||||
col1: 'Column 1',
|
||||
col2: 'Column 2',
|
||||
col3: 'col3', // falls back to column_name
|
||||
});
|
||||
});
|
||||
|
||||
test('createVerboseMap creates verbose_map from metrics', () => {
|
||||
// Partial dataset with only metrics - createVerboseMap doesn't require full Dataset
|
||||
const dataset = {
|
||||
columns: [],
|
||||
metrics: [
|
||||
{ metric_name: 'metric1', verbose_name: 'Metric 1' },
|
||||
{ metric_name: 'metric2', verbose_name: 'Metric 2' },
|
||||
{ metric_name: 'metric3' }, // no verbose_name
|
||||
],
|
||||
} as unknown as Dataset;
|
||||
|
||||
const verboseMap = createVerboseMap(dataset);
|
||||
|
||||
expect(verboseMap).toEqual({
|
||||
metric1: 'Metric 1',
|
||||
metric2: 'Metric 2',
|
||||
metric3: 'metric3', // falls back to metric_name
|
||||
});
|
||||
});
|
||||
|
||||
test('createVerboseMap creates verbose_map from both columns and metrics', () => {
|
||||
const dataset = {
|
||||
columns: [{ column_name: 'col1', verbose_name: 'Column 1' }],
|
||||
metrics: [{ metric_name: 'metric1', verbose_name: 'Metric 1' }],
|
||||
} as Dataset;
|
||||
|
||||
const verboseMap = createVerboseMap(dataset);
|
||||
|
||||
expect(verboseMap).toEqual({
|
||||
col1: 'Column 1',
|
||||
metric1: 'Metric 1',
|
||||
});
|
||||
});
|
||||
|
||||
test('createVerboseMap handles undefined dataset', () => {
|
||||
const verboseMap = createVerboseMap(undefined);
|
||||
expect(verboseMap).toEqual({});
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo fetches dataset drill info successfully', async () => {
|
||||
const mockDataset = {
|
||||
id: 123,
|
||||
columns: [{ column_name: 'col1', verbose_name: 'Column 1' }],
|
||||
metrics: [{ metric_name: 'metric1', verbose_name: 'Metric 1' }],
|
||||
};
|
||||
|
||||
mockedCachedSupersetGet.mockResolvedValue(buildCachedResponse(mockDataset));
|
||||
|
||||
const { result, waitFor } = renderHook(() => useDatasetDrillInfo(123, 456));
|
||||
|
||||
expect(result.current.status).toBe('loading');
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
expect(result.current.result).toEqual({
|
||||
...mockDataset,
|
||||
verbose_map: {
|
||||
col1: 'Column 1',
|
||||
metric1: 'Metric 1',
|
||||
},
|
||||
});
|
||||
expect(result.current.error).toBeNull();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo handles network errors', async () => {
|
||||
mockedCachedSupersetGet.mockRejectedValue(new Error('Network error'));
|
||||
|
||||
const { result, waitFor } = renderHook(() => useDatasetDrillInfo(123, 456));
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('error'));
|
||||
|
||||
expect(result.current.result).toBeNull();
|
||||
expect(result.current.error).toBeInstanceOf(Error);
|
||||
expect(result.current.error?.message).toBe('Network error');
|
||||
expect(mockedSupersetGetCacheDelete).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo skips fetch when skip is true', async () => {
|
||||
const { result } = renderHook(() =>
|
||||
useDatasetDrillInfo(123, 456, undefined, true),
|
||||
);
|
||||
|
||||
// Should immediately return complete status without fetching
|
||||
expect(result.current.status).toBe('complete');
|
||||
expect(result.current.result).toEqual({});
|
||||
expect(result.current.error).toBeNull();
|
||||
|
||||
// Verify no API call was made
|
||||
expect(mockedCachedSupersetGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo extracts dataset ID from string format', async () => {
|
||||
const mockDataset = {
|
||||
id: 123,
|
||||
columns: [],
|
||||
metrics: [],
|
||||
};
|
||||
|
||||
mockedCachedSupersetGet.mockResolvedValue(buildCachedResponse(mockDataset));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetDrillInfo('123__table', 456),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
expect(mockedCachedSupersetGet).toHaveBeenCalledWith({
|
||||
endpoint: '/api/v1/dataset/123/drill_info/?q=(dashboard_id:456)',
|
||||
});
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo does not clear cache on successful fetch', async () => {
|
||||
const mockDataset = {
|
||||
id: 123,
|
||||
columns: [],
|
||||
metrics: [],
|
||||
};
|
||||
|
||||
mockedCachedSupersetGet.mockResolvedValue(buildCachedResponse(mockDataset));
|
||||
|
||||
const { result, waitFor } = renderHook(() => useDatasetDrillInfo(123, 456));
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
// Cache should NOT be deleted on success
|
||||
expect(mockedSupersetGetCacheDelete).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo creates new verbose_map from columns and metrics', async () => {
|
||||
const mockDataset = {
|
||||
id: 123,
|
||||
verbose_map: { old_key: 'Old Value' }, // Existing verbose_map will be replaced
|
||||
columns: [{ column_name: 'col1', verbose_name: 'Column 1' }],
|
||||
metrics: [{ metric_name: 'metric1', verbose_name: 'Metric 1' }],
|
||||
};
|
||||
|
||||
mockedCachedSupersetGet.mockResolvedValue(buildCachedResponse(mockDataset));
|
||||
|
||||
const { result, waitFor } = renderHook(() => useDatasetDrillInfo(123, 456));
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
// Verify verbose_map is created from columns/metrics (existing verbose_map replaced)
|
||||
expect(result.current.result?.verbose_map).toEqual({
|
||||
col1: 'Column 1',
|
||||
metric1: 'Metric 1',
|
||||
});
|
||||
// Old key should not be present
|
||||
expect(result.current.result?.verbose_map).not.toHaveProperty('old_key');
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo handles NaN datasource ID from malformed string', async () => {
|
||||
mockedCachedSupersetGet.mockResolvedValue(
|
||||
buildCachedResponse({ id: NaN, columns: [], metrics: [] }),
|
||||
);
|
||||
|
||||
const { result, waitFor } = renderHook(() => useDatasetDrillInfo('abc', 456));
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
// Verify hook calls endpoint with NaN (API will handle validation)
|
||||
expect(mockedCachedSupersetGet).toHaveBeenCalledWith({
|
||||
endpoint: '/api/v1/dataset/NaN/drill_info/?q=(dashboard_id:456)',
|
||||
});
|
||||
});
|
||||
|
||||
test('getDatasetId handles non-numeric string ID', () => {
|
||||
const result = getDatasetId('abc');
|
||||
expect(result).toBeNaN();
|
||||
});
|
||||
|
||||
test('getDatasetId handles empty string ID', () => {
|
||||
const result = getDatasetId('');
|
||||
expect(result).toBe(0);
|
||||
});
|
||||
|
||||
test('getDatasetId handles string with trailing underscores', () => {
|
||||
const result = getDatasetId('123__');
|
||||
expect(result).toBe(123);
|
||||
});
|
||||
|
||||
// Extension tests - mock setup/teardown for extension registry
|
||||
const mockExtension = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
// Configure the module-level mock to return our extension for extension tests
|
||||
mockGetExtensionsRegistry.mockReturnValue({
|
||||
get: jest.fn((key: string) =>
|
||||
key === 'load.drillby.options' ? mockExtension : undefined,
|
||||
) as any,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore default behavior to prevent test pollution
|
||||
mockGetExtensionsRegistry.mockReturnValue({ get: () => undefined });
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo fetches dataset via extension when extension and formData provided', async () => {
|
||||
const mockFormData = {
|
||||
viz_type: 'table',
|
||||
datasource: '123__table',
|
||||
adhoc_filters: [],
|
||||
};
|
||||
const mockDataset = {
|
||||
id: 123,
|
||||
columns: [{ column_name: 'col1', verbose_name: 'Column 1' }],
|
||||
metrics: [{ metric_name: 'metric1', verbose_name: 'Metric 1' }],
|
||||
};
|
||||
|
||||
mockExtension.mockResolvedValue(buildCachedResponse(mockDataset));
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetDrillInfo(123, 456, mockFormData),
|
||||
);
|
||||
|
||||
expect(result.current.status).toBe('loading');
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
// Verify extension was called with correct arguments
|
||||
expect(mockExtension).toHaveBeenCalledWith(123, mockFormData);
|
||||
|
||||
// Verify result contains dataset with verbose_map
|
||||
expect(result.current.result).toEqual({
|
||||
...mockDataset,
|
||||
verbose_map: {
|
||||
col1: 'Column 1',
|
||||
metric1: 'Metric 1',
|
||||
},
|
||||
});
|
||||
expect(result.current.error).toBeNull();
|
||||
|
||||
// Verify cachedSupersetGet was NOT called (extension path bypasses REST API)
|
||||
expect(mockedCachedSupersetGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo handles extension throwing error', async () => {
|
||||
const mockFormData = { viz_type: 'table', datasource: '123__table' };
|
||||
const extensionError = new Error('Extension failed');
|
||||
|
||||
mockExtension.mockRejectedValue(extensionError);
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetDrillInfo(123, 456, mockFormData),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('error'));
|
||||
|
||||
// Verify error state
|
||||
expect(result.current.result).toBeNull();
|
||||
expect(result.current.error).toBeInstanceOf(Error);
|
||||
expect(result.current.error?.message).toBe('Extension failed');
|
||||
|
||||
// Verify REST API was not called
|
||||
expect(mockedCachedSupersetGet).not.toHaveBeenCalled();
|
||||
|
||||
// Verify cache is NOT deleted for extension errors (extensions don't use cache)
|
||||
expect(mockedSupersetGetCacheDelete).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo handles extension returning malformed payload with undefined result', async () => {
|
||||
const mockFormData = { viz_type: 'table', datasource: '123__table' };
|
||||
|
||||
// Extension returns undefined instead of expected shape
|
||||
mockExtension.mockResolvedValue(undefined);
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetDrillInfo(123, 456, mockFormData),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
// Hook should handle gracefully and set result with empty verbose_map
|
||||
expect(result.current.result).toEqual({ verbose_map: {} });
|
||||
expect(result.current.error).toBeNull();
|
||||
});
|
||||
|
||||
test('useDatasetDrillInfo handles extension returning malformed payload with missing json.result', async () => {
|
||||
const mockFormData = { viz_type: 'table', datasource: '123__table' };
|
||||
|
||||
// Extension returns object but missing json.result
|
||||
mockExtension.mockResolvedValue({ json: {} });
|
||||
|
||||
const { result, waitFor } = renderHook(() =>
|
||||
useDatasetDrillInfo(123, 456, mockFormData),
|
||||
);
|
||||
|
||||
await waitFor(() => expect(result.current.status).toBe('complete'));
|
||||
|
||||
// Hook should handle gracefully - undefined result gets empty verbose_map
|
||||
expect(result.current.result).toEqual({ verbose_map: {} });
|
||||
expect(result.current.error).toBeNull();
|
||||
});
|
||||
@@ -0,0 +1,485 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { act, cleanup, screen, waitFor, within } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import fetchMock from 'fetch-mock';
|
||||
import rison from 'rison';
|
||||
import { ComponentType } from 'react';
|
||||
import {
|
||||
setupMocks,
|
||||
renderDatasetList,
|
||||
waitForDatasetsPageReady,
|
||||
mockAdminUser,
|
||||
mockDatasets,
|
||||
setupDeleteMocks,
|
||||
mockRelatedCharts,
|
||||
mockRelatedDashboards,
|
||||
mockHandleResourceExport,
|
||||
API_ENDPOINTS,
|
||||
} from './DatasetList.testHelpers';
|
||||
|
||||
jest.mock('src/utils/export');
|
||||
|
||||
// Mock withToasts HOC to be a passthrough so we can spy on toast calls
|
||||
jest.mock('src/components/MessageToasts/withToasts', () => ({
|
||||
__esModule: true,
|
||||
default: <P extends object>(Component: ComponentType<P>) => Component,
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
setupMocks();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Wait for any pending state updates to complete before cleanup
|
||||
await act(async () => {
|
||||
await new Promise(resolve => setTimeout(resolve, 0));
|
||||
});
|
||||
cleanup();
|
||||
fetchMock.reset();
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
test('typing in search updates the input value correctly', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('search-filter-container')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const searchContainer = screen.getByTestId('search-filter-container');
|
||||
const searchInput = within(searchContainer).getByRole('textbox');
|
||||
|
||||
// Type search query
|
||||
await userEvent.type(searchInput, 'sales');
|
||||
|
||||
// Verify input value is updated
|
||||
await waitFor(() => {
|
||||
expect(searchInput).toHaveValue('sales');
|
||||
});
|
||||
});
|
||||
|
||||
test('typing in search triggers debounced API call with search filter', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('search-filter-container')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const searchContainer = screen.getByTestId('search-filter-container');
|
||||
const searchInput = within(searchContainer).getByRole('textbox');
|
||||
|
||||
// Record initial API calls
|
||||
const initialCallCount = fetchMock.calls(API_ENDPOINTS.DATASETS).length;
|
||||
|
||||
// Type search query and submit with Enter to trigger the debounced fetch
|
||||
await userEvent.type(searchInput, 'sales{enter}');
|
||||
|
||||
// Wait for debounced API call
|
||||
await waitFor(
|
||||
() => {
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
expect(calls.length).toBeGreaterThan(initialCallCount);
|
||||
},
|
||||
{ timeout: 5000 },
|
||||
);
|
||||
|
||||
// Verify the latest API call includes search filter in URL
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
const latestCall = calls[calls.length - 1];
|
||||
const url = latestCall[0] as string;
|
||||
|
||||
// URL should contain filters parameter with search term
|
||||
expect(url).toContain('filters');
|
||||
const risonPayload = url.split('?q=')[1];
|
||||
expect(risonPayload).toBeTruthy();
|
||||
const decoded = rison.decode(decodeURIComponent(risonPayload!)) as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
const filters = Array.isArray(decoded?.filters) ? decoded.filters : [];
|
||||
const hasSalesFilter = filters.some(
|
||||
(filter: Record<string, unknown>) =>
|
||||
typeof filter?.value === 'string' &&
|
||||
filter.value.toLowerCase().includes('sales'),
|
||||
);
|
||||
expect(hasSalesFilter).toBe(true);
|
||||
});
|
||||
|
||||
test('500 error triggers danger toast with error message', async () => {
|
||||
const addDangerToast = jest.fn();
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{
|
||||
status: 500,
|
||||
body: { message: 'Internal Server Error' },
|
||||
},
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
// Pass toast spy directly via props to bypass withToasts HOC
|
||||
renderDatasetList(mockAdminUser, {
|
||||
addDangerToast,
|
||||
addSuccessToast: jest.fn(),
|
||||
});
|
||||
|
||||
// Verify component renders despite error
|
||||
await waitForDatasetsPageReady();
|
||||
|
||||
// Verify danger toast called with error information
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(addDangerToast).toHaveBeenCalled();
|
||||
},
|
||||
{ timeout: 5000 },
|
||||
);
|
||||
|
||||
// Verify toast message contains error keywords
|
||||
expect(addDangerToast.mock.calls.length).toBeGreaterThan(0);
|
||||
const toastMessage = String(addDangerToast.mock.calls[0][0]);
|
||||
expect(
|
||||
toastMessage.includes('error') ||
|
||||
toastMessage.includes('Error') ||
|
||||
toastMessage.includes('500') ||
|
||||
toastMessage.includes('Internal Server'),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
test('network timeout triggers danger toast', async () => {
|
||||
const addDangerToast = jest.fn();
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ throws: new Error('Network timeout') },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
// Pass toast spy directly via props to bypass withToasts HOC
|
||||
renderDatasetList(mockAdminUser, {
|
||||
addDangerToast,
|
||||
addSuccessToast: jest.fn(),
|
||||
});
|
||||
|
||||
// Verify component renders despite error
|
||||
await waitForDatasetsPageReady();
|
||||
|
||||
// Verify danger toast called with timeout message
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(addDangerToast).toHaveBeenCalled();
|
||||
},
|
||||
{ timeout: 5000 },
|
||||
);
|
||||
|
||||
// Verify toast message contains timeout/network keywords
|
||||
expect(addDangerToast.mock.calls.length).toBeGreaterThan(0);
|
||||
const toastMessage = String(addDangerToast.mock.calls[0][0]);
|
||||
expect(
|
||||
toastMessage.includes('timeout') ||
|
||||
toastMessage.includes('Timeout') ||
|
||||
toastMessage.includes('network') ||
|
||||
toastMessage.includes('Network') ||
|
||||
toastMessage.includes('error'),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
test('clicking delete opens modal with related objects count', async () => {
|
||||
const datasetToDelete = mockDatasets[0];
|
||||
|
||||
// Set up delete mocks
|
||||
setupDeleteMocks(datasetToDelete.id);
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetToDelete], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
// Wait for dataset to render
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(datasetToDelete.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Find and click delete button in the row
|
||||
const table = screen.getByTestId('listview-table');
|
||||
const datasetRow = within(table)
|
||||
.getAllByRole('row')
|
||||
.find(row => within(row).queryByText(datasetToDelete.table_name));
|
||||
expect(datasetRow).toBeTruthy();
|
||||
await userEvent.hover(datasetRow!);
|
||||
const deleteButton = within(datasetRow!).getByTestId('delete');
|
||||
|
||||
await userEvent.click(deleteButton);
|
||||
|
||||
// Verify modal opens with related objects
|
||||
const modal = await screen.findByRole('dialog');
|
||||
expect(modal).toBeInTheDocument();
|
||||
|
||||
// Check for related charts count
|
||||
expect(modal).toHaveTextContent(
|
||||
new RegExp(mockRelatedCharts.count.toString()),
|
||||
);
|
||||
// Check for related dashboards count
|
||||
expect(modal).toHaveTextContent(
|
||||
new RegExp(mockRelatedDashboards.count.toString()),
|
||||
);
|
||||
});
|
||||
|
||||
test('clicking export calls handleResourceExport with dataset ID', async () => {
|
||||
const datasetToExport = mockDatasets[0];
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetToExport], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(datasetToExport.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Find and click export button
|
||||
const table = screen.getByTestId('listview-table');
|
||||
const exportButton = await within(table).findByTestId('upload');
|
||||
|
||||
await userEvent.click(exportButton);
|
||||
|
||||
// Verify export was called with correct ID
|
||||
await waitFor(() => {
|
||||
expect(mockHandleResourceExport).toHaveBeenCalledWith(
|
||||
'dataset',
|
||||
[datasetToExport.id],
|
||||
expect.any(Function),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('clicking duplicate opens modal and submits duplicate request', async () => {
|
||||
const datasetToDuplicate = {
|
||||
...mockDatasets[1],
|
||||
kind: 'virtual',
|
||||
};
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetToDuplicate], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
fetchMock.post(
|
||||
API_ENDPOINTS.DATASET_DUPLICATE,
|
||||
{ id: 999, table_name: 'Copy of Dataset' },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
const addSuccessToast = jest.fn();
|
||||
|
||||
renderDatasetList(mockAdminUser, {
|
||||
addDangerToast: jest.fn(),
|
||||
addSuccessToast,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(datasetToDuplicate.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Track initial dataset list API calls BEFORE duplicate action
|
||||
const initialDatasetCallCount = fetchMock.calls(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
).length;
|
||||
|
||||
const row = screen.getByText(datasetToDuplicate.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
const duplicateIcon = await within(row!).findByTestId('copy');
|
||||
const duplicateButton = duplicateIcon.closest(
|
||||
'[role="button"]',
|
||||
) as HTMLElement | null;
|
||||
expect(duplicateButton).toBeTruthy();
|
||||
|
||||
await userEvent.click(duplicateButton!);
|
||||
|
||||
const modal = await screen.findByRole('dialog');
|
||||
const modalInput = within(modal).getByRole('textbox');
|
||||
await userEvent.clear(modalInput);
|
||||
await userEvent.type(modalInput, 'Copy of Dataset');
|
||||
|
||||
const confirmButton = within(modal).getByRole('button', {
|
||||
name: /duplicate/i,
|
||||
});
|
||||
await userEvent.click(confirmButton);
|
||||
|
||||
// Verify duplicate API was called with correct payload
|
||||
await waitFor(() => {
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASET_DUPLICATE);
|
||||
expect(calls.length).toBeGreaterThan(0);
|
||||
|
||||
// Verify POST body contains correct dataset info
|
||||
const requestBody = JSON.parse(calls[0][1]?.body as string);
|
||||
expect(requestBody.base_model_id).toBe(datasetToDuplicate.id);
|
||||
expect(requestBody.table_name).toBe('Copy of Dataset');
|
||||
});
|
||||
|
||||
// Verify modal closes after successful duplicate
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify refreshData() is called (observable via new dataset list API call)
|
||||
await waitFor(
|
||||
() => {
|
||||
const datasetCalls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
expect(datasetCalls.length).toBeGreaterThan(initialDatasetCallCount);
|
||||
},
|
||||
{ timeout: 3000 },
|
||||
);
|
||||
|
||||
// Note: Success toast feature not implemented (see index.tsx:718-721)
|
||||
expect(addSuccessToast).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('certified dataset shows badge and tooltip with certification details', async () => {
|
||||
const certifiedDataset = {
|
||||
...mockDatasets[1],
|
||||
extra: JSON.stringify({
|
||||
certification: {
|
||||
certified_by: 'Data Team',
|
||||
details: 'Approved for production use',
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [certifiedDataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(certifiedDataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify the row renders with the dataset
|
||||
const row = screen.getByText(certifiedDataset.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
// Find certification badge within the row (fail-fast if not found)
|
||||
const certBadge = await within(row!).findByRole('img', {
|
||||
name: /certified/i,
|
||||
});
|
||||
expect(certBadge).toBeInTheDocument();
|
||||
|
||||
// Hover to reveal tooltip
|
||||
await userEvent.hover(certBadge);
|
||||
|
||||
// Wait for tooltip content to appear
|
||||
const tooltip = await screen.findByRole('tooltip');
|
||||
expect(tooltip).toBeInTheDocument();
|
||||
expect(tooltip).toHaveTextContent(/Data Team/i);
|
||||
expect(tooltip).toHaveTextContent(/Approved for production/i);
|
||||
});
|
||||
|
||||
test('dataset with warning shows icon and tooltip with markdown content', async () => {
|
||||
const warningMessage = 'This dataset contains PII. Handle with care.';
|
||||
const datasetWithWarning = {
|
||||
...mockDatasets[2],
|
||||
extra: JSON.stringify({
|
||||
warning_markdown: warningMessage,
|
||||
}),
|
||||
};
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetWithWarning], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(datasetWithWarning.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify row exists
|
||||
const row = screen.getByText(datasetWithWarning.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
// Find warning icon within the row (fail-fast if not found)
|
||||
const warningIcon = await within(row!).findByRole('img', {
|
||||
name: /warning/i,
|
||||
});
|
||||
expect(warningIcon).toBeInTheDocument();
|
||||
|
||||
// Hover to reveal tooltip with markdown content
|
||||
await userEvent.hover(warningIcon);
|
||||
|
||||
// Wait for tooltip to appear with warning text
|
||||
const tooltip = await screen.findByRole('tooltip');
|
||||
expect(tooltip).toBeInTheDocument();
|
||||
expect(tooltip).toHaveTextContent(/PII/i);
|
||||
expect(tooltip).toHaveTextContent(/Handle with care/i);
|
||||
});
|
||||
|
||||
test('dataset name links to Explore with correct URL and accessible label', async () => {
|
||||
const dataset = mockDatasets[0];
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [dataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(dataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Find the dataset row and scope the link query to it
|
||||
const row = screen.getByText(dataset.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
// Find the internal link within the dataset row (fail-fast if not found)
|
||||
const exploreLink = within(row!).getByTestId('internal-link');
|
||||
expect(exploreLink).toBeInTheDocument();
|
||||
|
||||
// Verify link has correct href to Explore page
|
||||
expect(exploreLink).toHaveAttribute('href', dataset.explore_url);
|
||||
expect(exploreLink).toHaveAttribute(
|
||||
'href',
|
||||
expect.stringContaining('/explore/'),
|
||||
);
|
||||
|
||||
// Verify link contains dataset ID
|
||||
expect(exploreLink).toHaveAttribute(
|
||||
'href',
|
||||
expect.stringContaining(`${dataset.id}__table`),
|
||||
);
|
||||
});
|
||||
|
||||
// Note: Component "+1" tests for state persistence through operations have been
|
||||
// moved to DatasetList.listview.test.tsx where they can use the reliable selectOption helper.
|
||||
@@ -0,0 +1,211 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { cleanup, screen, waitFor, within } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import fetchMock from 'fetch-mock';
|
||||
import rison from 'rison';
|
||||
import { selectOption } from 'spec/helpers/testing-library';
|
||||
import {
|
||||
setupMocks,
|
||||
renderDatasetList,
|
||||
mockAdminUser,
|
||||
mockDatasets,
|
||||
setupBulkDeleteMocks,
|
||||
API_ENDPOINTS,
|
||||
} from './DatasetList.testHelpers';
|
||||
|
||||
/**
|
||||
* Integration Contract Tests
|
||||
*
|
||||
* These tests verify multi-component orchestration that cannot be tested
|
||||
* in component isolation. Unlike component tests which mock all dependencies,
|
||||
* integration tests use real Redux/React Query/Router state management.
|
||||
*
|
||||
* Only 2 tests are needed here - most workflows are covered by component "+1" tests.
|
||||
*/
|
||||
|
||||
jest.mock('src/utils/export');
|
||||
|
||||
beforeEach(() => {
|
||||
setupMocks();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
fetchMock.reset();
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
test('ListView provider correctly merges filter + sort + pagination state on refetch', async () => {
|
||||
// This test verifies that when multiple state sources are combined,
|
||||
// the ListView provider correctly merges them for the API call.
|
||||
// Component tests verify individual pieces persist; this verifies they COMBINE correctly.
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: mockDatasets, count: mockDatasets.length },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('listview-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// 1. Apply a sort by clicking Name header
|
||||
const table = screen.getByTestId('listview-table');
|
||||
const nameHeader = within(table).getByRole('columnheader', { name: /Name/i });
|
||||
|
||||
await userEvent.click(nameHeader);
|
||||
|
||||
// 2. Apply a filter using selectOption helper
|
||||
const beforeFilterCallCount = fetchMock.calls(API_ENDPOINTS.DATASETS).length;
|
||||
await selectOption('Virtual', 'Type');
|
||||
|
||||
// Wait for filter API call to complete
|
||||
await waitFor(() => {
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
expect(calls.length).toBeGreaterThan(beforeFilterCallCount);
|
||||
});
|
||||
|
||||
// 3. Verify the final API call contains ALL three state pieces merged correctly
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
const latestCall = calls[calls.length - 1];
|
||||
const url = latestCall[0] as string;
|
||||
|
||||
// Decode the rison payload
|
||||
const risonPayload = url.split('?q=')[1];
|
||||
expect(risonPayload).toBeTruthy();
|
||||
const decoded = rison.decode(decodeURIComponent(risonPayload!)) as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
|
||||
// Verify ALL three pieces of state are present and merged:
|
||||
// 1. Sort (order_column)
|
||||
expect(decoded?.order_column).toBeTruthy();
|
||||
|
||||
// 2. Filter (filters array)
|
||||
const filters = Array.isArray(decoded?.filters) ? decoded.filters : [];
|
||||
const hasTypeFilter = filters.some(
|
||||
(filter: Record<string, unknown>) =>
|
||||
filter?.col === 'sql' && filter?.value === false,
|
||||
);
|
||||
expect(hasTypeFilter).toBe(true);
|
||||
|
||||
// 3. Pagination (page_size is present with default value)
|
||||
expect(decoded?.page_size).toBeTruthy();
|
||||
|
||||
// This confirms ListView provider merges state from multiple sources correctly
|
||||
});
|
||||
|
||||
test('bulk action orchestration: selection → action → cleanup cycle works correctly', async () => {
|
||||
// This test verifies the full bulk operation cycle across multiple components:
|
||||
// 1. Bulk mode UI (selection state)
|
||||
// 2. Bulk action handler (delete operation)
|
||||
// 3. Selection cleanup (state reset)
|
||||
|
||||
setupBulkDeleteMocks();
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: mockDatasets, count: mockDatasets.length },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('listview-table')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// 1. Enter bulk mode and select items
|
||||
const bulkSelectButton = screen.getByRole('button', {
|
||||
name: /bulk select/i,
|
||||
});
|
||||
await userEvent.click(bulkSelectButton);
|
||||
|
||||
await waitFor(() => {
|
||||
const checkboxes = screen.getAllByRole('checkbox');
|
||||
expect(checkboxes.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
// Select first 2 items (skip select-all checkbox at index 0)
|
||||
const checkboxes = screen.getAllByRole('checkbox');
|
||||
await userEvent.click(checkboxes[1]);
|
||||
await userEvent.click(checkboxes[2]);
|
||||
|
||||
// Wait for selections to register - assert on "selected" text which is what users see
|
||||
await screen.findByText(/selected/i);
|
||||
|
||||
// 2. Execute bulk delete
|
||||
// Multiple bulk actions share the same test ID, so filter by text content
|
||||
const bulkActionButtons = await screen.findAllByTestId('bulk-select-action');
|
||||
const bulkDeleteButton = bulkActionButtons.find(btn =>
|
||||
btn.textContent?.includes('Delete'),
|
||||
);
|
||||
expect(bulkDeleteButton).toBeTruthy();
|
||||
await userEvent.click(bulkDeleteButton!);
|
||||
|
||||
// Confirm in modal - type DELETE to enable button
|
||||
const modal = await screen.findByRole('dialog');
|
||||
const confirmInput = within(modal).getByTestId('delete-modal-input');
|
||||
await userEvent.clear(confirmInput);
|
||||
await userEvent.type(confirmInput, 'DELETE');
|
||||
|
||||
// Capture datasets call count before confirming
|
||||
const datasetsCallCountBeforeDelete = fetchMock.calls(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
).length;
|
||||
|
||||
const confirmButton = within(modal)
|
||||
.getAllByRole('button', { name: /^delete$/i })
|
||||
.pop();
|
||||
await userEvent.click(confirmButton!);
|
||||
|
||||
// 3. Wait for bulk delete API call to be made
|
||||
await waitFor(() => {
|
||||
const deleteCalls = fetchMock.calls(API_ENDPOINTS.DATASET_BULK_DELETE);
|
||||
expect(deleteCalls.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
// Wait for modal to close
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Wait for datasets refetch after delete
|
||||
await waitFor(() => {
|
||||
const datasetsCallCount = fetchMock.calls(API_ENDPOINTS.DATASETS).length;
|
||||
expect(datasetsCallCount).toBeGreaterThan(datasetsCallCountBeforeDelete);
|
||||
});
|
||||
|
||||
// 4. Verify selection count shows 0 (selections cleared but still in bulk mode)
|
||||
// After bulk delete, items are deselected but bulk mode may remain active
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('bulk-select-copy')).toHaveTextContent(
|
||||
/0 selected/i,
|
||||
);
|
||||
});
|
||||
|
||||
// This confirms the full bulk operation cycle coordinates correctly:
|
||||
// selection state → action handler → list refresh → state cleanup
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,394 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { cleanup, screen, waitFor, within } from '@testing-library/react';
|
||||
import fetchMock from 'fetch-mock';
|
||||
import {
|
||||
setupMocks,
|
||||
setupApiPermissions,
|
||||
renderDatasetList,
|
||||
mockAdminUser,
|
||||
mockReadOnlyUser,
|
||||
mockWriteUser,
|
||||
mockExportOnlyUser,
|
||||
mockDatasets,
|
||||
API_ENDPOINTS,
|
||||
} from './DatasetList.testHelpers';
|
||||
|
||||
beforeEach(() => {
|
||||
setupMocks();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
fetchMock.reset();
|
||||
});
|
||||
|
||||
test('admin users see all UI elements', async () => {
|
||||
// Setup API with full admin permissions
|
||||
setupApiPermissions(['can_read', 'can_write', 'can_export', 'can_duplicate']);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
expect(await screen.findByText('Datasets')).toBeInTheDocument();
|
||||
|
||||
// Admin should see create button
|
||||
expect(screen.getByRole('button', { name: /dataset/i })).toBeInTheDocument();
|
||||
|
||||
// Admin should see import button
|
||||
// Note: Using testId - import button lacks accessible text content
|
||||
// TODO: Add aria-label or text to import button
|
||||
expect(screen.getByTestId('import-button')).toBeInTheDocument();
|
||||
|
||||
// Admin should see bulk select button
|
||||
expect(
|
||||
screen.getByRole('button', { name: /bulk select/i }),
|
||||
).toBeInTheDocument();
|
||||
|
||||
// Admin should see actions column
|
||||
await waitFor(() => {
|
||||
const table = screen.getByTestId('listview-table');
|
||||
expect(
|
||||
within(table).getByRole('columnheader', { name: /Actions/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test('read-only users cannot see Actions column', async () => {
|
||||
// Setup API with read-only permissions
|
||||
setupApiPermissions(['can_read']);
|
||||
|
||||
renderDatasetList(mockReadOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
const table = screen.getByTestId('listview-table');
|
||||
// Actions column should not be present
|
||||
expect(within(table).queryByText(/Actions/i)).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test('read-only users cannot see bulk select button', async () => {
|
||||
// Setup API with read-only permissions
|
||||
setupApiPermissions(['can_read']);
|
||||
|
||||
renderDatasetList(mockReadOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Bulk select should not be visible
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /bulk select/i }),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('read-only users cannot see Create/Import buttons', async () => {
|
||||
// Setup API with read-only permissions
|
||||
setupApiPermissions(['can_read']);
|
||||
|
||||
renderDatasetList(mockReadOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Create button should not be visible
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /dataset/i }),
|
||||
).not.toBeInTheDocument();
|
||||
|
||||
// Import button should not be visible
|
||||
// Note: Using testId - import button lacks accessible text content
|
||||
// TODO: Add aria-label or text to import button
|
||||
expect(screen.queryByTestId('import-button')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('write users see Actions column', async () => {
|
||||
// Setup API with write permissions
|
||||
setupApiPermissions(['can_read', 'can_write', 'can_export']);
|
||||
|
||||
renderDatasetList(mockWriteUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
const table = screen.getByTestId('listview-table');
|
||||
expect(
|
||||
within(table).getByRole('columnheader', { name: /Actions/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test('write users see bulk select button', async () => {
|
||||
// Setup API with write permissions
|
||||
setupApiPermissions(['can_read', 'can_write', 'can_export']);
|
||||
|
||||
renderDatasetList(mockWriteUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(
|
||||
screen.getByRole('button', { name: /bulk select/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('write users see Create/Import buttons', async () => {
|
||||
// Setup API with write permissions
|
||||
setupApiPermissions(['can_read', 'can_write', 'can_export']);
|
||||
|
||||
renderDatasetList(mockWriteUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Create button should be visible
|
||||
expect(screen.getByRole('button', { name: /dataset/i })).toBeInTheDocument();
|
||||
|
||||
// Import button should be visible
|
||||
// Note: Using testId - import button lacks accessible text content
|
||||
// TODO: Add aria-label or text to import button
|
||||
expect(screen.getByTestId('import-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('export-only users see bulk select (for export only)', async () => {
|
||||
// Setup API with export-only permissions
|
||||
setupApiPermissions(['can_read', 'can_export']);
|
||||
|
||||
renderDatasetList(mockExportOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Export users should see bulk select for export functionality
|
||||
expect(
|
||||
screen.getByRole('button', { name: /bulk select/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('export-only users cannot see Create/Import buttons', async () => {
|
||||
// Setup API with export-only permissions
|
||||
setupApiPermissions(['can_read', 'can_export']);
|
||||
|
||||
renderDatasetList(mockExportOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Create and Import should not be visible for export-only users
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /dataset/i }),
|
||||
).not.toBeInTheDocument();
|
||||
// Note: Using testId - import button lacks accessible text content
|
||||
// TODO: Add aria-label or text to import button
|
||||
expect(screen.queryByTestId('import-button')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('action buttons respect user permissions', async () => {
|
||||
// Setup API with full admin permissions
|
||||
setupApiPermissions(['can_read', 'can_write', 'can_export', 'can_duplicate']);
|
||||
|
||||
const dataset = mockDatasets[0];
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [dataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(dataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Admin should see action buttons in the row
|
||||
const row = screen.getByText(dataset.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
// Verify specific action buttons are present
|
||||
const deleteButton = within(row!).queryByTestId('delete');
|
||||
const exportButton = within(row!).queryByTestId('upload');
|
||||
|
||||
expect(deleteButton).toBeInTheDocument();
|
||||
expect(exportButton).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('read-only user sees no delete or duplicate buttons in row', async () => {
|
||||
// Setup API with read-only permissions
|
||||
setupApiPermissions(['can_read']);
|
||||
|
||||
const dataset = mockDatasets[0];
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [dataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockReadOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(dataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Find the dataset row
|
||||
const row = screen.getByText(dataset.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
// Verify no delete button in the row
|
||||
const deleteButton = within(row!).queryByTestId('delete');
|
||||
expect(deleteButton).not.toBeInTheDocument();
|
||||
|
||||
// Verify no duplicate button (Actions column should not exist)
|
||||
const duplicateButton = within(row!).queryByTestId('copy');
|
||||
expect(duplicateButton).not.toBeInTheDocument();
|
||||
|
||||
// Verify no edit button
|
||||
const editButton = within(row!).queryByTestId('edit');
|
||||
expect(editButton).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('write user sees edit, delete, and export actions', async () => {
|
||||
// Setup API with write permissions (includes delete)
|
||||
// Note: can_write grants both edit and delete permissions in DatasetList
|
||||
setupApiPermissions(['can_read', 'can_write', 'can_export']);
|
||||
|
||||
const dataset = {
|
||||
...mockDatasets[0],
|
||||
owners: [{ id: mockWriteUser.userId, username: 'writeuser' }],
|
||||
};
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [dataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockWriteUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(dataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const row = screen.getByText(dataset.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
// Should have delete button (can_write includes delete)
|
||||
const deleteButton = within(row!).getByTestId('delete');
|
||||
expect(deleteButton).toBeInTheDocument();
|
||||
|
||||
// Should have export button
|
||||
const exportButton = within(row!).getByTestId('upload');
|
||||
expect(exportButton).toBeInTheDocument();
|
||||
|
||||
// Should have edit button (user is owner)
|
||||
const editButton = within(row!).getByTestId('edit');
|
||||
expect(editButton).toBeInTheDocument();
|
||||
|
||||
// Should NOT have duplicate button (no can_duplicate permission)
|
||||
const duplicateButton = within(row!).queryByTestId('copy');
|
||||
expect(duplicateButton).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('export-only user has no Actions column (no write/duplicate permissions)', async () => {
|
||||
// Setup API with export-only permissions
|
||||
// Note: Export action alone doesn't render Actions column - it's in toolbar/bulk select
|
||||
setupApiPermissions(['can_read', 'can_export']);
|
||||
|
||||
const dataset = mockDatasets[0];
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [dataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockExportOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(dataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
const row = screen.getByText(dataset.table_name).closest('tr');
|
||||
expect(row).toBeInTheDocument();
|
||||
|
||||
// Actions column is hidden when user only has export permission
|
||||
// (export is available via bulk select toolbar, not row actions)
|
||||
const deleteButton = within(row!).queryByTestId('delete');
|
||||
expect(deleteButton).not.toBeInTheDocument();
|
||||
|
||||
const editButton = within(row!).queryByTestId('edit');
|
||||
expect(editButton).not.toBeInTheDocument();
|
||||
|
||||
const duplicateButton = within(row!).queryByTestId('copy');
|
||||
expect(duplicateButton).not.toBeInTheDocument();
|
||||
|
||||
const exportButton = within(row!).queryByTestId('upload');
|
||||
expect(exportButton).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('user with can_duplicate sees duplicate button only for virtual datasets', async () => {
|
||||
// Setup API with duplicate permission
|
||||
setupApiPermissions(['can_read', 'can_duplicate']);
|
||||
|
||||
const physicalDataset = mockDatasets[0]; // kind: 'physical'
|
||||
const virtualDataset = mockDatasets[1]; // kind: 'virtual'
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [physicalDataset, virtualDataset], count: 2 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(physicalDataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Check physical dataset row
|
||||
const physicalRow = screen
|
||||
.getByText(physicalDataset.table_name)
|
||||
.closest('tr');
|
||||
expect(physicalRow).toBeInTheDocument();
|
||||
|
||||
// Physical dataset should NOT have duplicate button
|
||||
const physicalDuplicateButton = within(physicalRow!).queryByTestId('copy');
|
||||
expect(physicalDuplicateButton).not.toBeInTheDocument();
|
||||
|
||||
// Check virtual dataset row
|
||||
const virtualRow = screen.getByText(virtualDataset.table_name).closest('tr');
|
||||
expect(virtualRow).toBeInTheDocument();
|
||||
|
||||
// Virtual dataset SHOULD have duplicate button
|
||||
const virtualDuplicateButton = within(virtualRow!).getByTestId('copy');
|
||||
expect(virtualDuplicateButton).toBeInTheDocument();
|
||||
});
|
||||
528
superset-frontend/src/pages/DatasetList/DatasetList.test.tsx
Normal file
528
superset-frontend/src/pages/DatasetList/DatasetList.test.tsx
Normal file
@@ -0,0 +1,528 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { cleanup, screen, waitFor, within } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import rison from 'rison';
|
||||
import fetchMock from 'fetch-mock';
|
||||
import {
|
||||
setupMocks,
|
||||
renderDatasetList,
|
||||
waitForDatasetsPageReady,
|
||||
mockAdminUser,
|
||||
mockReadOnlyUser,
|
||||
mockExportOnlyUser,
|
||||
mockDatasets,
|
||||
mockApiError403,
|
||||
API_ENDPOINTS,
|
||||
RisonFilter,
|
||||
} from './DatasetList.testHelpers';
|
||||
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
|
||||
beforeEach(() => {
|
||||
setupMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
fetchMock.reset();
|
||||
});
|
||||
|
||||
test('renders page with "Datasets" title', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitForDatasetsPageReady();
|
||||
});
|
||||
|
||||
test('shows loading state during initial data fetch', () => {
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
new Promise(() => {}), // Never resolves to keep loading state
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
expect(screen.getByRole('status')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('maintains component structure during loading', () => {
|
||||
fetchMock.get(API_ENDPOINTS.DATASETS, new Promise(() => {}), {
|
||||
overwriteRoutes: true,
|
||||
});
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
expect(screen.getByText('Datasets')).toBeInTheDocument();
|
||||
expect(screen.getByRole('status')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('"New Dataset" button exists (when canCreate=true)', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
expect(
|
||||
await screen.findByRole('button', { name: /dataset/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('"New Dataset" button hidden (when canCreate=false)', async () => {
|
||||
renderDatasetList(mockReadOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /dataset/i }),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test('"Import" button exists (when canCreate=true)', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
// Note: Using testId - import button lacks accessible text content
|
||||
// TODO: Add aria-label or text to import button
|
||||
expect(await screen.findByTestId('import-button')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('"Import" button opens import modal', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
// Note: Using testId - import button lacks accessible text content
|
||||
// TODO: Add aria-label or text to import button
|
||||
const importButton = await screen.findByTestId('import-button');
|
||||
expect(importButton).toBeInTheDocument();
|
||||
|
||||
await userEvent.click(importButton);
|
||||
|
||||
// Modal should appear with title - using semantic query here
|
||||
expect(await screen.findByRole('dialog')).toBeInTheDocument();
|
||||
expect(screen.getByText('Import dataset')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('"Bulk select" button exists (when canDelete || canExport)', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
expect(
|
||||
await screen.findByRole('button', { name: /bulk select/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('"Bulk select" button exists for export-only users', async () => {
|
||||
renderDatasetList(mockExportOnlyUser);
|
||||
|
||||
expect(
|
||||
await screen.findByRole('button', { name: /bulk select/i }),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('"Bulk select" button hidden for read-only users', async () => {
|
||||
renderDatasetList(mockReadOnlyUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.queryByRole('button', { name: /bulk select/i }),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test('renders Name search filter', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
// Note: Using testId - search input lacks accessible label
|
||||
// TODO: Add aria-label to search input
|
||||
expect(
|
||||
await screen.findByTestId('search-filter-container'),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('renders Type filter (Virtual/Physical dropdown)', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
// Filter dropdowns should be present
|
||||
const filters = await screen.findAllByRole('combobox');
|
||||
expect(filters.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('handles datasets with missing fields and renders gracefully', async () => {
|
||||
const datasetWithMissingFields = {
|
||||
id: 999,
|
||||
table_name: 'Incomplete Dataset',
|
||||
kind: 'physical',
|
||||
schema: null,
|
||||
database: {
|
||||
id: '1',
|
||||
database_name: 'PostgreSQL',
|
||||
},
|
||||
owners: [],
|
||||
changed_by_name: 'Unknown',
|
||||
changed_by: null,
|
||||
changed_on_delta_humanized: 'Unknown',
|
||||
explore_url: '/explore/?datasource=999__table',
|
||||
extra: JSON.stringify({}),
|
||||
sql: null,
|
||||
};
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetWithMissingFields], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Incomplete Dataset')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify empty owners renders without crashing (no FacePile)
|
||||
const table = screen.getByRole('table');
|
||||
expect(table).toBeInTheDocument();
|
||||
|
||||
// Verify the row exists even with missing data
|
||||
const datasetRow = screen.getByText('Incomplete Dataset').closest('tr');
|
||||
expect(datasetRow).toBeInTheDocument();
|
||||
|
||||
// Verify no certification badge or warning icon (extra is empty)
|
||||
expect(
|
||||
screen.queryByRole('img', { name: /certified/i }),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('handles empty results (shows empty state)', async () => {
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [], count: 0 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
// Datasets heading should still be present
|
||||
expect(await screen.findByText('Datasets')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('makes correct initial API call on load', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
expect(calls.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('API call includes correct page size', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
expect(calls.length).toBeGreaterThan(0);
|
||||
const url = calls[0][0] as string;
|
||||
expect(url).toContain('page_size');
|
||||
});
|
||||
});
|
||||
|
||||
test('typing in name filter updates input value and triggers API with decoded search filter', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
const searchContainer = await screen.findByTestId('search-filter-container');
|
||||
const searchInput = within(searchContainer).getByRole('textbox');
|
||||
|
||||
// Record initial API calls
|
||||
const initialCallCount = fetchMock.calls(API_ENDPOINTS.DATASETS).length;
|
||||
|
||||
// Type in search box and press Enter to trigger search
|
||||
await userEvent.type(searchInput, 'sales{enter}');
|
||||
|
||||
// Verify input value updated
|
||||
await waitFor(() => {
|
||||
expect(searchInput).toHaveValue('sales');
|
||||
});
|
||||
|
||||
// Wait for API call after Enter key press
|
||||
await waitFor(
|
||||
() => {
|
||||
const calls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
expect(calls.length).toBeGreaterThan(initialCallCount);
|
||||
|
||||
// Get latest API call
|
||||
const url = calls[calls.length - 1][0] as string;
|
||||
|
||||
// Verify URL contains search filter
|
||||
expect(url).toContain('filters');
|
||||
|
||||
// Extract and decode rison query param
|
||||
const queryString = url.split('?q=')[1];
|
||||
expect(queryString).toBeTruthy();
|
||||
|
||||
// Decode the rison payload
|
||||
const decoded = rison.decode(decodeURIComponent(queryString)) as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
|
||||
// Verify filter structure contains table_name search
|
||||
expect(decoded.filters).toBeDefined();
|
||||
expect(Array.isArray(decoded.filters)).toBe(true);
|
||||
|
||||
// Check for sales filter in the filters array
|
||||
const filters = decoded.filters as RisonFilter[];
|
||||
const hasSalesFilter = filters.some(
|
||||
(filter: RisonFilter) =>
|
||||
filter.col === 'table_name' &&
|
||||
filter.opr === 'ct' &&
|
||||
typeof filter.value === 'string' &&
|
||||
filter.value.toLowerCase().includes('sales'),
|
||||
);
|
||||
expect(hasSalesFilter).toBe(true);
|
||||
},
|
||||
{ timeout: 5000 },
|
||||
);
|
||||
});
|
||||
|
||||
test('toggling bulk select mode shows checkboxes', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
const bulkSelectButton = await screen.findByRole('button', {
|
||||
name: /bulk select/i,
|
||||
});
|
||||
|
||||
await userEvent.click(bulkSelectButton);
|
||||
|
||||
await waitFor(() => {
|
||||
// When bulk select is active, checkboxes should appear
|
||||
const checkboxes = screen.queryAllByRole('checkbox');
|
||||
expect(checkboxes.length).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
|
||||
test('handles 500 error on initial load without crashing', async () => {
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ throws: new Error('Internal Server Error') },
|
||||
{
|
||||
overwriteRoutes: true,
|
||||
},
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser, {
|
||||
addDangerToast: jest.fn(),
|
||||
addSuccessToast: jest.fn(),
|
||||
});
|
||||
|
||||
// Component should still render without crashing
|
||||
await waitForDatasetsPageReady();
|
||||
});
|
||||
|
||||
test('handles 403 error on _info endpoint and disables create actions', async () => {
|
||||
const addDangerToast = jest.fn();
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASETS_INFO, mockApiError403, {
|
||||
overwriteRoutes: true,
|
||||
});
|
||||
|
||||
renderDatasetList(mockAdminUser, {
|
||||
addDangerToast,
|
||||
addSuccessToast: jest.fn(),
|
||||
});
|
||||
|
||||
await waitForDatasetsPageReady();
|
||||
|
||||
// Verify bulk actions are disabled/hidden when permissions fail
|
||||
await waitFor(() => {
|
||||
const bulkSelectButton = screen.queryByRole('button', {
|
||||
name: /bulk select/i,
|
||||
});
|
||||
// Bulk select should not appear without proper permissions
|
||||
expect(bulkSelectButton).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test('handles network timeout without crashing', async () => {
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ throws: new Error('Network timeout') },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser, {
|
||||
addDangerToast: jest.fn(),
|
||||
addSuccessToast: jest.fn(),
|
||||
});
|
||||
|
||||
// Component should not crash
|
||||
await waitForDatasetsPageReady();
|
||||
});
|
||||
|
||||
test('component requires explicit mocks for all API endpoints', async () => {
|
||||
// Use standard mocks
|
||||
setupMocks();
|
||||
|
||||
// Clear call history to start fresh
|
||||
fetchMock.resetHistory();
|
||||
|
||||
// Render component with standard setup
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
// Wait for initial data load
|
||||
await waitForDatasetsPageReady();
|
||||
|
||||
// Verify that critical endpoints were called and had mocks available
|
||||
const newDatasetsCalls = fetchMock.calls(API_ENDPOINTS.DATASETS);
|
||||
const newInfoCalls = fetchMock.calls(API_ENDPOINTS.DATASETS_INFO);
|
||||
|
||||
// These should have been called during render
|
||||
expect(newDatasetsCalls.length).toBeGreaterThan(0);
|
||||
expect(newInfoCalls.length).toBeGreaterThan(0);
|
||||
|
||||
// Verify no unmatched calls (all endpoints were mocked)
|
||||
const unmatchedCalls = fetchMock.calls(false); // false = unmatched only
|
||||
expect(unmatchedCalls.length).toBe(0);
|
||||
});
|
||||
|
||||
test('selecting Database filter triggers API call with database relation filter', async () => {
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitForDatasetsPageReady();
|
||||
|
||||
const filtersContainers = screen.getAllByRole('combobox');
|
||||
expect(filtersContainers.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('renders datasets with certification data', async () => {
|
||||
const certifiedDataset = {
|
||||
...mockDatasets[1], // mockDatasets[1] has certification
|
||||
extra: JSON.stringify({
|
||||
certification: {
|
||||
certified_by: 'Data Team',
|
||||
details: 'Approved for production',
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [certifiedDataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(certifiedDataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify the dataset row renders successfully
|
||||
const datasetRow = screen
|
||||
.getByText(certifiedDataset.table_name)
|
||||
.closest('tr');
|
||||
expect(datasetRow).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('displays datasets with warning_markdown', async () => {
|
||||
const warningText = 'This dataset contains PII. Handle with care.';
|
||||
const datasetWithWarning = {
|
||||
...mockDatasets[2], // mockDatasets[2] has warning
|
||||
extra: JSON.stringify({
|
||||
warning_markdown: warningText,
|
||||
}),
|
||||
};
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetWithWarning], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(datasetWithWarning.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify the dataset row exists
|
||||
const datasetRow = screen
|
||||
.getByText(datasetWithWarning.table_name)
|
||||
.closest('tr');
|
||||
expect(datasetRow).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('displays dataset with multiple owners', async () => {
|
||||
const datasetWithOwners = mockDatasets[1]; // Has 2 owners: Jane Smith, Bob Jones
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetWithOwners], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(datasetWithOwners.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify row exists with the dataset
|
||||
const datasetRow = screen
|
||||
.getByText(datasetWithOwners.table_name)
|
||||
.closest('tr');
|
||||
expect(datasetRow).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('displays ModifiedInfo with humanized date', async () => {
|
||||
const datasetWithModified = mockDatasets[0]; // changed_by_name: 'John Doe', changed_on: '1 day ago'
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [datasetWithModified], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.getByText(datasetWithModified.table_name),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Verify humanized date appears (ModifiedInfo component renders it)
|
||||
expect(
|
||||
screen.getByText(datasetWithModified.changed_on_delta_humanized),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
test('dataset name links to Explore with correct explore_url', async () => {
|
||||
const dataset = mockDatasets[0]; // explore_url: '/explore/?datasource=1__table'
|
||||
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS,
|
||||
{ result: [dataset], count: 1 },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
renderDatasetList(mockAdminUser);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(dataset.table_name)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// Find the dataset name link (should be a link role)
|
||||
const exploreLink = screen.getByRole('link', { name: dataset.table_name });
|
||||
expect(exploreLink).toBeInTheDocument();
|
||||
expect(exploreLink).toHaveAttribute('href', dataset.explore_url);
|
||||
});
|
||||
@@ -0,0 +1,539 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
import fetchMock from 'fetch-mock';
|
||||
import { render, screen } from 'spec/helpers/testing-library';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import { configureStore } from '@reduxjs/toolkit';
|
||||
import { QueryParamProvider } from 'use-query-params';
|
||||
import DatasetList from 'src/pages/DatasetList';
|
||||
import handleResourceExport from 'src/utils/export';
|
||||
|
||||
export const mockHandleResourceExport =
|
||||
handleResourceExport as jest.MockedFunction<typeof handleResourceExport>;
|
||||
|
||||
// Type definitions for test helpers
|
||||
export interface UserState {
|
||||
userId: string | number;
|
||||
firstName: string;
|
||||
lastName: string;
|
||||
[key: string]: unknown; // Allow additional properties like roles
|
||||
}
|
||||
|
||||
export interface RisonFilter {
|
||||
col: string;
|
||||
opr: string;
|
||||
value: string | number | boolean;
|
||||
}
|
||||
|
||||
// Test-only dataset type that matches the VirtualDataset interface from index.tsx
|
||||
// Includes extra/sql fields that exist in actual API responses
|
||||
export interface DatasetFixture {
|
||||
id: number;
|
||||
table_name: string;
|
||||
kind: string;
|
||||
schema: string;
|
||||
database: {
|
||||
id: string;
|
||||
database_name: string;
|
||||
};
|
||||
owners: Array<{ first_name: string; last_name: string; id: number }>;
|
||||
changed_by_name: string;
|
||||
changed_by: {
|
||||
first_name: string;
|
||||
last_name: string;
|
||||
id: number;
|
||||
};
|
||||
changed_on_delta_humanized: string;
|
||||
explore_url: string;
|
||||
extra: string; // JSON-serialized metadata (always present in API)
|
||||
sql: string | null; // SQL query for virtual datasets
|
||||
description?: string; // Optional description field
|
||||
}
|
||||
|
||||
interface StoreState {
|
||||
user?: UserState;
|
||||
common?: {
|
||||
conf?: {
|
||||
SUPERSET_WEBSERVER_TIMEOUT?: number;
|
||||
PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET?: boolean;
|
||||
};
|
||||
};
|
||||
datasets?: {
|
||||
datasetList?: typeof mockDatasets;
|
||||
};
|
||||
}
|
||||
|
||||
interface DatasetListPropsOverrides {
|
||||
addDangerToast?: (msg: string) => void;
|
||||
addSuccessToast?: (msg: string) => void;
|
||||
user?: UserState;
|
||||
}
|
||||
|
||||
export const mockDatasets: DatasetFixture[] = [
|
||||
{
|
||||
id: 1,
|
||||
table_name: 'public.sales_data',
|
||||
kind: 'physical',
|
||||
schema: 'public',
|
||||
database: {
|
||||
id: '1',
|
||||
database_name: 'PostgreSQL',
|
||||
},
|
||||
owners: [{ first_name: 'John', last_name: 'Doe', id: 1 }],
|
||||
changed_by_name: 'John Doe',
|
||||
changed_by: {
|
||||
first_name: 'John',
|
||||
last_name: 'Doe',
|
||||
id: 1,
|
||||
},
|
||||
changed_on_delta_humanized: '1 day ago',
|
||||
explore_url: '/explore/?datasource=1__table',
|
||||
extra: JSON.stringify({}),
|
||||
sql: null,
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
table_name: 'Analytics Query',
|
||||
kind: 'virtual',
|
||||
schema: 'analytics',
|
||||
database: {
|
||||
id: '2',
|
||||
database_name: 'MySQL',
|
||||
},
|
||||
owners: [
|
||||
{ first_name: 'Jane', last_name: 'Smith', id: 2 },
|
||||
{ first_name: 'Bob', last_name: 'Jones', id: 3 },
|
||||
],
|
||||
changed_by_name: 'Jane Smith',
|
||||
changed_by: {
|
||||
first_name: 'Jane',
|
||||
last_name: 'Smith',
|
||||
id: 2,
|
||||
},
|
||||
changed_on_delta_humanized: '2 hours ago',
|
||||
explore_url: '/explore/?datasource=2__table',
|
||||
extra: JSON.stringify({
|
||||
certification: {
|
||||
certified_by: 'Data Team',
|
||||
details: 'Approved for production use',
|
||||
},
|
||||
}),
|
||||
sql: 'SELECT * FROM analytics_table WHERE date >= current_date - 30',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
table_name: 'Customer Metrics',
|
||||
kind: 'virtual',
|
||||
schema: 'metrics',
|
||||
database: {
|
||||
id: '1',
|
||||
database_name: 'PostgreSQL',
|
||||
},
|
||||
owners: [],
|
||||
changed_by_name: 'System',
|
||||
changed_by: {
|
||||
first_name: 'System',
|
||||
last_name: 'User',
|
||||
id: 999,
|
||||
},
|
||||
changed_on_delta_humanized: '5 days ago',
|
||||
explore_url: '/explore/?datasource=3__table',
|
||||
extra: JSON.stringify({
|
||||
warning_markdown: 'This dataset contains PII. Handle with care.',
|
||||
}),
|
||||
sql: 'SELECT customer_id, COUNT(*) FROM orders GROUP BY customer_id',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
table_name: 'public.product_catalog',
|
||||
kind: 'physical',
|
||||
schema: 'public',
|
||||
database: {
|
||||
id: '3',
|
||||
database_name: 'Redshift',
|
||||
},
|
||||
owners: [{ first_name: 'Alice', last_name: 'Johnson', id: 4 }],
|
||||
changed_by_name: 'Alice Johnson',
|
||||
changed_by: {
|
||||
first_name: 'Alice',
|
||||
last_name: 'Johnson',
|
||||
id: 4,
|
||||
},
|
||||
changed_on_delta_humanized: '3 weeks ago',
|
||||
explore_url: '/explore/?datasource=4__table',
|
||||
extra: JSON.stringify({
|
||||
certification: {
|
||||
certified_by: 'QA Team',
|
||||
details: 'Verified data quality',
|
||||
},
|
||||
warning_markdown: 'Data refreshed weekly on Sundays',
|
||||
}),
|
||||
sql: null,
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
table_name: 'Quarterly Report',
|
||||
kind: 'virtual',
|
||||
schema: 'reports',
|
||||
database: {
|
||||
id: '2',
|
||||
database_name: 'MySQL',
|
||||
},
|
||||
owners: [
|
||||
{ first_name: 'Charlie', last_name: 'Brown', id: 5 },
|
||||
{ first_name: 'David', last_name: 'Lee', id: 6 },
|
||||
{ first_name: 'Eve', last_name: 'Taylor', id: 7 },
|
||||
{ first_name: 'Frank', last_name: 'Wilson', id: 8 },
|
||||
],
|
||||
changed_by_name: 'Charlie Brown',
|
||||
changed_by: {
|
||||
first_name: 'Charlie',
|
||||
last_name: 'Brown',
|
||||
id: 5,
|
||||
},
|
||||
changed_on_delta_humanized: '1 month ago',
|
||||
explore_url: '/explore/?datasource=5__table',
|
||||
extra: JSON.stringify({}),
|
||||
sql: 'SELECT quarter, SUM(revenue) FROM sales GROUP BY quarter',
|
||||
},
|
||||
];
|
||||
|
||||
// Mock users with various permission levels
|
||||
export const mockAdminUser = {
|
||||
userId: 1,
|
||||
firstName: 'Admin',
|
||||
lastName: 'User',
|
||||
roles: {
|
||||
Admin: [
|
||||
['can_read', 'Dataset'],
|
||||
['can_write', 'Dataset'],
|
||||
['can_export', 'Dataset'],
|
||||
['can_duplicate', 'Dataset'],
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export const mockOwnerUser = {
|
||||
userId: 1,
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
roles: {
|
||||
Alpha: [
|
||||
['can_read', 'Dataset'],
|
||||
['can_write', 'Dataset'],
|
||||
['can_export', 'Dataset'],
|
||||
['can_duplicate', 'Dataset'],
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export const mockReadOnlyUser = {
|
||||
userId: 10,
|
||||
firstName: 'Read',
|
||||
lastName: 'Only',
|
||||
roles: {
|
||||
Gamma: [['can_read', 'Dataset']],
|
||||
},
|
||||
};
|
||||
|
||||
export const mockExportOnlyUser = {
|
||||
userId: 11,
|
||||
firstName: 'Export',
|
||||
lastName: 'User',
|
||||
roles: {
|
||||
Gamma: [
|
||||
['can_read', 'Dataset'],
|
||||
['can_export', 'Dataset'],
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export const mockWriteUser = {
|
||||
userId: 9,
|
||||
firstName: 'Write',
|
||||
lastName: 'User',
|
||||
roles: {
|
||||
Alpha: [
|
||||
['can_read', 'Dataset'],
|
||||
['can_write', 'Dataset'],
|
||||
['can_export', 'Dataset'],
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
// Mock related objects for delete modal
|
||||
export const mockRelatedCharts = {
|
||||
count: 3,
|
||||
result: [
|
||||
{ id: 101, slice_name: 'Sales Chart' },
|
||||
{ id: 102, slice_name: 'Revenue Chart' },
|
||||
{ id: 103, slice_name: 'Analytics Chart' },
|
||||
],
|
||||
};
|
||||
|
||||
export const mockRelatedDashboards = {
|
||||
count: 2,
|
||||
result: [
|
||||
{ id: 201, title: 'Executive Dashboard' },
|
||||
{ id: 202, title: 'Sales Dashboard' },
|
||||
],
|
||||
};
|
||||
|
||||
// Mock API error responses
|
||||
export const mockApiError500 = {
|
||||
status: 500,
|
||||
body: { message: 'Internal Server Error' },
|
||||
};
|
||||
|
||||
export const mockApiError403 = {
|
||||
status: 403,
|
||||
body: { message: 'Forbidden' },
|
||||
};
|
||||
|
||||
export const mockApiError404 = {
|
||||
status: 404,
|
||||
body: { message: 'Not Found' },
|
||||
};
|
||||
|
||||
// API endpoint constants
|
||||
export const API_ENDPOINTS = {
|
||||
DATASETS_INFO: 'glob:*/api/v1/dataset/_info*',
|
||||
DATASETS: 'glob:*/api/v1/dataset/?*',
|
||||
DATASET_GET: 'glob:*/api/v1/dataset/[0-9]*',
|
||||
DATASET_RELATED_OBJECTS: 'glob:*/api/v1/dataset/*/related_objects*',
|
||||
DATASET_DELETE: 'glob:*/api/v1/dataset/[0-9]*',
|
||||
DATASET_BULK_DELETE: 'glob:*/api/v1/dataset/?q=*', // Matches DELETE /api/v1/dataset/?q=...
|
||||
DATASET_DUPLICATE: 'glob:*/api/v1/dataset/duplicate*',
|
||||
DATASET_FAVORITE_STATUS: 'glob:*/api/v1/dataset/favorite_status*',
|
||||
DATASET_RELATED_DATABASE: 'glob:*/api/v1/dataset/related/database*',
|
||||
DATASET_RELATED_SCHEMA: 'glob:*/api/v1/dataset/distinct/schema*',
|
||||
DATASET_RELATED_OWNERS: 'glob:*/api/v1/dataset/related/owners*',
|
||||
DATASET_RELATED_CHANGED_BY: 'glob:*/api/v1/dataset/related/changed_by*',
|
||||
};
|
||||
|
||||
// Setup API permissions mock (for permission-based testing)
|
||||
export const setupApiPermissions = (permissions: string[]) => {
|
||||
fetchMock.get(
|
||||
API_ENDPOINTS.DATASETS_INFO,
|
||||
{ permissions },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
};
|
||||
|
||||
// Store utilities
|
||||
export const createMockStore = (initialState: Partial<StoreState> = {}) =>
|
||||
configureStore({
|
||||
reducer: {
|
||||
user: (state = initialState.user || {}) => state,
|
||||
common: (state = initialState.common || {}) => state,
|
||||
datasets: (state = initialState.datasets || {}) => state,
|
||||
},
|
||||
preloadedState: initialState,
|
||||
middleware: getDefaultMiddleware =>
|
||||
getDefaultMiddleware({
|
||||
serializableCheck: false,
|
||||
immutableCheck: false,
|
||||
}),
|
||||
});
|
||||
|
||||
export const createDefaultStoreState = (user: UserState): StoreState => ({
|
||||
user,
|
||||
common: {
|
||||
conf: {
|
||||
SUPERSET_WEBSERVER_TIMEOUT: 60000,
|
||||
PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET: false,
|
||||
},
|
||||
},
|
||||
datasets: {
|
||||
datasetList: mockDatasets,
|
||||
},
|
||||
});
|
||||
|
||||
export const renderDatasetList = (
|
||||
user: UserState,
|
||||
props: Partial<DatasetListPropsOverrides> = {},
|
||||
storeState: Partial<StoreState> = {},
|
||||
) => {
|
||||
const defaultStoreState = createDefaultStoreState(user);
|
||||
const storeStateWithUser = {
|
||||
...defaultStoreState,
|
||||
user,
|
||||
...storeState,
|
||||
};
|
||||
|
||||
const store = createMockStore(storeStateWithUser);
|
||||
|
||||
return render(
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<QueryParamProvider>
|
||||
<DatasetList user={user} {...props} />
|
||||
</QueryParamProvider>
|
||||
</MemoryRouter>
|
||||
</Provider>,
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper to wait for the DatasetList page to be ready
|
||||
* Waits for the "Datasets" heading to appear, indicating initial render is complete
|
||||
*/
|
||||
export const waitForDatasetsPageReady = async () => {
|
||||
await screen.findByText('Datasets');
|
||||
};
|
||||
|
||||
// Helper functions for specific operations
|
||||
export const setupDeleteMocks = (datasetId: number) => {
|
||||
fetchMock.get(
|
||||
`glob:*/api/v1/dataset/${datasetId}/related_objects*`,
|
||||
{
|
||||
charts: mockRelatedCharts,
|
||||
dashboards: mockRelatedDashboards,
|
||||
},
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
|
||||
fetchMock.delete(
|
||||
`glob:*/api/v1/dataset/${datasetId}`,
|
||||
{ message: 'Dataset deleted successfully' },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
};
|
||||
|
||||
export const setupDuplicateMocks = () => {
|
||||
fetchMock.post(
|
||||
API_ENDPOINTS.DATASET_DUPLICATE,
|
||||
{ id: 999, table_name: 'Copy of Dataset' },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
};
|
||||
|
||||
export const setupBulkDeleteMocks = () => {
|
||||
fetchMock.delete(
|
||||
API_ENDPOINTS.DATASET_BULK_DELETE,
|
||||
{ message: '3 datasets deleted successfully' },
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
};
|
||||
|
||||
// Setup error mocks for negative flow testing
|
||||
export const setupDeleteErrorMocks = (
|
||||
datasetId: number,
|
||||
statusCode: number,
|
||||
) => {
|
||||
fetchMock.get(
|
||||
`glob:*/api/v1/dataset/${datasetId}/related_objects*`,
|
||||
{
|
||||
status: statusCode,
|
||||
body: { message: 'Failed to fetch related objects' },
|
||||
},
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
};
|
||||
|
||||
export const setupDuplicateErrorMocks = (statusCode: number) => {
|
||||
fetchMock.post(
|
||||
API_ENDPOINTS.DATASET_DUPLICATE,
|
||||
{
|
||||
status: statusCode,
|
||||
body: { message: 'Failed to duplicate dataset' },
|
||||
},
|
||||
{ overwriteRoutes: true },
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper function to verify only expected API calls were made
|
||||
* Replaces global fail-fast fetchMock.catch() with test-specific assertions
|
||||
*
|
||||
* @param expectedEndpoints - Array of endpoint glob patterns that should have been called
|
||||
* @throws If any unmocked endpoints were called or expected endpoints weren't called
|
||||
*/
|
||||
export const assertOnlyExpectedCalls = (expectedEndpoints: string[]) => {
|
||||
const allCalls = fetchMock.calls(true); // Get all calls including unmatched
|
||||
const unmatchedCalls = allCalls.filter(call => call.isUnmatched);
|
||||
|
||||
if (unmatchedCalls.length > 0) {
|
||||
const unmatchedUrls = unmatchedCalls.map(call => call[0]);
|
||||
throw new Error(
|
||||
`Unmocked endpoints called: ${unmatchedUrls.join(', ')}. ` +
|
||||
'Add explicit mocks in setupMocks() or test setup.',
|
||||
);
|
||||
}
|
||||
|
||||
// Verify expected endpoints were called
|
||||
expectedEndpoints.forEach(endpoint => {
|
||||
const calls = fetchMock.calls(endpoint);
|
||||
if (calls.length === 0) {
|
||||
throw new Error(
|
||||
`Expected endpoint not called: ${endpoint}. ` +
|
||||
'Check if component logic changed or mock is incorrectly configured.',
|
||||
);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// MSW setup using fetch-mock (following ChartList pattern)
|
||||
export const setupMocks = () => {
|
||||
fetchMock.reset();
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASETS_INFO, {
|
||||
permissions: ['can_read', 'can_write', 'can_export', 'can_duplicate'],
|
||||
});
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASETS, {
|
||||
result: mockDatasets,
|
||||
count: mockDatasets.length,
|
||||
});
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASET_FAVORITE_STATUS, {
|
||||
result: [],
|
||||
});
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASET_RELATED_DATABASE, {
|
||||
result: [
|
||||
{ value: 1, text: 'PostgreSQL' },
|
||||
{ value: 2, text: 'MySQL' },
|
||||
{ value: 3, text: 'Redshift' },
|
||||
],
|
||||
count: 3,
|
||||
});
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASET_RELATED_SCHEMA, {
|
||||
result: [
|
||||
{ value: 'public', text: 'public' },
|
||||
{ value: 'analytics', text: 'analytics' },
|
||||
{ value: 'metrics', text: 'metrics' },
|
||||
{ value: 'reports', text: 'reports' },
|
||||
],
|
||||
count: 4,
|
||||
});
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASET_RELATED_OWNERS, {
|
||||
result: [],
|
||||
count: 0,
|
||||
});
|
||||
|
||||
fetchMock.get(API_ENDPOINTS.DATASET_RELATED_CHANGED_BY, {
|
||||
result: [],
|
||||
count: 0,
|
||||
});
|
||||
};
|
||||
@@ -261,9 +261,11 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
|
||||
})
|
||||
.catch(
|
||||
createErrorHandler(errMsg =>
|
||||
t(
|
||||
'An error occurred while fetching dataset related data: %s',
|
||||
errMsg,
|
||||
addDangerToast(
|
||||
t(
|
||||
'An error occurred while fetching dataset related data: %s',
|
||||
errMsg,
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
Reference in New Issue
Block a user