mirror of
https://github.com/apache/superset.git
synced 2026-05-14 12:25:19 +00:00
Compare commits
1 Commits
feat/add-d
...
fix-app-ro
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d6d1f3588a |
12
UPDATING.md
12
UPDATING.md
@@ -233,6 +233,18 @@ See `superset/mcp_service/PRODUCTION.md` for deployment guides.
|
||||
---
|
||||
|
||||
- [35621](https://github.com/apache/superset/pull/35621): The default hash algorithm has changed from MD5 to SHA-256 for improved security and FedRAMP compliance. This affects cache keys for thumbnails, dashboard digests, chart digests, and filter option names. Existing cached data will be invalidated upon upgrade. To opt out of this change and maintain backward compatibility, set `HASH_ALGORITHM = "md5"` in your `superset_config.py`.
- [31590](https://github.com/apache/superset/pull/31590): The `APP_ICON` configuration variable is now deprecated and ignored by the frontend. Custom logos should now be configured using the theme system with `brandLogoUrl`. To migrate, replace:

  ```
  APP_ICON = "/static/assets/images/custom_logo.png"
  ```

  with:

  ```
  THEME_DEFAULT = {
      "token": {
          "brandLogoUrl": "/static/assets/images/custom_logo.png"
      }
  }
  ```

- [35062](https://github.com/apache/superset/pull/35062): Changed the function signature of `setupExtensions` to `setupCodeOverrides` with options as arguments.

### Breaking Changes
@@ -1,52 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page } from '@playwright/test';
|
||||
import { Modal } from '../core';
|
||||
|
||||
/**
|
||||
* Chart properties edit modal.
|
||||
* Opened by clicking the edit icon on a chart row in the chart list.
|
||||
* General section is expanded by default (defaultActiveKey="general").
|
||||
*/
|
||||
export class ChartPropertiesModal extends Modal {
|
||||
private static readonly SELECTORS = {
|
||||
NAME_INPUT: '[data-test="properties-modal-name-input"]',
|
||||
};
|
||||
|
||||
constructor(page: Page) {
|
||||
super(page, '[data-test="properties-edit-modal"]');
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills the chart name input field
|
||||
* @param name - The new chart name
|
||||
*/
|
||||
async fillName(name: string): Promise<void> {
|
||||
const input = this.body.locator(ChartPropertiesModal.SELECTORS.NAME_INPUT);
|
||||
await input.fill(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the Save button in the modal footer
|
||||
*/
|
||||
async clickSave(): Promise<void> {
|
||||
await this.clickFooterButton('Save');
|
||||
}
|
||||
}
|
||||
@@ -19,7 +19,6 @@
|
||||
|
||||
import type { Response, APIResponse } from '@playwright/test';
|
||||
import { expect } from '@playwright/test';
|
||||
import * as unzipper from 'unzipper';
|
||||
|
||||
/**
|
||||
* Common interface for response types with status() method.
|
||||
@@ -60,60 +59,3 @@ export function expectStatusOneOf<T extends ResponseLike>(
|
||||
).toContain(response.status());
|
||||
return response;
|
||||
}
|
||||
|
||||
interface ExportZipOptions {
|
||||
/** Directory name containing resource yaml files (e.g. 'charts', 'datasets') */
|
||||
resourceDir: string;
|
||||
/** Minimum number of resource yaml files expected (default: 1) */
|
||||
minCount?: number;
|
||||
/** Regex to validate Content-Disposition header (skipped if omitted) */
|
||||
contentDispositionPattern?: RegExp;
|
||||
/** Resource names that must each appear in at least one YAML filepath */
|
||||
expectedNames?: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate an export zip response: content-type, zip structure, and resource yaml files.
|
||||
* Shared across chart and dataset export tests.
|
||||
*/
|
||||
export async function expectValidExportZip(
|
||||
response: ResponseLike,
|
||||
options: ExportZipOptions,
|
||||
): Promise<void> {
|
||||
const {
|
||||
resourceDir,
|
||||
minCount = 1,
|
||||
contentDispositionPattern,
|
||||
expectedNames,
|
||||
} = options;
|
||||
|
||||
expect(response.headers()['content-type']).toContain('application/zip');
|
||||
|
||||
if (contentDispositionPattern) {
|
||||
expect(response.headers()['content-disposition']).toMatch(
|
||||
contentDispositionPattern,
|
||||
);
|
||||
}
|
||||
|
||||
const body = await response.body();
|
||||
expect(body.length).toBeGreaterThan(0);
|
||||
|
||||
const entries: string[] = [];
|
||||
const directory = await unzipper.Open.buffer(body);
|
||||
directory.files.forEach(file => entries.push(file.path));
|
||||
|
||||
const resourceYamlFiles = entries.filter(
|
||||
entry => entry.includes(`${resourceDir}/`) && entry.endsWith('.yaml'),
|
||||
);
|
||||
expect(resourceYamlFiles.length).toBeGreaterThanOrEqual(minCount);
|
||||
expect(entries.some(entry => entry.endsWith('metadata.yaml'))).toBe(true);
|
||||
|
||||
if (expectedNames) {
|
||||
for (const name of expectedNames) {
|
||||
expect(
|
||||
resourceYamlFiles.some(f => f.includes(name)),
|
||||
`Expected export zip to contain a YAML file matching "${name}"`,
|
||||
).toBe(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,104 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, APIResponse } from '@playwright/test';
|
||||
import {
|
||||
apiGet,
|
||||
apiPost,
|
||||
apiDelete,
|
||||
apiPut,
|
||||
ApiRequestOptions,
|
||||
} from './requests';
|
||||
|
||||
/** Chart-related REST endpoints, relative to the Superset base URL. */
export const ENDPOINTS = {
  CHART: 'api/v1/chart/',
  CHART_EXPORT: 'api/v1/chart/export/',
} as const;

/**
 * TypeScript interface for chart creation API payload.
 * Only slice_name, datasource_id, datasource_type are required (ChartPostSchema).
 */
export interface ChartCreatePayload {
  // Display name of the chart (required)
  slice_name: string;
  // ID of the backing datasource (required)
  datasource_id: number;
  // Datasource type, e.g. 'table' (required)
  datasource_type: string;
  // Visualization type, e.g. 'table' (optional)
  viz_type?: string;
  // JSON-encoded chart params (optional)
  params?: string;
}
|
||||
|
||||
/**
|
||||
* POST request to create a chart
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param requestBody - Chart configuration object
|
||||
* @returns API response from chart creation
|
||||
*/
|
||||
export async function apiPostChart(
|
||||
page: Page,
|
||||
requestBody: ChartCreatePayload,
|
||||
): Promise<APIResponse> {
|
||||
return apiPost(page, ENDPOINTS.CHART, requestBody);
|
||||
}
|
||||
|
||||
/**
|
||||
* GET request to fetch a chart's details
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param chartId - ID of the chart to fetch
|
||||
* @param options - Optional request options
|
||||
* @returns API response with chart details
|
||||
*/
|
||||
export async function apiGetChart(
|
||||
page: Page,
|
||||
chartId: number,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
return apiGet(page, `${ENDPOINTS.CHART}${chartId}`, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE request to remove a chart
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param chartId - ID of the chart to delete
|
||||
* @param options - Optional request options
|
||||
* @returns API response from chart deletion
|
||||
*/
|
||||
export async function apiDeleteChart(
|
||||
page: Page,
|
||||
chartId: number,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
return apiDelete(page, `${ENDPOINTS.CHART}${chartId}`, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* PUT request to update a chart
|
||||
* @param page - Playwright page instance (provides authentication context)
|
||||
* @param chartId - ID of the chart to update
|
||||
* @param data - Partial chart payload (Marshmallow allows optional fields)
|
||||
* @param options - Optional request options
|
||||
* @returns API response from chart update
|
||||
*/
|
||||
export async function apiPutChart(
|
||||
page: Page,
|
||||
chartId: number,
|
||||
data: Record<string, unknown>,
|
||||
options?: ApiRequestOptions,
|
||||
): Promise<APIResponse> {
|
||||
return apiPut(page, `${ENDPOINTS.CHART}${chartId}`, data, options);
|
||||
}
|
||||
@@ -18,7 +18,6 @@
|
||||
*/
|
||||
|
||||
import { test as base } from '@playwright/test';
|
||||
import { apiDeleteChart } from '../api/chart';
|
||||
import { apiDeleteDataset } from '../api/dataset';
|
||||
import { apiDeleteDatabase } from '../api/database';
|
||||
|
||||
@@ -27,78 +26,40 @@ import { apiDeleteDatabase } from '../api/database';
|
||||
* Inspired by Cypress's cleanDashboards/cleanCharts pattern.
|
||||
*/
|
||||
export interface TestAssets {
|
||||
trackChart(id: number): void;
|
||||
trackDataset(id: number): void;
|
||||
trackDatabase(id: number): void;
|
||||
}
|
||||
|
||||
const EXPECTED_CLEANUP_STATUSES = new Set([200, 202, 204, 404]);
|
||||
|
||||
export const test = base.extend<{ testAssets: TestAssets }>({
|
||||
testAssets: async ({ page }, use) => {
|
||||
// Use Set to de-dupe IDs (same resource may be tracked multiple times)
|
||||
const chartIds = new Set<number>();
|
||||
const datasetIds = new Set<number>();
|
||||
const databaseIds = new Set<number>();
|
||||
|
||||
await use({
|
||||
trackChart: id => chartIds.add(id),
|
||||
trackDataset: id => datasetIds.add(id),
|
||||
trackDatabase: id => databaseIds.add(id),
|
||||
});
|
||||
|
||||
// Cleanup order: charts → datasets → databases (respects FK dependencies)
|
||||
// Use failOnStatusCode: false to avoid throwing on 404 (resource already deleted by test)
|
||||
// Warn on unexpected status codes (401/403/500) that may indicate leaked state
|
||||
await Promise.all(
|
||||
[...chartIds].map(id =>
|
||||
apiDeleteChart(page, id, { failOnStatusCode: false })
|
||||
.then(response => {
|
||||
if (!EXPECTED_CLEANUP_STATUSES.has(response.status())) {
|
||||
console.warn(
|
||||
`[testAssets] Unexpected status ${response.status()} cleaning up chart ${id}`,
|
||||
);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.warn(`[testAssets] Failed to cleanup chart ${id}:`, error);
|
||||
}),
|
||||
),
|
||||
);
|
||||
// Cleanup: Delete datasets FIRST (they reference databases)
|
||||
// Then delete databases. Use failOnStatusCode: false for tolerance.
|
||||
await Promise.all(
|
||||
[...datasetIds].map(id =>
|
||||
apiDeleteDataset(page, id, { failOnStatusCode: false })
|
||||
.then(response => {
|
||||
if (!EXPECTED_CLEANUP_STATUSES.has(response.status())) {
|
||||
console.warn(
|
||||
`[testAssets] Unexpected status ${response.status()} cleaning up dataset ${id}`,
|
||||
);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
console.warn(
|
||||
`[testAssets] Failed to cleanup dataset ${id}:`,
|
||||
error,
|
||||
);
|
||||
}),
|
||||
apiDeleteDataset(page, id, { failOnStatusCode: false }).catch(error => {
|
||||
console.warn(`[testAssets] Failed to cleanup dataset ${id}:`, error);
|
||||
}),
|
||||
),
|
||||
);
|
||||
await Promise.all(
|
||||
[...databaseIds].map(id =>
|
||||
apiDeleteDatabase(page, id, { failOnStatusCode: false })
|
||||
.then(response => {
|
||||
if (!EXPECTED_CLEANUP_STATUSES.has(response.status())) {
|
||||
console.warn(
|
||||
`[testAssets] Unexpected status ${response.status()} cleaning up database ${id}`,
|
||||
);
|
||||
}
|
||||
})
|
||||
.catch(error => {
|
||||
apiDeleteDatabase(page, id, { failOnStatusCode: false }).catch(
|
||||
error => {
|
||||
console.warn(
|
||||
`[testAssets] Failed to cleanup database ${id}:`,
|
||||
error,
|
||||
);
|
||||
}),
|
||||
},
|
||||
),
|
||||
),
|
||||
);
|
||||
},
|
||||
|
||||
@@ -1,132 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { Page, Locator } from '@playwright/test';
|
||||
import { Table } from '../components/core';
|
||||
import { BulkSelect } from '../components/ListView';
|
||||
import { URL } from '../utils/urls';
|
||||
|
||||
/**
|
||||
* Chart List Page object.
|
||||
*/
|
||||
export class ChartListPage {
|
||||
private readonly page: Page;
|
||||
private readonly table: Table;
|
||||
readonly bulkSelect: BulkSelect;
|
||||
|
||||
/**
|
||||
* Action button names for getByRole('button', { name })
|
||||
* Verified: ChartList uses Icons.DeleteOutlined, Icons.UploadOutlined, Icons.EditOutlined
|
||||
*/
|
||||
private static readonly ACTION_BUTTONS = {
|
||||
DELETE: 'delete',
|
||||
EDIT: 'edit',
|
||||
EXPORT: 'upload',
|
||||
} as const;
|
||||
|
||||
constructor(page: Page) {
|
||||
this.page = page;
|
||||
this.table = new Table(page);
|
||||
this.bulkSelect = new BulkSelect(page, this.table);
|
||||
}
|
||||
|
||||
/**
|
||||
* Navigate to the chart list page.
|
||||
* Forces table view via URL parameter to avoid card view default
|
||||
* (ListviewsDefaultCardView feature flag may enable card view).
|
||||
*/
|
||||
async goto(): Promise<void> {
|
||||
await this.page.goto(`${URL.CHART_LIST}?viewMode=table`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for the table to load
|
||||
* @param options - Optional wait options
|
||||
*/
|
||||
async waitForTableLoad(options?: { timeout?: number }): Promise<void> {
|
||||
await this.table.waitForVisible(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a chart row locator by name.
|
||||
* Returns a Locator that tests can use with expect().toBeVisible(), etc.
|
||||
*
|
||||
* @param chartName - The name of the chart
|
||||
* @returns Locator for the chart row
|
||||
*/
|
||||
getChartRow(chartName: string): Locator {
|
||||
return this.table.getRow(chartName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the delete action button for a chart
|
||||
* @param chartName - The name of the chart to delete
|
||||
*/
|
||||
async clickDeleteAction(chartName: string): Promise<void> {
|
||||
const row = this.table.getRow(chartName);
|
||||
await row
|
||||
.getByRole('button', { name: ChartListPage.ACTION_BUTTONS.DELETE })
|
||||
.click();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the edit action button for a chart
|
||||
* @param chartName - The name of the chart to edit
|
||||
*/
|
||||
async clickEditAction(chartName: string): Promise<void> {
|
||||
const row = this.table.getRow(chartName);
|
||||
await row
|
||||
.getByRole('button', { name: ChartListPage.ACTION_BUTTONS.EDIT })
|
||||
.click();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the export action button for a chart
|
||||
* @param chartName - The name of the chart to export
|
||||
*/
|
||||
async clickExportAction(chartName: string): Promise<void> {
|
||||
const row = this.table.getRow(chartName);
|
||||
await row
|
||||
.getByRole('button', { name: ChartListPage.ACTION_BUTTONS.EXPORT })
|
||||
.click();
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks the "Bulk select" button to enable bulk selection mode
|
||||
*/
|
||||
async clickBulkSelectButton(): Promise<void> {
|
||||
await this.bulkSelect.enable();
|
||||
}
|
||||
|
||||
/**
|
||||
* Selects a chart's checkbox in bulk select mode
|
||||
* @param chartName - The name of the chart to select
|
||||
*/
|
||||
async selectChartCheckbox(chartName: string): Promise<void> {
|
||||
await this.bulkSelect.selectRow(chartName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clicks a bulk action button by name (e.g., "Export", "Delete")
|
||||
* @param actionName - The name of the bulk action to click
|
||||
*/
|
||||
async clickBulkAction(actionName: string): Promise<void> {
|
||||
await this.bulkSelect.clickAction(actionName);
|
||||
}
|
||||
}
|
||||
@@ -1,307 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import {
|
||||
test as testWithAssets,
|
||||
expect,
|
||||
} from '../../../helpers/fixtures/testAssets';
|
||||
import { ChartListPage } from '../../../pages/ChartListPage';
|
||||
import { ChartPropertiesModal } from '../../../components/modals/ChartPropertiesModal';
|
||||
import { DeleteConfirmationModal } from '../../../components/modals/DeleteConfirmationModal';
|
||||
import { Toast } from '../../../components/core/Toast';
|
||||
import { apiGetChart, ENDPOINTS } from '../../../helpers/api/chart';
|
||||
import { createTestChart } from './chart-test-helpers';
|
||||
import { waitForGet, waitForPut } from '../../../helpers/api/intercepts';
|
||||
import {
|
||||
expectStatusOneOf,
|
||||
expectValidExportZip,
|
||||
} from '../../../helpers/api/assertions';
|
||||
|
||||
/**
|
||||
* Extend testWithAssets with chartListPage navigation (beforeEach equivalent).
|
||||
*/
|
||||
const test = testWithAssets.extend<{ chartListPage: ChartListPage }>({
|
||||
chartListPage: async ({ page }, use) => {
|
||||
const chartListPage = new ChartListPage(page);
|
||||
await chartListPage.goto();
|
||||
await chartListPage.waitForTableLoad();
|
||||
await use(chartListPage);
|
||||
},
|
||||
});
|
||||
|
||||
test('should delete a chart with confirmation', async ({
  page,
  chartListPage,
  testAssets,
}) => {
  // Create throwaway chart for deletion
  const { id: chartId, name: chartName } = await createTestChart(
    page,
    testAssets,
    test.info(),
    { prefix: 'test_delete' },
  );

  // Refresh to see the new chart (created via API)
  await chartListPage.goto();
  await chartListPage.waitForTableLoad();

  // Verify chart is visible in list
  await expect(chartListPage.getChartRow(chartName)).toBeVisible();

  // Click delete action button
  await chartListPage.clickDeleteAction(chartName);

  // Delete confirmation modal should appear
  const deleteModal = new DeleteConfirmationModal(page);
  await deleteModal.waitForVisible();

  // Type "DELETE" to confirm
  await deleteModal.fillConfirmationInput('DELETE');

  // Click the Delete button
  await deleteModal.clickDelete();

  // Modal should close
  await deleteModal.waitForHidden();

  // Verify success toast appears
  const toast = new Toast(page);
  await expect(toast.getSuccess()).toBeVisible();

  // Verify chart is removed from list
  await expect(chartListPage.getChartRow(chartName)).not.toBeVisible();

  // Backend verification: API returns 404
  // (polled: UI removal can land slightly before the backend delete commits)
  await expect
    .poll(
      async () => {
        const response = await apiGetChart(page, chartId, {
          failOnStatusCode: false,
        });
        return response.status();
      },
      { timeout: 10000, message: `Chart ${chartId} should return 404` },
    )
    .toBe(404);
});
|
||||
|
||||
test('should edit chart name via properties modal', async ({
  page,
  chartListPage,
  testAssets,
}) => {
  // Create throwaway chart for editing
  const { id: chartId, name: chartName } = await createTestChart(
    page,
    testAssets,
    test.info(),
    { prefix: 'test_edit' },
  );

  // Refresh to see the new chart
  await chartListPage.goto();
  await chartListPage.waitForTableLoad();

  // Verify chart is visible in list
  await expect(chartListPage.getChartRow(chartName)).toBeVisible();

  // Click edit action to open properties modal
  await chartListPage.clickEditAction(chartName);

  // Wait for properties modal to be ready
  const propertiesModal = new ChartPropertiesModal(page);
  await propertiesModal.waitForReady();

  // Edit the chart name (timestamp + worker index keeps it unique in parallel runs)
  const newName = `renamed_${Date.now()}_${test.info().parallelIndex}`;
  await propertiesModal.fillName(newName);

  // Set up response intercept for save (before clicking, to avoid a race)
  const saveResponsePromise = waitForPut(page, `${ENDPOINTS.CHART}${chartId}`);

  // Click Save button
  await propertiesModal.clickSave();

  // Wait for save to complete and verify success
  expectStatusOneOf(await saveResponsePromise, [200, 201]);

  // Modal should close
  await propertiesModal.waitForHidden();

  // Verify success toast appears
  const toast = new Toast(page);
  await expect(toast.getSuccess()).toBeVisible();

  // Backend verification: API returns updated name
  const response = await apiGetChart(page, chartId);
  const chart = (await response.json()).result;
  expect(chart.slice_name).toBe(newName);
});
|
||||
|
||||
test('should export a chart as a zip file', async ({
  page,
  chartListPage,
  testAssets,
}) => {
  // Create throwaway chart for export
  const { name: chartName } = await createTestChart(
    page,
    testAssets,
    test.info(),
    { prefix: 'test_export' },
  );

  // Refresh to see the new chart
  await chartListPage.goto();
  await chartListPage.waitForTableLoad();

  // Verify chart is visible in list
  await expect(chartListPage.getChartRow(chartName)).toBeVisible();

  // Set up API response intercept for export endpoint (before the click)
  const exportResponsePromise = waitForGet(page, ENDPOINTS.CHART_EXPORT);

  // Click export action button
  await chartListPage.clickExportAction(chartName);

  // Wait for export API response and validate zip contents
  const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
  await expectValidExportZip(exportResponse, {
    resourceDir: 'charts',
    expectedNames: [chartName],
  });
});
|
||||
|
||||
test('should bulk delete multiple charts', async ({
  page,
  chartListPage,
  testAssets,
}) => {
  // Extended timeout: two chart creations plus the full bulk UI flow
  test.setTimeout(60_000);

  // Create 2 throwaway charts for bulk delete
  const [chart1, chart2] = await Promise.all([
    createTestChart(page, testAssets, test.info(), {
      prefix: 'bulk_delete_1',
    }),
    createTestChart(page, testAssets, test.info(), {
      prefix: 'bulk_delete_2',
    }),
  ]);

  // Refresh to see new charts
  await chartListPage.goto();
  await chartListPage.waitForTableLoad();

  // Verify both charts are visible in list
  await expect(chartListPage.getChartRow(chart1.name)).toBeVisible();
  await expect(chartListPage.getChartRow(chart2.name)).toBeVisible();

  // Enable bulk select mode
  await chartListPage.clickBulkSelectButton();

  // Select both charts
  await chartListPage.selectChartCheckbox(chart1.name);
  await chartListPage.selectChartCheckbox(chart2.name);

  // Click bulk delete action
  await chartListPage.clickBulkAction('Delete');

  // Delete confirmation modal should appear
  const deleteModal = new DeleteConfirmationModal(page);
  await deleteModal.waitForVisible();

  // Type "DELETE" to confirm
  await deleteModal.fillConfirmationInput('DELETE');

  // Click the Delete button
  await deleteModal.clickDelete();

  // Modal should close
  await deleteModal.waitForHidden();

  // Verify success toast appears
  const toast = new Toast(page);
  await expect(toast.getSuccess()).toBeVisible();

  // Verify both charts are removed from list
  await expect(chartListPage.getChartRow(chart1.name)).not.toBeVisible();
  await expect(chartListPage.getChartRow(chart2.name)).not.toBeVisible();

  // Backend verification: Both return 404
  for (const chart of [chart1, chart2]) {
    await expect
      .poll(
        async () => {
          const response = await apiGetChart(page, chart.id, {
            failOnStatusCode: false,
          });
          return response.status();
        },
        { timeout: 10000, message: `Chart ${chart.id} should return 404` },
      )
      .toBe(404);
  }
});
|
||||
|
||||
test('should bulk export multiple charts', async ({
  page,
  chartListPage,
  testAssets,
}) => {
  // Create 2 throwaway charts for bulk export
  const [chart1, chart2] = await Promise.all([
    createTestChart(page, testAssets, test.info(), {
      prefix: 'bulk_export_1',
    }),
    createTestChart(page, testAssets, test.info(), {
      prefix: 'bulk_export_2',
    }),
  ]);

  // Refresh to see new charts
  await chartListPage.goto();
  await chartListPage.waitForTableLoad();

  // Verify both charts are visible in list
  await expect(chartListPage.getChartRow(chart1.name)).toBeVisible();
  await expect(chartListPage.getChartRow(chart2.name)).toBeVisible();

  // Enable bulk select mode
  await chartListPage.clickBulkSelectButton();

  // Select both charts
  await chartListPage.selectChartCheckbox(chart1.name);
  await chartListPage.selectChartCheckbox(chart2.name);

  // Set up API response intercept for export endpoint (before the click)
  const exportResponsePromise = waitForGet(page, ENDPOINTS.CHART_EXPORT);

  // Click bulk export action
  await chartListPage.clickBulkAction('Export');

  // Wait for export API response and validate zip contains both charts
  const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
  await expectValidExportZip(exportResponse, {
    resourceDir: 'charts',
    minCount: 2,
    expectedNames: [chart1.name, chart2.name],
  });
});
|
||||
@@ -1,88 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import type { Page, TestInfo } from '@playwright/test';
|
||||
import type { TestAssets } from '../../../helpers/fixtures/testAssets';
|
||||
import { apiPostChart } from '../../../helpers/api/chart';
|
||||
import { getDatasetByName } from '../../../helpers/api/dataset';
|
||||
|
||||
interface TestChartResult {
|
||||
id: number;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface CreateTestChartOptions {
|
||||
/** Prefix for generated name (default: 'test_chart') */
|
||||
prefix?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a test chart via the API for E2E testing.
|
||||
* Uses the members_channels_2 dataset (loaded via --load-examples).
|
||||
*
|
||||
* @example
|
||||
* const { id, name } = await createTestChart(page, testAssets, test.info());
|
||||
*
|
||||
* @example
|
||||
* const { id, name } = await createTestChart(page, testAssets, test.info(), {
|
||||
* prefix: 'test_delete',
|
||||
* });
|
||||
*/
|
||||
export async function createTestChart(
|
||||
page: Page,
|
||||
testAssets: TestAssets,
|
||||
testInfo: TestInfo,
|
||||
options?: CreateTestChartOptions,
|
||||
): Promise<TestChartResult> {
|
||||
const prefix = options?.prefix ?? 'test_chart';
|
||||
const name = `${prefix}_${Date.now()}_${testInfo.parallelIndex}`;
|
||||
|
||||
// Look up the members_channels_2 dataset for chart creation
|
||||
const dataset = await getDatasetByName(page, 'members_channels_2');
|
||||
if (!dataset) {
|
||||
throw new Error(
|
||||
'members_channels_2 dataset not found — run Superset with --load-examples',
|
||||
);
|
||||
}
|
||||
|
||||
const response = await apiPostChart(page, {
|
||||
slice_name: name,
|
||||
datasource_id: dataset.id,
|
||||
datasource_type: 'table',
|
||||
viz_type: 'table',
|
||||
params: '{}',
|
||||
});
|
||||
|
||||
if (!response.ok()) {
|
||||
throw new Error(`Failed to create test chart: ${response.status()}`);
|
||||
}
|
||||
|
||||
const body = await response.json();
|
||||
// Handle both response shapes: { id } or { result: { id } }
|
||||
const id = body.result?.id ?? body.id;
|
||||
if (!id) {
|
||||
throw new Error(
|
||||
`Chart creation returned no id. Response: ${JSON.stringify(body)}`,
|
||||
);
|
||||
}
|
||||
|
||||
testAssets.trackChart(id);
|
||||
|
||||
return { id, name };
|
||||
}
|
||||
@@ -21,7 +21,9 @@ import {
|
||||
test as testWithAssets,
|
||||
expect,
|
||||
} from '../../../helpers/fixtures/testAssets';
|
||||
import type { Response } from '@playwright/test';
|
||||
import path from 'path';
|
||||
import * as unzipper from 'unzipper';
|
||||
import { DatasetListPage } from '../../../pages/DatasetListPage';
|
||||
import { ExplorePage } from '../../../pages/ExplorePage';
|
||||
import { ConfirmDialog } from '../../../components/modals/ConfirmDialog';
|
||||
@@ -43,10 +45,7 @@ import {
|
||||
waitForPost,
|
||||
waitForPut,
|
||||
} from '../../../helpers/api/intercepts';
|
||||
import {
|
||||
expectStatusOneOf,
|
||||
expectValidExportZip,
|
||||
} from '../../../helpers/api/assertions';
|
||||
import { expectStatusOneOf } from '../../../helpers/api/assertions';
|
||||
import { TIMEOUT } from '../../../utils/constants';
|
||||
|
||||
/**
|
||||
@@ -61,6 +60,40 @@ const test = testWithAssets.extend<{ datasetListPage: DatasetListPage }>({
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Helper to validate an export zip response.
|
||||
* Verifies headers, parses zip contents, and validates expected structure.
|
||||
*/
|
||||
async function expectValidExportZip(
|
||||
response: Response,
|
||||
options: { minDatasetCount?: number; checkContentDisposition?: boolean } = {},
|
||||
): Promise<void> {
|
||||
const { minDatasetCount = 1, checkContentDisposition = false } = options;
|
||||
|
||||
// Verify headers
|
||||
expect(response.headers()['content-type']).toContain('application/zip');
|
||||
if (checkContentDisposition) {
|
||||
expect(response.headers()['content-disposition']).toMatch(
|
||||
/filename=.*dataset_export.*\.zip/,
|
||||
);
|
||||
}
|
||||
|
||||
// Parse and validate zip contents
|
||||
const body = await response.body();
|
||||
expect(body.length).toBeGreaterThan(0);
|
||||
|
||||
const entries: string[] = [];
|
||||
const directory = await unzipper.Open.buffer(body);
|
||||
directory.files.forEach(file => entries.push(file.path));
|
||||
|
||||
// Validate structure
|
||||
const datasetYamlFiles = entries.filter(
|
||||
entry => entry.includes('datasets/') && entry.endsWith('.yaml'),
|
||||
);
|
||||
expect(datasetYamlFiles.length).toBeGreaterThanOrEqual(minDatasetCount);
|
||||
expect(entries.some(entry => entry.endsWith('metadata.yaml'))).toBe(true);
|
||||
}
|
||||
|
||||
test('should navigate to Explore when dataset name is clicked', async ({
|
||||
page,
|
||||
datasetListPage,
|
||||
@@ -253,10 +286,7 @@ test('should export a dataset as a zip file', async ({
|
||||
|
||||
// Wait for export API response and validate zip contents
|
||||
const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
|
||||
await expectValidExportZip(exportResponse, {
|
||||
resourceDir: 'datasets',
|
||||
contentDispositionPattern: /filename=.*dataset_export.*\.zip/,
|
||||
});
|
||||
await expectValidExportZip(exportResponse, { checkContentDisposition: true });
|
||||
});
|
||||
|
||||
test('should export multiple datasets via bulk select action', async ({
|
||||
@@ -297,10 +327,7 @@ test('should export multiple datasets via bulk select action', async ({
|
||||
|
||||
// Wait for export API response and validate zip contains multiple datasets
|
||||
const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
|
||||
await expectValidExportZip(exportResponse, {
|
||||
resourceDir: 'datasets',
|
||||
minCount: 2,
|
||||
});
|
||||
await expectValidExportZip(exportResponse, { minDatasetCount: 2 });
|
||||
});
|
||||
|
||||
test('should edit dataset name via modal', async ({
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 5.7 KiB |
File diff suppressed because one or more lines are too long
|
Before Width: | Height: | Size: 93 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 52 KiB |
@@ -1,34 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 355.42 184.17">
|
||||
<defs>
|
||||
<style>
|
||||
.cls-1 {
|
||||
fill: #dc150b;
|
||||
}
|
||||
|
||||
.cls-1, .cls-2, .cls-3 {
|
||||
stroke-width: 0px;
|
||||
}
|
||||
|
||||
.cls-2 {
|
||||
fill: #000;
|
||||
}
|
||||
|
||||
.cls-3 {
|
||||
fill: #fff;
|
||||
}
|
||||
</style>
|
||||
</defs>
|
||||
<g>
|
||||
<polygon class="cls-2" points="151.67 73.23 168.02 73.23 168.02 121.29 179.52 121.29 179.52 73.23 195.87 73.23 195.87 62.89 151.67 62.89 151.67 73.23"/>
|
||||
<path class="cls-2" d="M207.29,61.64c-1.75,0-3.28.62-4.56,1.85-1.28,1.23-1.94,2.77-1.94,4.57,0,1.7.65,3.19,1.93,4.45,1.28,1.25,2.81,1.89,4.56,1.89s3.19-.64,4.45-1.89,1.89-2.75,1.89-4.45c0-1.8-.62-3.33-1.85-4.56s-2.74-1.85-4.48-1.85Z"/>
|
||||
<rect class="cls-2" x="201.71" y="79.4" width="11.17" height="41.89"/>
|
||||
<path class="cls-2" d="M243.46,62.89h-20.53v58.4h20.53c8.85,0,15.88-2.64,20.9-7.85,5.01-5.21,7.56-12.39,7.56-21.35s-2.54-16.21-7.56-21.39c-5.02-5.18-12.05-7.81-20.9-7.81ZM234.43,73.23h8.62c5.72,0,10.04,1.66,12.86,4.93,2.83,3.29,4.26,7.86,4.26,13.59s-1.44,10.49-4.27,13.97c-2.82,3.46-7.14,5.22-12.86,5.22h-8.62v-37.72Z"/>
|
||||
<path class="cls-2" d="M318.75,95.24c-1.9-2.31-4.16-3.97-6.7-4.93v-.1c5.32-2.58,8.01-6.81,8.01-12.57,0-4.47-1.48-8.09-4.39-10.75-2.91-2.65-7.2-4-12.75-4h-24.08v58.4h23.92c5.88,0,10.56-1.44,13.9-4.28,3.36-2.85,5.06-6.9,5.06-12.04,0-4.07-1-7.34-2.96-9.73ZM290.08,86.58v-14.35h11.34c2.16,0,3.91.64,5.2,1.9,1.28,1.26,1.93,3.02,1.93,5.24s-.64,4-1.9,5.28c-1.26,1.28-3.07,1.93-5.4,1.93h-11.18ZM309.96,104.14c0,2.33-.71,4.23-2.1,5.65-1.39,1.42-3.45,2.14-6.1,2.14h-11.67v-16h11.67c2.54,0,4.57.77,6.02,2.3,1.45,1.53,2.19,3.52,2.19,5.9Z"/>
|
||||
</g>
|
||||
<g>
|
||||
<polygon class="cls-1" points="84.27 33.71 33.71 62.9 33.71 121.27 84.27 150.46 134.82 121.27 134.82 62.9 84.27 33.71"/>
|
||||
<polygon class="cls-3" points="67.41 121.27 67.41 82.36 50.56 92.09 50.56 72.63 84.27 53.17 101.12 62.9 84.27 72.63 84.27 131 67.41 121.27"/>
|
||||
<polygon class="cls-3" points="101.12 121.27 101.12 82.36 117.97 72.63 117.97 111.54 101.12 121.27"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 2.1 KiB |
@@ -1,16 +0,0 @@
|
||||
<svg width="2502" height="500" viewBox="0 0 2502 500" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M1731.18 88.8069C1803.56 88.8069 1849.96 145.403 1834.66 233.595C1818.86 321.787 1752.09 380.412 1680.21 380.412C1635.85 380.412 1607.81 358.487 1594.55 335.547L1574.21 451.922C1571.85 465.385 1564.82 477.585 1554.36 486.379C1543.9 495.172 1530.67 499.995 1517 500H1499.12C1498.48 500.001 1497.86 499.862 1497.28 499.594C1496.7 499.326 1496.19 498.935 1495.78 498.448C1495.38 497.962 1495.08 497.391 1494.91 496.778C1494.75 496.164 1494.72 495.521 1494.83 494.895L1564.88 97.0018C1565.05 95.9919 1565.58 95.0764 1566.37 94.4178C1567.15 93.7592 1568.14 93.4 1569.17 93.4041H1632.21C1632.84 93.403 1633.47 93.5415 1634.04 93.8097C1634.62 94.0779 1635.13 94.4693 1635.54 94.9563C1635.95 95.4432 1636.24 96.0138 1636.4 96.6276C1636.57 97.2414 1636.59 97.8836 1636.48 98.5086L1630.24 134.179C1642.94 119.955 1658.49 108.564 1675.88 100.747C1693.27 92.9293 1712.11 88.8612 1731.18 88.8069V88.8069ZM1701.61 151.522C1663.37 151.522 1621.66 181.089 1612.43 234.548C1602.75 288.084 1633.33 317.574 1672.07 317.574C1710.82 317.574 1752.12 286.977 1761.8 233.456C1770.95 180.058 1740.4 151.522 1701.62 151.522H1701.61Z" fill="#2F2D32"/>
|
||||
<path d="M1972.4 0H1909.25C1908.26 0.00280712 1907.3 0.353853 1906.54 0.991803C1905.78 1.62975 1905.27 2.51404 1905.1 3.49016L1840.34 370.203C1840.23 370.84 1840.25 371.494 1840.42 372.119C1840.59 372.744 1840.89 373.325 1841.31 373.82C1841.72 374.316 1842.24 374.714 1842.83 374.987C1843.42 375.26 1844.06 375.401 1844.7 375.4H1907.85C1908.84 375.401 1909.8 375.053 1910.56 374.417C1911.32 373.782 1911.83 372.9 1912 371.925L1976.76 5.21218C1976.87 4.57417 1976.85 3.91938 1976.68 3.29367C1976.51 2.66796 1976.21 2.08646 1975.79 1.58992C1975.38 1.09337 1974.86 0.693775 1974.27 0.419132C1973.68 0.144489 1973.04 0.00143844 1972.4 0V0Z" fill="#2F2D32"/>
|
||||
<path d="M2258.34 92.6353H2195.87C2194.83 92.6369 2193.82 93.0042 2193.02 93.673C2192.23 94.3418 2191.69 95.2696 2191.5 96.2945L2164.6 248.309C2156.91 293.742 2127.34 318.235 2089.06 318.235C2050.78 318.235 2030.36 293.742 2038.01 248.309L2064.66 97.8628C2064.77 97.2228 2064.74 96.566 2064.57 95.9384C2064.41 95.3109 2064.1 94.7277 2063.69 94.2297C2063.27 93.7318 2062.75 93.331 2062.16 93.0556C2061.57 92.7802 2060.93 92.6367 2060.28 92.6353H1997.82C1996.79 92.6344 1995.79 92.9996 1994.99 93.6662C1994.2 94.3329 1993.67 95.2583 1993.49 96.2792L1965.03 258.518C1954.64 317.051 1974.76 355.827 2013.75 371.325L2014.05 371.464L2014.88 371.756C2018.2 373.024 2021.58 374.112 2025.02 375.015C2050.41 382.242 2091.14 386.424 2145.75 354.382L2143.03 370.234C2142.92 370.868 2142.95 371.518 2143.12 372.139C2143.28 372.761 2143.58 373.337 2144 373.83C2144.41 374.322 2144.93 374.717 2145.51 374.989C2146.1 375.26 2146.73 375.4 2147.38 375.4H2209.88C2210.92 375.399 2211.92 375.034 2212.72 374.368C2213.52 373.702 2214.06 372.778 2214.24 371.756L2262.69 97.8321C2262.8 97.1961 2262.77 96.5433 2262.61 95.9194C2262.44 95.2956 2262.14 94.7158 2261.72 94.2206C2261.31 93.7255 2260.79 93.327 2260.21 93.0532C2259.62 92.7793 2258.99 92.6367 2258.34 92.6353V92.6353Z" fill="#2F2D32"/>
|
||||
<path d="M2393.84 88.0381C2325.44 88.0381 2278.99 123.263 2278.99 173.293C2278.99 265.16 2419.35 250.861 2419.35 296.802C2419.35 316.79 2397.41 325.892 2371.88 325.892C2343.64 325.892 2326.62 311.516 2326.3 290.36C2326.27 289.212 2325.8 288.121 2324.98 287.321C2324.15 286.521 2323.05 286.077 2321.9 286.085H2260.98C2259.89 286.078 2258.84 286.472 2258.02 287.192C2257.21 287.913 2256.68 288.909 2256.56 289.991C2251.27 343.035 2300.51 379.997 2366.78 379.997C2436.2 379.997 2488.24 348.355 2488.24 294.757C2488.24 206.965 2348.9 218.712 2348.9 171.248C2348.9 153.382 2366.26 142.143 2390.75 142.143C2418.99 142.143 2435.11 155.981 2435.34 177.137C2435.36 178.293 2435.83 179.394 2436.66 180.202C2437.48 181.011 2438.59 181.462 2439.75 181.458H2497.41C2498.53 181.464 2499.62 181.042 2500.44 180.276C2501.27 179.511 2501.77 178.46 2501.85 177.337C2504.91 124.908 2461.03 88.0381 2393.84 88.0381Z" fill="#2F2D32"/>
|
||||
<path d="M539.071 126.891H500.264V96.7558H539.071V0H575.325V96.7558H651.847V126.891H575.325V299.969C575.325 334.179 587.056 344.895 619.221 344.895H651.847V375.538H613.609C565.101 375.538 539.071 355.627 539.071 299.969V126.891Z" fill="#2F2D32"/>
|
||||
<path d="M688.609 96.7559H724.356V375.538H688.609V96.7559Z" fill="#2F2D32"/>
|
||||
<path d="M1170.39 216.236C1170.39 154.459 1138.22 122.801 1088.7 122.801C1038.15 122.801 1002.4 156.488 1002.4 224.477V375.615H967.174V216.236C967.174 154.459 934.502 122.801 884.979 122.801C833.918 122.801 798.694 156.488 798.694 224.4V375.538H762.947V96.7559H798.694V144.757C816.56 109.517 851.784 91.6514 891.098 91.6514C937.561 91.6514 977.383 113.607 994.234 160.071C1009.61 114.622 1050.4 91.6514 1094.82 91.6514C1156.59 91.6514 1205.61 130.458 1205.61 212.146V375.538H1170.39V216.236Z" fill="#2F2D32"/>
|
||||
<path d="M1370.88 379.612C1291.74 379.612 1234.04 324.477 1234.04 236.147C1234.04 147.309 1290.71 92.6814 1370.88 92.6814C1452.58 92.6814 1502.61 150.876 1502.61 221.833C1502.81 231.208 1502.47 240.587 1501.6 249.923H1270.31C1273.38 315.267 1318.8 349.477 1370.88 349.477C1418.37 349.477 1450.53 324.462 1460.74 287.192H1498.52C1485.76 339.791 1440.83 379.612 1370.88 379.612ZM1270.31 220.833H1466.36C1467.89 155.981 1420.92 123.309 1369.35 123.309C1318.8 123.309 1274.38 155.981 1270.31 220.833Z" fill="#2F2D32"/>
|
||||
<path d="M684.288 37.0849L690.607 0H725.97L719.651 37.0849H684.288Z" fill="#2F2D32"/>
|
||||
<path d="M290.749 234.21C290.748 213.232 286.199 192.505 277.416 173.455C268.633 154.405 255.824 137.486 239.873 123.862C238.212 123.339 236.536 122.827 234.845 122.325C224.342 125.132 214.289 129.413 204.987 135.04C222.098 145.313 236.259 159.841 246.09 177.21C255.921 194.579 261.088 214.197 261.088 234.156C261.088 254.114 255.921 273.733 246.09 291.102C236.259 308.471 222.098 322.999 204.987 333.272C214.287 338.9 224.34 343.176 234.845 345.972C236.536 345.526 238.212 345.034 239.873 344.511C255.818 330.892 268.622 313.98 277.405 294.939C286.188 275.897 290.74 255.179 290.749 234.21Z" fill="#F2DBF3"/>
|
||||
<path d="M29.6033 234.21C29.6373 209.607 37.4958 185.652 52.0422 165.81C66.5887 145.967 87.0692 131.266 110.522 123.831C129.056 107.91 151.31 96.9259 175.22 91.8973C154.055 87.4625 132.167 87.8046 111.151 92.8985C90.1351 97.9924 70.5197 107.71 53.7339 121.343C36.9481 134.976 23.4145 152.182 14.119 171.707C4.82343 191.232 0 212.585 0 234.21C0 255.835 4.82343 277.187 14.119 296.712C23.4145 316.237 36.9481 333.443 53.7339 347.076C70.5197 360.71 90.1351 370.427 111.151 375.521C132.167 380.615 154.055 380.957 175.22 376.522C151.351 371.48 129.14 360.496 110.645 344.588C87.168 337.177 66.6592 322.486 52.0889 302.641C37.5186 282.796 29.6432 258.829 29.6033 234.21V234.21Z" fill="#F2DBF3"/>
|
||||
<path d="M350.496 234.21C350.495 213.223 345.941 192.487 337.15 173.431C328.359 154.375 315.539 137.453 299.574 123.831C278.596 117.228 256.173 116.726 234.921 122.386C259.562 128.955 281.343 143.478 296.88 163.7C312.417 183.921 320.84 208.708 320.84 234.21C320.84 259.711 312.417 284.498 296.88 304.719C281.343 324.941 259.562 339.464 234.921 346.033C256.171 351.714 278.6 351.213 299.574 344.588C315.541 330.968 328.363 314.046 337.154 294.99C345.946 275.933 350.498 255.196 350.496 234.21V234.21Z" fill="#EA9FCC"/>
|
||||
<path d="M170.192 344.588C146.723 337.181 126.226 322.487 111.676 302.638C97.1264 282.79 89.282 258.82 89.282 234.21C89.282 209.599 97.1264 185.63 111.676 165.781C126.226 145.932 146.723 131.238 170.192 123.831C188.713 107.917 210.95 96.9332 234.844 91.8973C213.679 87.4625 191.792 87.8046 170.775 92.8985C149.759 97.9924 130.144 107.71 113.358 121.343C96.5722 134.976 83.0386 152.182 73.7431 171.707C64.4475 191.232 59.6241 212.585 59.6241 234.21C59.6241 255.835 64.4475 277.187 73.7431 296.712C83.0386 316.237 96.5722 333.443 113.358 347.076C130.144 360.71 149.759 370.427 170.775 375.521C191.792 380.615 213.679 380.957 234.844 376.522C210.948 371.493 188.71 360.508 170.192 344.588V344.588Z" fill="#EA9FCC"/>
|
||||
<path d="M264.688 88.8069C235.93 88.8069 207.818 97.3346 183.907 113.312C159.996 129.289 141.359 151.998 130.354 178.566C119.349 205.135 116.469 234.371 122.08 262.576C127.69 290.782 141.538 316.69 161.873 337.025C182.208 357.36 208.116 371.208 236.321 376.819C264.526 382.429 293.762 379.55 320.331 368.544C346.899 357.539 369.608 338.903 385.585 314.991C401.562 291.08 410.09 262.968 410.09 234.21C410.045 195.66 394.711 158.702 367.453 131.444C340.194 104.185 303.237 88.8516 264.688 88.8069V88.8069ZM380.416 234.21C380.419 257.103 373.633 279.483 360.916 298.52C348.199 317.556 330.122 332.394 308.972 341.156C287.823 349.918 264.549 352.212 242.096 347.746C219.642 343.28 199.017 332.257 182.829 316.069C166.641 299.881 155.618 279.255 151.152 256.802C146.687 234.348 148.98 211.075 157.742 189.925C166.505 168.775 181.342 150.698 200.378 137.981C219.415 125.264 241.794 118.478 264.688 118.481C295.369 118.518 324.784 130.722 346.479 152.418C368.175 174.113 380.379 203.528 380.416 234.21V234.21Z" fill="#FF244E"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 8.9 KiB |
@@ -17,12 +17,11 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { render, screen, waitFor } from 'spec/helpers/testing-library';
|
||||
import { render, screen } from 'spec/helpers/testing-library';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import fetchMock from 'fetch-mock';
|
||||
import ThemeModal from './ThemeModal';
|
||||
import { ThemeObject } from './types';
|
||||
import { validateTheme } from 'src/theme/utils/themeStructureValidation';
|
||||
|
||||
const mockThemeContext = {
|
||||
setTemporaryTheme: jest.fn(),
|
||||
@@ -38,27 +37,6 @@ jest.mock('src/dashboard/util/permissionUtils', () => ({
|
||||
isUserAdmin: jest.fn(() => true),
|
||||
}));
|
||||
|
||||
// Mock JsonEditor to avoid direct DOM manipulation in tests
|
||||
jest.mock('@superset-ui/core/components/AsyncAceEditor', () => ({
|
||||
...jest.requireActual('@superset-ui/core/components/AsyncAceEditor'),
|
||||
JsonEditor: ({
|
||||
onChange,
|
||||
value,
|
||||
readOnly,
|
||||
}: {
|
||||
onChange: (value: string) => void;
|
||||
value: string;
|
||||
readOnly?: boolean;
|
||||
}) => (
|
||||
<textarea
|
||||
data-test="json-editor"
|
||||
value={value}
|
||||
onChange={e => onChange(e.target.value)}
|
||||
readOnly={readOnly}
|
||||
/>
|
||||
),
|
||||
}));
|
||||
|
||||
const mockTheme: ThemeObject = {
|
||||
id: 1,
|
||||
theme_name: 'Test Theme',
|
||||
@@ -111,31 +89,6 @@ afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
// Helper to add valid JSON data to the theme
|
||||
// Uses the mocked JsonEditor textarea for testing
|
||||
const addValidJsonData = async () => {
|
||||
const validJson = JSON.stringify(
|
||||
{ token: { colorPrimary: '#1890ff' } },
|
||||
null,
|
||||
2,
|
||||
);
|
||||
const jsonEditor = screen.getByTestId('json-editor');
|
||||
await userEvent.clear(jsonEditor);
|
||||
await userEvent.type(jsonEditor, validJson);
|
||||
};
|
||||
|
||||
// Helper to add JSON with unknown tokens (triggers warnings but not errors)
|
||||
const addJsonWithUnknownToken = async () => {
|
||||
const jsonWithUnknown = JSON.stringify(
|
||||
{ token: { colorPrimary: '#1890ff', unknownTokenName: 'value' } },
|
||||
null,
|
||||
2,
|
||||
);
|
||||
const jsonEditor = screen.getByTestId('json-editor');
|
||||
await userEvent.clear(jsonEditor);
|
||||
await userEvent.type(jsonEditor, jsonWithUnknown);
|
||||
};
|
||||
|
||||
test('renders modal with add theme dialog when show is true', () => {
|
||||
render(
|
||||
<ThemeModal
|
||||
@@ -330,16 +283,10 @@ test('enables save button when theme name is entered', async () => {
|
||||
|
||||
const nameInput = screen.getByPlaceholderText('Enter theme name');
|
||||
await userEvent.type(nameInput, 'My New Theme');
|
||||
await addValidJsonData();
|
||||
|
||||
// Wait for validation to complete and button to become enabled
|
||||
await waitFor(
|
||||
() => {
|
||||
const saveButton = screen.getByRole('button', { name: 'Add' });
|
||||
expect(saveButton).toBeEnabled();
|
||||
},
|
||||
{ timeout: 10000 },
|
||||
);
|
||||
const saveButton = await screen.findByRole('button', { name: 'Add' });
|
||||
|
||||
expect(saveButton).toBeEnabled();
|
||||
});
|
||||
|
||||
test('validates JSON format and enables save button', async () => {
|
||||
@@ -357,52 +304,10 @@ test('validates JSON format and enables save button', async () => {
|
||||
|
||||
const nameInput = screen.getByPlaceholderText('Enter theme name');
|
||||
await userEvent.type(nameInput, 'Test Theme');
|
||||
await addValidJsonData();
|
||||
|
||||
// Wait for validation to complete and button to become enabled
|
||||
await waitFor(
|
||||
() => {
|
||||
const saveButton = screen.getByRole('button', { name: 'Add' });
|
||||
expect(saveButton).toBeEnabled();
|
||||
},
|
||||
{ timeout: 10000 },
|
||||
);
|
||||
});
|
||||
const saveButton = await screen.findByRole('button', { name: 'Add' });
|
||||
|
||||
test('warnings do not block save - unknown tokens allow save with warnings', async () => {
|
||||
// First verify the test data actually produces warnings (not errors)
|
||||
const testTheme = {
|
||||
token: { colorPrimary: '#1890ff', unknownTokenName: 'value' },
|
||||
};
|
||||
const validationResult = validateTheme(testTheme);
|
||||
expect(validationResult.valid).toBe(true); // No errors
|
||||
expect(validationResult.warnings.length).toBeGreaterThan(0); // Has warnings
|
||||
expect(validationResult.warnings[0].tokenName).toBe('unknownTokenName');
|
||||
|
||||
render(
|
||||
<ThemeModal
|
||||
addDangerToast={jest.fn()}
|
||||
addSuccessToast={jest.fn()}
|
||||
onThemeAdd={jest.fn()}
|
||||
onHide={jest.fn()}
|
||||
show
|
||||
canDevelop={false}
|
||||
/>,
|
||||
{ useRedux: true, useRouter: true },
|
||||
);
|
||||
|
||||
const nameInput = screen.getByPlaceholderText('Enter theme name');
|
||||
await userEvent.type(nameInput, 'Theme With Unknown Token');
|
||||
await addJsonWithUnknownToken();
|
||||
|
||||
// Wait for validation to complete - button should still be enabled despite warnings
|
||||
await waitFor(
|
||||
() => {
|
||||
const saveButton = screen.getByRole('button', { name: 'Add' });
|
||||
expect(saveButton).toBeEnabled();
|
||||
},
|
||||
{ timeout: 10000 },
|
||||
);
|
||||
expect(saveButton).toBeEnabled();
|
||||
});
|
||||
|
||||
test('shows unsaved changes alert when closing modal with modifications', async () => {
|
||||
@@ -513,19 +418,6 @@ test('saves changes when clicking Save button in unsaved changes alert', async (
|
||||
|
||||
const nameInput = screen.getByPlaceholderText('Enter theme name');
|
||||
await userEvent.type(nameInput, 'Modified Theme');
|
||||
await addValidJsonData();
|
||||
|
||||
// Wait for validation to complete before canceling
|
||||
await waitFor(
|
||||
() => {
|
||||
const addButton = screen.getByRole('button', { name: 'Add' });
|
||||
expect(addButton).toBeEnabled();
|
||||
},
|
||||
{ timeout: 10000 },
|
||||
);
|
||||
|
||||
// Give extra time for all state updates to complete
|
||||
await new Promise(resolve => setTimeout(resolve, 500));
|
||||
|
||||
const cancelButton = screen.getByRole('button', { name: 'Cancel' });
|
||||
await userEvent.click(cancelButton);
|
||||
@@ -534,26 +426,13 @@ test('saves changes when clicking Save button in unsaved changes alert', async (
|
||||
await screen.findByText('You have unsaved changes'),
|
||||
).toBeInTheDocument();
|
||||
|
||||
// Wait for the Save button in the alert to be enabled
|
||||
const saveButton = await waitFor(
|
||||
() => {
|
||||
const button = screen.getByRole('button', { name: 'Save' });
|
||||
expect(button).toBeEnabled();
|
||||
return button;
|
||||
},
|
||||
{ timeout: 10000 },
|
||||
);
|
||||
const saveButton = screen.getByRole('button', { name: 'Save' });
|
||||
await userEvent.click(saveButton);
|
||||
|
||||
// Wait for API call to complete
|
||||
await screen.findByRole('dialog');
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(fetchMock.callHistory.called()).toBe(true);
|
||||
},
|
||||
{ timeout: 15000 },
|
||||
);
|
||||
}, 30000);
|
||||
expect(fetchMock.callHistory.called()).toBe(true);
|
||||
});
|
||||
|
||||
test('discards changes when clicking Discard button in unsaved changes alert', async () => {
|
||||
const onHide = jest.fn();
|
||||
@@ -604,23 +483,12 @@ test('creates new theme when saving', async () => {
|
||||
|
||||
const nameInput = screen.getByPlaceholderText('Enter theme name');
|
||||
await userEvent.type(nameInput, 'New Theme');
|
||||
await addValidJsonData();
|
||||
|
||||
// Wait for validation to complete and button to become enabled
|
||||
const saveButton = await waitFor(
|
||||
() => {
|
||||
const button = screen.getByRole('button', { name: 'Add' });
|
||||
expect(button).toBeEnabled();
|
||||
return button;
|
||||
},
|
||||
{ timeout: 10000 },
|
||||
);
|
||||
const saveButton = await screen.findByRole('button', { name: 'Add' });
|
||||
await userEvent.click(saveButton);
|
||||
|
||||
expect(await screen.findByRole('dialog')).toBeInTheDocument();
|
||||
await waitFor(() => {
|
||||
expect(fetchMock.callHistory.called(postThemeMockName)).toBe(true);
|
||||
});
|
||||
expect(fetchMock.callHistory.called(postThemeMockName)).toBe(true);
|
||||
});
|
||||
|
||||
test('updates existing theme when saving', async () => {
|
||||
@@ -668,24 +536,14 @@ test('handles API errors gracefully', async () => {
|
||||
|
||||
const nameInput = screen.getByPlaceholderText('Enter theme name');
|
||||
await userEvent.type(nameInput, 'New Theme');
|
||||
await addValidJsonData();
|
||||
|
||||
// Wait for validation to complete and button to become enabled
|
||||
const saveButton = await waitFor(
|
||||
() => {
|
||||
const button = screen.getByRole('button', { name: 'Add' });
|
||||
expect(button).toBeEnabled();
|
||||
return button;
|
||||
},
|
||||
{ timeout: 10000 },
|
||||
);
|
||||
const saveButton = await screen.findByRole('button', { name: 'Add' });
|
||||
expect(saveButton).toBeEnabled();
|
||||
|
||||
await userEvent.click(saveButton);
|
||||
|
||||
await screen.findByRole('dialog');
|
||||
await waitFor(() => {
|
||||
expect(fetchMock.callHistory.called()).toBe(true);
|
||||
});
|
||||
expect(fetchMock.callHistory.called()).toBe(true);
|
||||
});
|
||||
|
||||
test('applies theme locally when clicking Apply button', async () => {
|
||||
|
||||
@@ -43,10 +43,10 @@ import {
|
||||
Space,
|
||||
Tooltip,
|
||||
} from '@superset-ui/core/components';
|
||||
import { useJsonValidation } from '@superset-ui/core/components/AsyncAceEditor';
|
||||
import type { editors } from '@apache-superset/core';
|
||||
import { EditorHost } from 'src/core/editors';
|
||||
import { Typography } from '@superset-ui/core/components/Typography';
|
||||
import { useThemeValidation } from 'src/theme/hooks/useThemeValidation';
|
||||
import { OnlyKeyWithType } from 'src/utils/types';
|
||||
import { ThemeObject } from './types';
|
||||
|
||||
@@ -147,9 +147,10 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
SupersetText?.THEME_MODAL?.DOCUMENTATION_URL ||
|
||||
'https://superset.apache.org/docs/configuration/theming/';
|
||||
|
||||
// Theme validation (structure + token names)
|
||||
const validation = useThemeValidation(currentTheme?.json_data || '', {
|
||||
enabled: !isReadOnly && Boolean(currentTheme?.json_data),
|
||||
// JSON validation annotations using reusable hook
|
||||
const jsonAnnotations = useJsonValidation(currentTheme?.json_data, {
|
||||
enabled: !isReadOnly,
|
||||
errorPrefix: 'Invalid JSON syntax',
|
||||
});
|
||||
|
||||
// theme fetch logic
|
||||
@@ -177,15 +178,6 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
}, [onHide]);
|
||||
|
||||
const onSave = useCallback(() => {
|
||||
// Synchronous JSON guard to catch invalid JSON before API call
|
||||
// This handles the race condition where debounced validation hasn't updated yet
|
||||
try {
|
||||
JSON.parse(currentTheme?.json_data || '');
|
||||
} catch {
|
||||
addDangerToast(t('Invalid JSON configuration'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (isEditMode) {
|
||||
// Edit
|
||||
if (currentTheme?.id) {
|
||||
@@ -219,7 +211,6 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
createResource,
|
||||
onThemeAdd,
|
||||
hide,
|
||||
addDangerToast,
|
||||
]);
|
||||
|
||||
const handleCancel = useCallback(() => {
|
||||
@@ -315,22 +306,27 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
[currentTheme],
|
||||
);
|
||||
|
||||
const validate = () => {
|
||||
if (isReadOnly || !currentTheme) {
|
||||
const validate = useCallback(() => {
|
||||
if (isReadOnly) {
|
||||
setDisableSave(true);
|
||||
return;
|
||||
}
|
||||
|
||||
const hasValidName = Boolean(currentTheme?.theme_name?.trim());
|
||||
const hasValidJsonData = Boolean(currentTheme?.json_data?.trim());
|
||||
|
||||
// Block save only on ERRORS (not warnings)
|
||||
// Errors: JSON syntax errors, empty themes
|
||||
// Warnings: Unknown tokens, null values (non-blocking)
|
||||
const canSave = hasValidName && hasValidJsonData && !validation.hasErrors;
|
||||
|
||||
setDisableSave(!canSave);
|
||||
};
|
||||
if (
|
||||
currentTheme?.theme_name.length &&
|
||||
currentTheme?.json_data?.length &&
|
||||
isValidJson(currentTheme.json_data)
|
||||
) {
|
||||
setDisableSave(false);
|
||||
} else {
|
||||
setDisableSave(true);
|
||||
}
|
||||
}, [
|
||||
currentTheme?.theme_name,
|
||||
currentTheme?.json_data,
|
||||
isReadOnly,
|
||||
isValidJson,
|
||||
]);
|
||||
|
||||
// Initialize
|
||||
useEffect(() => {
|
||||
@@ -364,12 +360,7 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
// Validation
|
||||
useEffect(() => {
|
||||
validate();
|
||||
}, [
|
||||
currentTheme ? currentTheme.theme_name : '',
|
||||
currentTheme ? currentTheme.json_data : '',
|
||||
isReadOnly,
|
||||
validation.hasErrors,
|
||||
]);
|
||||
}, [validate]);
|
||||
|
||||
// Show/hide
|
||||
useEffect(() => {
|
||||
@@ -499,10 +490,7 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
>
|
||||
{t('documentation')}
|
||||
</a>
|
||||
{t(' for details.')}{' '}
|
||||
<Typography.Text type="secondary">
|
||||
{t('Unknown tokens will be highlighted as warnings.')}
|
||||
</Typography.Text>
|
||||
{t(' for details.')}
|
||||
</span>
|
||||
}
|
||||
/>
|
||||
@@ -518,7 +506,7 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
lineNumbers
|
||||
width="100%"
|
||||
height="250px"
|
||||
annotations={toEditorAnnotations(validation.annotations)}
|
||||
annotations={toEditorAnnotations(jsonAnnotations)}
|
||||
/>
|
||||
</StyledEditorWrapper>
|
||||
{canDevelopThemes && (
|
||||
@@ -532,8 +520,7 @@ const ThemeModal: FunctionComponent<ThemeModalProps> = ({
|
||||
onClick={onApply}
|
||||
disabled={
|
||||
!currentTheme?.json_data ||
|
||||
!isValidJson(currentTheme.json_data) ||
|
||||
validation.hasErrors
|
||||
!isValidJson(currentTheme.json_data)
|
||||
}
|
||||
buttonStyle="secondary"
|
||||
>
|
||||
|
||||
@@ -143,26 +143,8 @@ export class ThemeController {
|
||||
// Setup change callback
|
||||
if (onChange) this.onChangeCallbacks.add(onChange);
|
||||
|
||||
// Apply initial theme with recovery for corrupted stored themes
|
||||
try {
|
||||
this.applyTheme(initialTheme);
|
||||
} catch (error) {
|
||||
// Corrupted dev override or CRUD theme in storage - clear and retry with defaults
|
||||
console.warn(
|
||||
'Failed to apply stored theme, clearing invalid overrides:',
|
||||
error,
|
||||
);
|
||||
this.devThemeOverride = null;
|
||||
this.crudThemeId = null;
|
||||
this.storage.removeItem(STORAGE_KEYS.DEV_THEME_OVERRIDE);
|
||||
this.storage.removeItem(STORAGE_KEYS.CRUD_THEME_ID);
|
||||
this.storage.removeItem(STORAGE_KEYS.APPLIED_THEME_ID);
|
||||
|
||||
// Retry with clean default theme
|
||||
this.currentMode = ThemeMode.DEFAULT;
|
||||
const safeTheme = this.defaultTheme || {};
|
||||
this.applyTheme(safeTheme);
|
||||
}
|
||||
// Apply initial theme and persist mode
|
||||
this.applyTheme(initialTheme);
|
||||
this.persistMode();
|
||||
}
|
||||
|
||||
@@ -247,8 +229,14 @@ export class ThemeController {
|
||||
return this.dashboardThemes.get(themeId)!;
|
||||
}
|
||||
|
||||
// Use the enhanced fetchCrudTheme method which includes validation if feature flag is enabled
|
||||
const themeConfig = await this.fetchCrudTheme(themeId);
|
||||
// Fetch theme config from API using SupersetClient for proper auth
|
||||
const getTheme = makeApi<void, { result: { json_data: string } }>({
|
||||
method: 'GET',
|
||||
endpoint: `/api/v1/theme/${themeId}`,
|
||||
});
|
||||
|
||||
const { result } = await getTheme();
|
||||
const themeConfig = JSON.parse(result.json_data);
|
||||
|
||||
if (themeConfig) {
|
||||
// Controller creates and owns the dashboard theme
|
||||
@@ -341,6 +329,7 @@ export class ThemeController {
|
||||
|
||||
const theme: AnyThemeConfig | null = this.getThemeForMode(mode);
|
||||
if (!theme) {
|
||||
console.warn(`Theme for mode ${mode} not found, falling back to default`);
|
||||
this.fallbackToDefaultMode();
|
||||
return;
|
||||
}
|
||||
@@ -546,7 +535,7 @@ export class ThemeController {
|
||||
* Updates the theme.
|
||||
* @param theme - The new theme to apply
|
||||
*/
|
||||
private async updateTheme(theme?: AnyThemeConfig): Promise<void> {
|
||||
private updateTheme(theme?: AnyThemeConfig): void {
|
||||
try {
|
||||
// If no config provided, use current mode to get theme
|
||||
if (!theme) {
|
||||
@@ -562,41 +551,18 @@ export class ThemeController {
|
||||
this.persistMode();
|
||||
this.notifyListeners();
|
||||
} catch (error) {
|
||||
// Clear potentially corrupted overrides before fallback
|
||||
// This mirrors the constructor's recovery logic to prevent
|
||||
// repeated failures from a malformed devThemeOverride or crudThemeId
|
||||
this.devThemeOverride = null;
|
||||
this.crudThemeId = null;
|
||||
this.storage.removeItem(STORAGE_KEYS.DEV_THEME_OVERRIDE);
|
||||
this.storage.removeItem(STORAGE_KEYS.CRUD_THEME_ID);
|
||||
this.storage.removeItem(STORAGE_KEYS.APPLIED_THEME_ID);
|
||||
|
||||
await this.fallbackToDefaultMode();
|
||||
console.error('Failed to update theme:', error);
|
||||
this.fallbackToDefaultMode();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fallback to default mode with runtime error recovery.
|
||||
* Tries to fetch a fresh system default theme from the API.
|
||||
* Fallback to default mode with error recovery.
|
||||
*/
|
||||
private async fallbackToDefaultMode(): Promise<void> {
|
||||
private fallbackToDefaultMode(): void {
|
||||
this.currentMode = ThemeMode.DEFAULT;
|
||||
|
||||
// Try to fetch fresh system default theme from server
|
||||
const freshSystemTheme = await this.fetchSystemDefaultTheme();
|
||||
|
||||
if (freshSystemTheme) {
|
||||
try {
|
||||
await this.applyThemeWithRecovery(freshSystemTheme);
|
||||
this.persistMode();
|
||||
this.notifyListeners();
|
||||
return;
|
||||
} catch (error) {
|
||||
// Fresh theme also failed, continue to final fallback
|
||||
}
|
||||
}
|
||||
|
||||
// Final fallback: use cached default theme or built-in theme
|
||||
// Get the default theme which will have the correct algorithm
|
||||
const defaultTheme: AnyThemeConfig =
|
||||
this.getThemeForMode(ThemeMode.DEFAULT) || this.defaultTheme || {};
|
||||
|
||||
@@ -830,25 +796,10 @@ export class ThemeController {
|
||||
this.loadFonts(fontUrls);
|
||||
} catch (error) {
|
||||
console.error('Failed to apply theme:', error);
|
||||
// Re-throw the error so updateTheme can handle fallback logic
|
||||
throw error;
|
||||
this.fallbackToDefaultMode();
|
||||
}
|
||||
}
|
||||
|
||||
private async applyThemeWithRecovery(theme: AnyThemeConfig): Promise<void> {
|
||||
// Note: This method re-throws errors to the caller instead of calling
|
||||
// fallbackToDefaultMode directly, to avoid infinite recursion since
|
||||
// fallbackToDefaultMode calls this method. The caller's try/catch
|
||||
// handles the fallback flow.
|
||||
const normalizedConfig = normalizeThemeConfig(theme);
|
||||
this.globalTheme.setConfig(normalizedConfig);
|
||||
|
||||
// Load custom fonts if specified, mirroring applyTheme() behavior
|
||||
const fontUrls = (normalizedConfig?.token as Record<string, unknown>)
|
||||
?.fontUrls as string[] | undefined;
|
||||
this.loadFonts(fontUrls);
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads custom fonts from theme configuration.
|
||||
* Injects CSS @import statements for font URLs that haven't been loaded yet.
|
||||
@@ -945,17 +896,14 @@ export class ThemeController {
|
||||
/**
|
||||
* Fetches a theme configuration from the CRUD API.
|
||||
* @param themeId - The ID of the theme to fetch
|
||||
* @returns The theme configuration or null if fetch fails
|
||||
* @returns The theme configuration or null if not found
|
||||
*/
|
||||
private async fetchCrudTheme(
|
||||
themeId: string,
|
||||
): Promise<AnyThemeConfig | null> {
|
||||
try {
|
||||
// Use SupersetClient for proper authentication handling
|
||||
const getTheme = makeApi<
|
||||
void,
|
||||
{ result: { json_data: string; theme_name?: string } }
|
||||
>({
|
||||
const getTheme = makeApi<void, { result: { json_data: string } }>({
|
||||
method: 'GET',
|
||||
endpoint: `/api/v1/theme/${themeId}`,
|
||||
});
|
||||
@@ -963,65 +911,10 @@ export class ThemeController {
|
||||
const { result } = await getTheme();
|
||||
const themeConfig = JSON.parse(result.json_data);
|
||||
|
||||
if (!themeConfig || typeof themeConfig !== 'object') {
|
||||
console.error(`Invalid theme configuration for theme ${themeId}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Return theme as-is
|
||||
// Invalid tokens will be handled by Ant Design at runtime
|
||||
// Runtime errors will be caught by applyThemeWithRecovery()
|
||||
return themeConfig;
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch CRUD theme:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a fresh system default theme from the API for runtime recovery.
|
||||
* Tries multiple fallback strategies to find a valid theme.
|
||||
*
|
||||
* Note: Uses raw fetch() instead of SupersetClient because ThemeController
|
||||
* initializes early in the app lifecycle, before SupersetClient is fully
|
||||
* configured. This avoids boot-time circular dependencies.
|
||||
*
|
||||
* @returns The system default theme configuration or null if not found
|
||||
*/
|
||||
private async fetchSystemDefaultTheme(): Promise<AnyThemeConfig | null> {
|
||||
try {
|
||||
// Try to fetch theme marked as system default (is_system_default=true)
|
||||
const defaultResponse = await fetch(
|
||||
'/api/v1/theme/?q=(filters:!((col:is_system_default,opr:eq,value:!t)))',
|
||||
);
|
||||
if (defaultResponse.ok) {
|
||||
const data = await defaultResponse.json();
|
||||
if (data.result?.length > 0) {
|
||||
const themeConfig = JSON.parse(data.result[0].json_data);
|
||||
if (themeConfig && typeof themeConfig === 'object') {
|
||||
return themeConfig;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: Try to fetch system theme named 'THEME_DEFAULT'
|
||||
const fallbackResponse = await fetch(
|
||||
'/api/v1/theme/?q=(filters:!((col:theme_name,opr:eq,value:THEME_DEFAULT),(col:is_system,opr:eq,value:!t)))',
|
||||
);
|
||||
if (fallbackResponse.ok) {
|
||||
const fallbackData = await fallbackResponse.json();
|
||||
if (fallbackData.result?.length > 0) {
|
||||
const themeConfig = JSON.parse(fallbackData.result[0].json_data);
|
||||
if (themeConfig && typeof themeConfig === 'object') {
|
||||
return themeConfig;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Log for debugging but don't fail - fallback to cached theme will be used
|
||||
console.warn('Failed to fetch system default theme:', error);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,133 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { renderHook } from '@testing-library/react-hooks';
|
||||
import { useThemeValidation } from './useThemeValidation';
|
||||
|
||||
test('useThemeValidation validates valid theme with standard tokens', () => {
|
||||
const validTheme = JSON.stringify({
|
||||
token: {
|
||||
colorPrimary: '#1890ff',
|
||||
fontSize: 14,
|
||||
},
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useThemeValidation(validTheme));
|
||||
|
||||
expect(result.current.hasErrors).toBe(false);
|
||||
expect(result.current.hasWarnings).toBe(false);
|
||||
expect(result.current.annotations).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('useThemeValidation shows warnings for unknown tokens', () => {
|
||||
const themeWithUnknownToken = JSON.stringify({
|
||||
token: {
|
||||
colorPrimary: '#1890ff',
|
||||
unknownToken: 'value',
|
||||
},
|
||||
});
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useThemeValidation(themeWithUnknownToken),
|
||||
);
|
||||
|
||||
expect(result.current.hasErrors).toBe(false);
|
||||
expect(result.current.hasWarnings).toBe(true);
|
||||
expect(result.current.annotations.length).toBeGreaterThan(0);
|
||||
expect(result.current.annotations[0].type).toBe('warning');
|
||||
});
|
||||
|
||||
test('useThemeValidation shows error for empty theme', () => {
|
||||
const emptyTheme = JSON.stringify({});
|
||||
|
||||
const { result } = renderHook(() => useThemeValidation(emptyTheme));
|
||||
|
||||
expect(result.current.hasErrors).toBe(true);
|
||||
expect(result.current.annotations.length).toBeGreaterThan(0);
|
||||
expect(result.current.annotations[0].type).toBe('error');
|
||||
expect(result.current.annotations[0].text).toContain('cannot be empty');
|
||||
});
|
||||
|
||||
test('useThemeValidation shows error for invalid JSON syntax', () => {
|
||||
const invalidJson = '{invalid json}';
|
||||
|
||||
const { result } = renderHook(() => useThemeValidation(invalidJson));
|
||||
|
||||
expect(result.current.hasErrors).toBe(true);
|
||||
expect(result.current.annotations.length).toBeGreaterThan(0);
|
||||
expect(result.current.annotations[0].type).toBe('error');
|
||||
});
|
||||
|
||||
test('useThemeValidation skips validation for empty string', () => {
|
||||
const { result } = renderHook(() => useThemeValidation(''));
|
||||
|
||||
expect(result.current.hasErrors).toBe(false);
|
||||
expect(result.current.hasWarnings).toBe(false);
|
||||
expect(result.current.annotations).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('useThemeValidation validates Superset custom tokens', () => {
|
||||
const themeWithCustomToken = JSON.stringify({
|
||||
token: {
|
||||
brandLogoUrl: '/static/logo.png',
|
||||
brandSpinnerSvg: '<svg></svg>',
|
||||
},
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useThemeValidation(themeWithCustomToken));
|
||||
|
||||
expect(result.current.hasErrors).toBe(false);
|
||||
expect(result.current.hasWarnings).toBe(false);
|
||||
expect(result.current.annotations).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('useThemeValidation allows theme with only algorithm', () => {
|
||||
const themeWithAlgorithm = JSON.stringify({
|
||||
algorithm: 'dark',
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useThemeValidation(themeWithAlgorithm));
|
||||
|
||||
expect(result.current.hasErrors).toBe(false);
|
||||
expect(result.current.annotations).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('useThemeValidation shows warning for null token value', () => {
|
||||
const themeWithNullValue = JSON.stringify({
|
||||
token: {
|
||||
colorPrimary: null,
|
||||
},
|
||||
});
|
||||
|
||||
const { result } = renderHook(() => useThemeValidation(themeWithNullValue));
|
||||
|
||||
expect(result.current.hasErrors).toBe(false);
|
||||
expect(result.current.hasWarnings).toBe(true);
|
||||
expect(result.current.annotations[0].type).toBe('warning');
|
||||
expect(result.current.annotations[0].text).toContain('null/undefined');
|
||||
});
|
||||
|
||||
test('useThemeValidation respects enabled option', () => {
|
||||
const invalidJson = '{invalid}';
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useThemeValidation(invalidJson, { enabled: false }),
|
||||
);
|
||||
|
||||
expect(result.current.annotations).toHaveLength(0);
|
||||
});
|
||||
@@ -1,155 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { useMemo, useState, useEffect } from 'react';
|
||||
import { useJsonValidation } from '@superset-ui/core/components/AsyncAceEditor';
|
||||
import type { JsonValidationAnnotation } from '@superset-ui/core/components/AsyncAceEditor';
|
||||
import type { AnyThemeConfig } from '@apache-superset/core/ui';
|
||||
import { validateTheme } from '../utils/themeStructureValidation';
|
||||
|
||||
/**
|
||||
* Find the line number where a specific token appears in JSON string.
|
||||
* Uses improved logic to handle nested objects and avoid false positives.
|
||||
*/
|
||||
function findTokenLineInJson(jsonString: string, tokenName: string): number {
|
||||
if (!jsonString || !tokenName) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Handle special _root token for structural errors
|
||||
if (tokenName === '_root') {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const lines = jsonString.split('\n');
|
||||
|
||||
// Look for the token name as a JSON property key
|
||||
// Pattern: "tokenName" followed by : (with possible whitespace)
|
||||
const propertyPattern = new RegExp(
|
||||
`"${tokenName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}"\\s*:`,
|
||||
);
|
||||
|
||||
for (let i = 0; i < lines.length; i += 1) {
|
||||
if (propertyPattern.test(lines[i].trim())) {
|
||||
return i; // Return 0-based line number for AceEditor
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: simple string search for edge cases
|
||||
const searchPattern = `"${tokenName}"`;
|
||||
for (let i = 0; i < lines.length; i += 1) {
|
||||
if (lines[i].includes(searchPattern)) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
|
||||
// If token not found, return line 0
|
||||
return 0;
|
||||
}
|
||||
|
||||
export interface ThemeValidationResult {
|
||||
annotations: JsonValidationAnnotation[];
|
||||
hasErrors: boolean; // true if errors exist (blocks save)
|
||||
hasWarnings: boolean; // true if warnings exist (non-blocking)
|
||||
}
|
||||
|
||||
export interface UseThemeValidationOptions {
|
||||
/** Whether to enable validation. Default: true */
|
||||
enabled?: boolean;
|
||||
/** Debounce delay in milliseconds for validation. Default: 300 */
|
||||
debounceMs?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Theme validation hook with live feedback.
|
||||
* - Errors (JSON syntax, empty theme) block save/apply
|
||||
* - Warnings (unknown tokens, null values) allow save/apply
|
||||
*
|
||||
* This hook validates structure and token names only.
|
||||
* Token values are validated by Ant Design at runtime.
|
||||
*/
|
||||
export function useThemeValidation(
|
||||
jsonValue?: string,
|
||||
options: UseThemeValidationOptions = {},
|
||||
): ThemeValidationResult {
|
||||
const { enabled = true, debounceMs = 300 } = options;
|
||||
|
||||
const [debouncedValue, setDebouncedValue] = useState(jsonValue);
|
||||
|
||||
// Debounce for performance
|
||||
useEffect(() => {
|
||||
const timer = setTimeout(() => setDebouncedValue(jsonValue), debounceMs);
|
||||
return () => clearTimeout(timer);
|
||||
}, [jsonValue, debounceMs]);
|
||||
|
||||
// JSON syntax validation (ERRORS)
|
||||
const jsonAnnotations = useJsonValidation(debouncedValue, {
|
||||
enabled,
|
||||
errorPrefix: 'Invalid JSON',
|
||||
});
|
||||
|
||||
// Theme structure validation (ERRORS + WARNINGS)
|
||||
const themeAnnotations = useMemo(() => {
|
||||
// Skip if disabled or JSON is invalid
|
||||
if (!enabled || jsonAnnotations.length > 0 || !debouncedValue?.trim()) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
const config: AnyThemeConfig = JSON.parse(debouncedValue);
|
||||
const result = validateTheme(config);
|
||||
|
||||
const annotations: JsonValidationAnnotation[] = [];
|
||||
|
||||
// Convert errors to annotations (blocks save)
|
||||
result.errors.forEach(issue => {
|
||||
annotations.push({
|
||||
type: 'error',
|
||||
row: findTokenLineInJson(debouncedValue, issue.tokenName),
|
||||
column: 0,
|
||||
text: issue.message,
|
||||
});
|
||||
});
|
||||
|
||||
// Convert warnings to annotations (non-blocking)
|
||||
result.warnings.forEach(issue => {
|
||||
annotations.push({
|
||||
type: 'warning',
|
||||
row: findTokenLineInJson(debouncedValue, issue.tokenName),
|
||||
column: 0,
|
||||
text: issue.message,
|
||||
});
|
||||
});
|
||||
|
||||
return annotations;
|
||||
} catch {
|
||||
// JSON parsing error already caught by jsonAnnotations
|
||||
return [];
|
||||
}
|
||||
}, [enabled, debouncedValue, jsonAnnotations]);
|
||||
|
||||
return useMemo(() => {
|
||||
const allAnnotations = [...jsonAnnotations, ...themeAnnotations];
|
||||
|
||||
return {
|
||||
annotations: allAnnotations,
|
||||
hasErrors: allAnnotations.some(a => a.type === 'error'),
|
||||
hasWarnings: allAnnotations.some(a => a.type === 'warning'),
|
||||
};
|
||||
}, [jsonAnnotations, themeAnnotations]);
|
||||
}
|
||||
@@ -833,202 +833,6 @@ test('ThemeController handles theme application errors', () => {
|
||||
fallbackSpy.mockRestore();
|
||||
});
|
||||
|
||||
test('ThemeController constructor recovers from corrupted stored theme', () => {
|
||||
// Simulate corrupted dev theme override in storage
|
||||
const corruptedTheme = { token: { colorPrimary: '#ff0000' } };
|
||||
mockLocalStorage.getItem.mockImplementation((key: string) => {
|
||||
if (key === 'superset-dev-theme-override') {
|
||||
return JSON.stringify(corruptedTheme);
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
// Mock Theme.fromConfig to return object with toSerializedConfig
|
||||
mockThemeFromConfig.mockReturnValue({
|
||||
...mockThemeObject,
|
||||
toSerializedConfig: () => corruptedTheme,
|
||||
});
|
||||
|
||||
// First call throws (corrupted theme), second call succeeds (fallback)
|
||||
let callCount = 0;
|
||||
mockSetConfig.mockImplementation(() => {
|
||||
callCount += 1;
|
||||
if (callCount === 1) {
|
||||
throw new Error('Invalid theme configuration');
|
||||
}
|
||||
});
|
||||
|
||||
// Should not throw - constructor should recover
|
||||
const controller = createController();
|
||||
|
||||
// Verify recovery happened - use shared consoleSpy to avoid interfering with other tests
|
||||
expect(consoleSpy).toHaveBeenCalledWith(
|
||||
'Failed to apply stored theme, clearing invalid overrides:',
|
||||
expect.any(Error),
|
||||
);
|
||||
|
||||
// Verify invalid overrides were cleared from storage
|
||||
expect(mockLocalStorage.removeItem).toHaveBeenCalledWith(
|
||||
'superset-dev-theme-override',
|
||||
);
|
||||
expect(mockLocalStorage.removeItem).toHaveBeenCalledWith(
|
||||
'superset-crud-theme-id',
|
||||
);
|
||||
expect(mockLocalStorage.removeItem).toHaveBeenCalledWith(
|
||||
'superset-applied-theme-id',
|
||||
);
|
||||
|
||||
// Verify controller is in a valid state
|
||||
expect(controller.getCurrentMode()).toBe(ThemeMode.DEFAULT);
|
||||
});
|
||||
|
||||
test('recovery flow: fetchSystemDefaultTheme returns theme → applies fetched theme', async () => {
|
||||
// Test: fallbackToDefaultMode fetches theme from API and applies it
|
||||
// Flow: fallbackToDefaultMode → fetchSystemDefaultTheme → applyThemeWithRecovery
|
||||
|
||||
const originalFetch = global.fetch;
|
||||
const controller = createController();
|
||||
|
||||
try {
|
||||
// Mock fetch to return a system default theme from API
|
||||
const systemTheme = { token: { colorPrimary: '#recovery-theme' } };
|
||||
const mockFetch = jest.fn().mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
result: [{ json_data: JSON.stringify(systemTheme) }],
|
||||
}),
|
||||
});
|
||||
global.fetch = mockFetch;
|
||||
|
||||
// Track setConfig calls to verify the fetched theme is applied
|
||||
const setConfigCalls: unknown[] = [];
|
||||
mockSetConfig.mockImplementation((config: unknown) => {
|
||||
setConfigCalls.push(config);
|
||||
});
|
||||
|
||||
// Trigger fallbackToDefaultMode (simulates what happens after applyTheme fails)
|
||||
await (controller as any).fallbackToDefaultMode();
|
||||
|
||||
// Verify API was called to fetch system default theme
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
expect.stringContaining('/api/v1/theme/'),
|
||||
);
|
||||
|
||||
// Verify the fetched theme was applied via applyThemeWithRecovery
|
||||
expect(setConfigCalls.length).toBe(1);
|
||||
expect(setConfigCalls[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
token: expect.objectContaining({ colorPrimary: '#recovery-theme' }),
|
||||
}),
|
||||
);
|
||||
|
||||
// Verify controller is in default mode
|
||||
expect(controller.getCurrentMode()).toBe(ThemeMode.DEFAULT);
|
||||
} finally {
|
||||
global.fetch = originalFetch;
|
||||
}
|
||||
});
|
||||
|
||||
test('recovery flow: both API fetches fail → falls back to cached default theme', async () => {
|
||||
// Test: When fetchSystemDefaultTheme fails, fallbackToDefaultMode uses cached theme
|
||||
// Flow: fallbackToDefaultMode → fetchSystemDefaultTheme (fails) → applyTheme(cached)
|
||||
|
||||
const originalFetch = global.fetch;
|
||||
const controller = createController();
|
||||
|
||||
try {
|
||||
// Mock fetch to fail for both API endpoints
|
||||
const mockFetch = jest.fn().mockRejectedValue(new Error('Network error'));
|
||||
global.fetch = mockFetch;
|
||||
|
||||
// Track setConfig calls
|
||||
const setConfigCalls: unknown[] = [];
|
||||
mockSetConfig.mockImplementation((config: unknown) => {
|
||||
setConfigCalls.push(config);
|
||||
});
|
||||
|
||||
// Trigger fallbackToDefaultMode
|
||||
await (controller as any).fallbackToDefaultMode();
|
||||
|
||||
// Verify fetch was attempted
|
||||
expect(mockFetch).toHaveBeenCalled();
|
||||
|
||||
// Verify fallback to cached default theme was applied via applyTheme
|
||||
expect(setConfigCalls.length).toBe(1);
|
||||
expect(setConfigCalls[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
token: expect.objectContaining({
|
||||
colorBgBase: '#ededed', // From DEFAULT_THEME in test setup
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
// Verify controller is in default mode
|
||||
expect(controller.getCurrentMode()).toBe(ThemeMode.DEFAULT);
|
||||
} finally {
|
||||
global.fetch = originalFetch;
|
||||
}
|
||||
});
|
||||
|
||||
test('recovery flow: fetched theme fails to apply → falls back to cached default', async () => {
|
||||
// Test: When applyThemeWithRecovery fails, fallbackToDefaultMode uses cached theme
|
||||
// Flow: fallbackToDefaultMode → fetchSystemDefaultTheme → applyThemeWithRecovery (fails) → applyTheme(cached)
|
||||
|
||||
const originalFetch = global.fetch;
|
||||
const controller = createController();
|
||||
|
||||
try {
|
||||
// Mock fetch to return a theme
|
||||
const systemTheme = { token: { colorPrimary: '#bad-theme' } };
|
||||
const mockFetch = jest.fn().mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: async () => ({
|
||||
result: [{ json_data: JSON.stringify(systemTheme) }],
|
||||
}),
|
||||
});
|
||||
global.fetch = mockFetch;
|
||||
|
||||
// First setConfig call (applyThemeWithRecovery) fails, second (applyTheme) succeeds
|
||||
const setConfigCalls: unknown[] = [];
|
||||
mockSetConfig.mockImplementation((config: unknown) => {
|
||||
setConfigCalls.push(config);
|
||||
if (setConfigCalls.length === 1) {
|
||||
throw new Error('Fetched theme failed to apply');
|
||||
}
|
||||
});
|
||||
|
||||
// Trigger fallbackToDefaultMode
|
||||
await (controller as any).fallbackToDefaultMode();
|
||||
|
||||
// Verify fetch was called
|
||||
expect(mockFetch).toHaveBeenCalled();
|
||||
|
||||
// Verify both attempts were made: fetched theme (failed) then cached default
|
||||
expect(setConfigCalls.length).toBe(2);
|
||||
|
||||
// First call was the fetched theme (which failed)
|
||||
expect(setConfigCalls[0]).toEqual(
|
||||
expect.objectContaining({
|
||||
token: expect.objectContaining({ colorPrimary: '#bad-theme' }),
|
||||
}),
|
||||
);
|
||||
|
||||
// Second call was the cached default theme
|
||||
expect(setConfigCalls[1]).toEqual(
|
||||
expect.objectContaining({
|
||||
token: expect.objectContaining({
|
||||
colorBgBase: '#ededed', // From DEFAULT_THEME
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
// Verify controller is in default mode
|
||||
expect(controller.getCurrentMode()).toBe(ThemeMode.DEFAULT);
|
||||
} finally {
|
||||
global.fetch = originalFetch;
|
||||
}
|
||||
});
|
||||
|
||||
// Cleanup tests
|
||||
test('ThemeController cleans up listeners on destroy', () => {
|
||||
const mockMediaQueryInstance = {
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import {
|
||||
isValidTokenName,
|
||||
isSupersetCustomToken,
|
||||
getAllValidTokenNames,
|
||||
} from './antdTokenNames';
|
||||
|
||||
test('isValidTokenName recognizes standard Ant Design tokens', () => {
|
||||
expect(isValidTokenName('colorPrimary')).toBe(true);
|
||||
expect(isValidTokenName('fontSize')).toBe(true);
|
||||
expect(isValidTokenName('padding')).toBe(true);
|
||||
expect(isValidTokenName('borderRadius')).toBe(true);
|
||||
});
|
||||
|
||||
test('isValidTokenName recognizes Superset custom tokens', () => {
|
||||
expect(isValidTokenName('brandLogoUrl')).toBe(true);
|
||||
expect(isValidTokenName('brandSpinnerSvg')).toBe(true);
|
||||
expect(isValidTokenName('fontSizeXS')).toBe(true);
|
||||
expect(isValidTokenName('echartsOptionsOverrides')).toBe(true);
|
||||
});
|
||||
|
||||
test('isValidTokenName rejects unknown tokens', () => {
|
||||
expect(isValidTokenName('fooBarBaz')).toBe(false);
|
||||
expect(isValidTokenName('colrPrimary')).toBe(false);
|
||||
expect(isValidTokenName('invalidToken')).toBe(false);
|
||||
});
|
||||
|
||||
test('isValidTokenName handles edge cases', () => {
|
||||
expect(isValidTokenName('')).toBe(false);
|
||||
expect(isValidTokenName(' ')).toBe(false);
|
||||
});
|
||||
|
||||
test('isSupersetCustomToken identifies Superset-specific tokens', () => {
|
||||
expect(isSupersetCustomToken('brandLogoUrl')).toBe(true);
|
||||
expect(isSupersetCustomToken('brandSpinnerSvg')).toBe(true);
|
||||
expect(isSupersetCustomToken('fontSizeXS')).toBe(true);
|
||||
expect(isSupersetCustomToken('fontUrls')).toBe(true);
|
||||
});
|
||||
|
||||
test('isSupersetCustomToken returns false for Ant Design tokens', () => {
|
||||
expect(isSupersetCustomToken('colorPrimary')).toBe(false);
|
||||
expect(isSupersetCustomToken('fontSize')).toBe(false);
|
||||
});
|
||||
|
||||
test('isSupersetCustomToken returns false for unknown tokens', () => {
|
||||
expect(isSupersetCustomToken('fooBar')).toBe(false);
|
||||
});
|
||||
|
||||
test('getAllValidTokenNames returns categorized token names', () => {
|
||||
const result = getAllValidTokenNames();
|
||||
|
||||
expect(result).toHaveProperty('antdTokens');
|
||||
expect(result).toHaveProperty('supersetTokens');
|
||||
expect(result).toHaveProperty('total');
|
||||
});
|
||||
|
||||
test('getAllValidTokenNames has reasonable token counts', () => {
|
||||
const result = getAllValidTokenNames();
|
||||
|
||||
// Ant Design tokens should exist (avoid brittle exact count that breaks on upgrades)
|
||||
expect(result.antdTokens.length).toBeGreaterThan(0);
|
||||
expect(result.antdTokens).toContain('colorPrimary');
|
||||
expect(result.antdTokens).toContain('fontSize');
|
||||
expect(result.antdTokens).toContain('borderRadius');
|
||||
|
||||
// Superset custom tokens should exist
|
||||
expect(result.supersetTokens.length).toBeGreaterThan(0);
|
||||
expect(result.supersetTokens).toContain('brandLogoUrl');
|
||||
expect(result.supersetTokens).toContain('fontUrls');
|
||||
|
||||
// Total should be sum of both
|
||||
expect(result.total).toBe(
|
||||
result.antdTokens.length + result.supersetTokens.length,
|
||||
);
|
||||
});
|
||||
|
||||
test('getAllValidTokenNames includes known Superset tokens', () => {
|
||||
const result = getAllValidTokenNames();
|
||||
|
||||
expect(result.supersetTokens).toContain('brandLogoUrl');
|
||||
expect(result.supersetTokens).toContain('brandSpinnerSvg');
|
||||
expect(result.supersetTokens).toContain('fontSizeXS');
|
||||
});
|
||||
|
||||
test('getAllValidTokenNames includes known Ant Design tokens', () => {
|
||||
const result = getAllValidTokenNames();
|
||||
|
||||
expect(result.antdTokens).toContain('colorPrimary');
|
||||
expect(result.antdTokens).toContain('fontSize');
|
||||
expect(result.antdTokens).toContain('padding');
|
||||
});
|
||||
@@ -1,115 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import { theme } from 'antd';
|
||||
|
||||
/**
|
||||
* Superset-specific custom tokens that extend Ant Design's token system.
|
||||
* These keys are derived from the SupersetSpecificTokens interface to ensure consistency.
|
||||
*/
|
||||
const SUPERSET_CUSTOM_TOKENS: Set<string> = new Set([
|
||||
// Font extensions (fontWeightStrong is an Ant Design token, not Superset-specific)
|
||||
'fontSizeXS',
|
||||
'fontSizeXXL',
|
||||
'fontWeightNormal',
|
||||
'fontWeightLight',
|
||||
|
||||
// Brand tokens
|
||||
'brandIconMaxWidth',
|
||||
'brandLogoAlt',
|
||||
'brandLogoUrl',
|
||||
'brandLogoMargin',
|
||||
'brandLogoHref',
|
||||
'brandLogoHeight',
|
||||
|
||||
// Spinner tokens
|
||||
'brandSpinnerUrl',
|
||||
'brandSpinnerSvg',
|
||||
|
||||
// ECharts tokens
|
||||
'echartsOptionsOverrides',
|
||||
'echartsOptionsOverridesByChartType',
|
||||
|
||||
// Font loading
|
||||
'fontUrls',
|
||||
]);
|
||||
|
||||
/**
|
||||
* Lazy-loaded cache of valid token names.
|
||||
* Combines Ant Design tokens (extracted at runtime) + Superset custom tokens.
|
||||
*/
|
||||
let validTokenNamesCache: Set<string> | undefined;
|
||||
|
||||
/**
|
||||
* Get all valid token names (Ant Design + Superset custom).
|
||||
* Uses lazy loading and caching for performance.
|
||||
*/
|
||||
function getValidTokenNames(): Set<string> {
|
||||
if (validTokenNamesCache === undefined) {
|
||||
// Extract all token names from Ant Design's default theme
|
||||
const antdTokens = theme.getDesignToken();
|
||||
const antdTokenNames = Object.keys(antdTokens);
|
||||
|
||||
// Combine with Superset custom tokens
|
||||
validTokenNamesCache = new Set([
|
||||
...antdTokenNames,
|
||||
...SUPERSET_CUSTOM_TOKENS,
|
||||
]);
|
||||
}
|
||||
return validTokenNamesCache;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a token name is valid (recognized by Ant Design OR Superset).
|
||||
* @param tokenName - The token name to validate
|
||||
* @returns true if the token is recognized, false otherwise
|
||||
*/
|
||||
export function isValidTokenName(tokenName: string): boolean {
|
||||
return getValidTokenNames().has(tokenName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a token is a Superset custom token (not from Ant Design).
|
||||
* @param tokenName - The token name to check
|
||||
* @returns true if it's a Superset-specific token
|
||||
*/
|
||||
export function isSupersetCustomToken(tokenName: string): boolean {
|
||||
return SUPERSET_CUSTOM_TOKENS.has(tokenName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all valid token names, categorized by source.
|
||||
* Useful for debugging and testing.
|
||||
*/
|
||||
export function getAllValidTokenNames(): {
|
||||
antdTokens: string[];
|
||||
supersetTokens: string[];
|
||||
total: number;
|
||||
} {
|
||||
const allTokens = getValidTokenNames();
|
||||
const antdTokens = Array.from(allTokens).filter(
|
||||
t => !isSupersetCustomToken(t),
|
||||
);
|
||||
const supersetTokens: string[] = Array.from(SUPERSET_CUSTOM_TOKENS);
|
||||
|
||||
return {
|
||||
antdTokens,
|
||||
supersetTokens,
|
||||
total: allTokens.size,
|
||||
};
|
||||
}
|
||||
@@ -1,346 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import type { AnyThemeConfig } from '@apache-superset/core/ui';
|
||||
import { validateTheme } from './themeStructureValidation';
|
||||
|
||||
// --- Happy paths: well-formed themes pass with no errors or warnings ---

test('validateTheme validates a valid theme with standard tokens', () => {
  const theme: AnyThemeConfig = {
    token: {
      colorPrimary: '#1890ff',
      fontSize: 14,
    },
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(true);
  expect(result.errors).toHaveLength(0);
  expect(result.warnings).toHaveLength(0);
});

test('validateTheme validates a theme with Superset custom tokens', () => {
  const theme: AnyThemeConfig = {
    token: {
      colorPrimary: '#1890ff',
      brandLogoUrl: '/static/logo.png',
      brandSpinnerSvg: '<svg></svg>',
    },
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(true);
  expect(result.errors).toHaveLength(0);
  expect(result.warnings).toHaveLength(0);
});

// --- Warnings: non-blocking issues (unknown names, null values) ---

test('validateTheme warns about unknown token names', () => {
  const theme: AnyThemeConfig = {
    token: {
      colorPrimary: '#1890ff',
      fooBarBaz: 'invalid',
      colrPrimary: '#ff0000', // Typo
    },
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(true); // Warnings don't block
  expect(result.errors).toHaveLength(0);
  expect(result.warnings).toHaveLength(2);
  expect(result.warnings[0].tokenName).toBe('fooBarBaz');
  expect(result.warnings[1].tokenName).toBe('colrPrimary');
  expect(result.warnings[0].severity).toBe('warning');
});

test('validateTheme warns about null/undefined token values', () => {
  const theme: AnyThemeConfig = {
    token: {
      colorPrimary: null,
      fontSize: undefined,
    },
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(true);
  expect(result.warnings).toHaveLength(2);
  expect(result.warnings[0].message).toContain('null/undefined');
  expect(result.warnings[1].message).toContain('null/undefined');
});

// --- Errors: empty or structurally invalid configs block save/apply ---

test('validateTheme errors on empty theme object', () => {
  const theme: AnyThemeConfig = {};

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('cannot be empty');
});

test('validateTheme errors on null theme config', () => {
  const result = validateTheme(null as any);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].message).toContain('must be a valid object');
});

test('validateTheme allows theme with only algorithm', () => {
  const theme: AnyThemeConfig = {
    algorithm: 'dark' as any,
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(true);
  expect(result.errors).toHaveLength(0);
});

test('validateTheme allows theme with only components', () => {
  const theme: AnyThemeConfig = {
    components: {
      Button: {
        colorPrimary: '#1890ff',
      },
    },
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(true);
  expect(result.errors).toHaveLength(0);
});

test('validateTheme errors on theme with empty token object but no algorithm or components', () => {
  const theme: AnyThemeConfig = {
    token: {},
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].message).toContain('cannot be empty');
});

test('validateTheme combines errors and warnings correctly', () => {
  const theme: AnyThemeConfig = {
    token: {
      colorPrimary: '#1890ff',
      unknownToken: 'value',
      nullToken: null,
    },
  };

  const result = validateTheme(theme);

  expect(result.valid).toBe(true); // No errors, just warnings
  expect(result.errors).toHaveLength(0);
  expect(result.warnings.length).toBeGreaterThan(0);
});

// --- Errors: wrong runtime type for the `token` section ---

test('validateTheme errors when token is an array instead of object', () => {
  const theme = {
    token: ['colorPrimary', '#1890ff'],
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('must be an object');
});

test('validateTheme errors when token is a string instead of object', () => {
  const theme = {
    token: 'colorPrimary',
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].message).toContain('must be an object');
});

// --- Errors: wrong runtime type for the `components` section ---

test('validateTheme errors when components is an array instead of object', () => {
  const theme = {
    token: { colorPrimary: '#1890ff' },
    components: ['Button', 'Input'],
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('Components configuration');
  expect(result.errors[0].message).toContain('must be an object');
});

test('validateTheme errors when components is a primitive', () => {
  const theme = {
    token: { colorPrimary: '#1890ff' },
    components: 'Button',
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].message).toContain('Components configuration');
});

// --- Errors: `algorithm` must be a valid string or array of strings ---

test('validateTheme errors when algorithm is a number', () => {
  const theme = {
    token: { colorPrimary: '#1890ff' },
    algorithm: 123,
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('Algorithm must be a string');
});

test('validateTheme errors when algorithm is an object', () => {
  const theme = {
    token: { colorPrimary: '#1890ff' },
    algorithm: { type: 'dark' },
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].message).toContain('Algorithm must be a string');
});

test('validateTheme allows algorithm as array of strings', () => {
  const theme = {
    algorithm: ['dark', 'compact'],
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(true);
  expect(result.errors).toHaveLength(0);
});

test('validateTheme errors when algorithm array contains non-strings', () => {
  const theme = {
    token: { colorPrimary: '#1890ff' },
    algorithm: ['dark', 123, 'compact'],
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].message).toContain('Algorithm must be a string');
});

// --- Errors: explicit nulls are rejected (matches backend behavior) ---

test('validateTheme errors when token is explicitly null', () => {
  const theme = {
    token: null,
    algorithm: 'dark',
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('must be an object');
  expect(result.errors[0].message).toContain('not null');
});

test('validateTheme errors when components is explicitly null', () => {
  const theme = {
    token: { colorPrimary: '#1890ff' },
    components: null,
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('Components configuration');
  expect(result.errors[0].message).toContain('not null');
});

test('validateTheme errors when algorithm is explicitly null', () => {
  const theme = {
    token: { colorPrimary: '#1890ff' },
    algorithm: null,
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('Algorithm cannot be null');
});

// --- Errors/acceptance: algorithm values must come from the allowed set ---

test('validateTheme errors when algorithm string is not a valid value', () => {
  const theme = {
    algorithm: 'invalid-algorithm',
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].tokenName).toBe('_root');
  expect(result.errors[0].message).toContain('Invalid algorithm value');
  expect(result.errors[0].message).toContain('invalid-algorithm');
  expect(result.errors[0].message).toContain('default, dark, system, compact');
});

test('validateTheme errors when algorithm array contains invalid values', () => {
  const theme = {
    algorithm: ['dark', 'invalid-mode', 'compact'],
  } as unknown as AnyThemeConfig;

  const result = validateTheme(theme);

  expect(result.valid).toBe(false);
  expect(result.errors).toHaveLength(1);
  expect(result.errors[0].message).toContain('Invalid algorithm value');
  expect(result.errors[0].message).toContain('invalid-mode');
});

test('validateTheme allows all valid algorithm values', () => {
  const validAlgorithms = ['default', 'dark', 'system', 'compact'];

  validAlgorithms.forEach(algo => {
    const theme = { algorithm: algo } as unknown as AnyThemeConfig;
    const result = validateTheme(theme);
    expect(result.valid).toBe(true);
    expect(result.errors).toHaveLength(0);
  });
});
|
||||
@@ -1,191 +0,0 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import type { AnyThemeConfig } from '@apache-superset/core/ui';
|
||||
import { isValidTokenName } from './antdTokenNames';
|
||||
|
||||
/**
|
||||
* Valid algorithm values that match backend ThemeMode enum.
|
||||
* These correspond to Ant Design's built-in theme algorithms.
|
||||
*/
|
||||
const VALID_ALGORITHM_VALUES = new Set([
|
||||
'default',
|
||||
'dark',
|
||||
'system',
|
||||
'compact',
|
||||
]);
|
||||
|
||||
/** A single problem found while validating a theme config. */
export interface ValidationIssue {
  // Token the issue refers to; '_root' for structural (whole-config) issues.
  tokenName: string;
  // Errors block save/apply; warnings are surfaced but non-blocking.
  severity: 'error' | 'warning';
  // Human-readable description shown in the theme editor.
  message: string;
}

/** Aggregate outcome of validateTheme. */
export interface ValidationResult {
  valid: boolean; // false if ANY errors exist (warnings don't affect this)
  errors: ValidationIssue[];
  warnings: ValidationIssue[];
}
|
||||
|
||||
/**
|
||||
* Validates theme structure and token names.
|
||||
* - ERRORS block save/apply (invalid structure, empty themes)
|
||||
* - WARNINGS allow save/apply but show in editor (unknown tokens, null values)
|
||||
*
|
||||
* This validation does NOT check token values - Ant Design handles that at runtime.
|
||||
*/
|
||||
export function validateTheme(themeConfig: AnyThemeConfig): ValidationResult {
  const errors: ValidationIssue[] = [];
  const warnings: ValidationIssue[] = [];

  // ERROR: Null/invalid config
  if (!themeConfig || typeof themeConfig !== 'object') {
    errors.push({
      tokenName: '_root',
      severity: 'error',
      message: 'Theme configuration must be a valid object',
    });
    return { valid: false, errors, warnings };
  }

  // ERROR: Empty theme (no tokens, no algorithm, no components)
  const hasTokens =
    themeConfig.token && Object.keys(themeConfig.token).length > 0;
  const hasAlgorithm = Boolean(themeConfig.algorithm);
  const hasComponents =
    themeConfig.components && Object.keys(themeConfig.components).length > 0;

  if (!hasTokens && !hasAlgorithm && !hasComponents) {
    errors.push({
      tokenName: '_root',
      severity: 'error',
      message:
        'Theme cannot be empty. Add at least one token, algorithm, or component override.',
    });
    return { valid: false, errors, warnings };
  }

  // ERROR: token must be an object if present (null is also rejected by backend)
  const rawToken = themeConfig.token;
  if (rawToken !== undefined) {
    if (
      rawToken === null ||
      typeof rawToken !== 'object' ||
      Array.isArray(rawToken)
    ) {
      errors.push({
        tokenName: '_root',
        severity: 'error',
        message:
          'Token configuration must be an object, not null, array, or primitive',
      });
      return { valid: false, errors, warnings };
    }
  }
  // Past this point rawToken is either undefined or a plain object.
  const tokens = rawToken ?? {};

  // ERROR: components must be an object if present (null is also rejected by backend)
  const rawComponents = themeConfig.components;
  if (rawComponents !== undefined) {
    if (
      rawComponents === null ||
      typeof rawComponents !== 'object' ||
      Array.isArray(rawComponents)
    ) {
      errors.push({
        tokenName: '_root',
        severity: 'error',
        message:
          'Components configuration must be an object, not null, array, or primitive',
      });
      return { valid: false, errors, warnings };
    }
  }

  // ERROR: algorithm must be a valid string or array of valid strings if present
  // Valid values: "default", "dark", "system", "compact" (matches backend ThemeMode)
  const rawAlgorithm = themeConfig.algorithm;
  if (rawAlgorithm !== undefined) {
    // Null is rejected by backend
    if (rawAlgorithm === null) {
      errors.push({
        tokenName: '_root',
        severity: 'error',
        message: 'Algorithm cannot be null',
      });
      return { valid: false, errors, warnings };
    }

    // Must be string or array of strings
    const isString = typeof rawAlgorithm === 'string';
    const isStringArray =
      Array.isArray(rawAlgorithm) &&
      rawAlgorithm.every(a => typeof a === 'string');

    if (!isString && !isStringArray) {
      errors.push({
        tokenName: '_root',
        severity: 'error',
        message:
          'Algorithm must be a string or array of strings (e.g., "dark" or ["dark", "compact"])',
      });
      return { valid: false, errors, warnings };
    }

    // Validate algorithm values against allowed set
    const algorithms = isString ? [rawAlgorithm] : (rawAlgorithm as string[]);
    const invalidAlgorithms = algorithms.filter(
      a => !VALID_ALGORITHM_VALUES.has(a),
    );
    if (invalidAlgorithms.length > 0) {
      errors.push({
        tokenName: '_root',
        severity: 'error',
        message: `Invalid algorithm value(s): "${invalidAlgorithms.join('", "')}". Valid values are: default, dark, system, compact`,
      });
      return { valid: false, errors, warnings };
    }
  }

  // WARNING-level per-token checks: these never block save/apply.
  Object.entries(tokens).forEach(([name, value]) => {
    // Null/undefined check
    if (value === null || value === undefined) {
      warnings.push({
        tokenName: name,
        severity: 'warning',
        message: `Token '${name}' has null/undefined value`,
      });
      return;
    }

    // Token name validation
    if (!isValidTokenName(name)) {
      warnings.push({
        tokenName: name,
        severity: 'warning',
        message: `Unknown token '${name}' - may be ignored by Ant Design`,
      });
    }
  });

  return {
    valid: errors.length === 0,
    errors,
    warnings,
  };
}
|
||||
@@ -1,49 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from superset.db_engine_specs.base import DatabaseCategory
|
||||
from superset.db_engine_specs.mysql import MySQLEngineSpec
|
||||
|
||||
|
||||
class DoltDBEngineSpec(MySQLEngineSpec):
    """
    Engine spec for DoltDB.

    DoltDB is a SQL database with Git-like version control for data and schema.
    It is fully MySQL-compatible.

    Inherits all query/dialect behavior from MySQLEngineSpec; this class only
    overrides identity and connection metadata.
    """

    # Identifier used to match this spec to a SQLAlchemy URI scheme.
    engine = "doltdb"
    # Human-readable name shown to users.
    engine_name = "DoltDB"

    # Connection metadata — NOTE(review): presumably consumed by the database
    # connection UI; confirm against the base class consumers.
    metadata = {
        "description": (
            "DoltDB is a SQL database with Git-like version control for data "
            "and schema. It is fully MySQL-compatible."
        ),
        "logo": "doltdb.png",
        "homepage_url": "https://www.dolthub.com/",
        "categories": [
            DatabaseCategory.TRADITIONAL_RDBMS,
            DatabaseCategory.OPEN_SOURCE,
        ],
        # Uses the plain MySQL driver because DoltDB speaks the MySQL protocol.
        "pypi_packages": ["mysqlclient"],
        "connection_string": "mysql://{username}:{password}@{host}:{port}/{database}",
        "default_port": 3306,
        "notes": (
            "DoltDB uses the MySQL wire protocol. Connect using any MySQL driver."
        ),
    }
|
||||
@@ -57,7 +57,7 @@ class MssqlEngineSpec(BaseEngineSpec):
|
||||
"description": (
|
||||
"Microsoft SQL Server is a relational database management system."
|
||||
),
|
||||
"logo": "mssql-server.png",
|
||||
"logo": "msql.png",
|
||||
"homepage_url": "https://www.microsoft.com/en-us/sql-server",
|
||||
"categories": [
|
||||
DatabaseCategory.TRADITIONAL_RDBMS,
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from superset.db_engine_specs.base import DatabaseCategory
|
||||
from superset.db_engine_specs.postgres import PostgresBaseEngineSpec
|
||||
|
||||
|
||||
class PostGISEngineSpec(PostgresBaseEngineSpec):
    """
    Engine spec for PostGIS.

    PostGIS is a spatial database extender for PostgreSQL, adding support for
    geographic objects and location queries.

    Inherits query/dialect behavior from PostgresBaseEngineSpec; only identity
    and connection metadata are defined here.
    """

    # Identifier used to match this spec to a SQLAlchemy URI scheme.
    engine = "postgis"
    # Human-readable name shown to users.
    engine_name = "PostGIS"
    # Standard PostgreSQL driver — PostGIS is an extension, not a new protocol.
    default_driver = "psycopg2"

    # Connection metadata — NOTE(review): presumably consumed by the database
    # connection UI; confirm against the base class consumers.
    metadata = {
        "description": (
            "PostGIS is a spatial database extender for PostgreSQL, adding "
            "support for geographic objects and location queries."
        ),
        "logo": "postgis.svg",
        "homepage_url": "https://postgis.net/",
        "categories": [
            DatabaseCategory.TRADITIONAL_RDBMS,
            DatabaseCategory.OPEN_SOURCE,
        ],
        "pypi_packages": ["psycopg2"],
        "connection_string": (
            "postgresql://{username}:{password}@{host}:{port}/{database}"
        ),
        "default_port": 5432,
        "notes": (
            "PostGIS extends PostgreSQL with geospatial capabilities. "
            "Uses the standard PostgreSQL driver."
        ),
    }
|
||||
@@ -1,62 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from superset.constants import TimeGrain
|
||||
from superset.db_engine_specs.base import BaseEngineSpec, DatabaseCategory
|
||||
|
||||
|
||||
class QuestDBEngineSpec(BaseEngineSpec):
    """
    Engine spec for QuestDB.

    QuestDB is a high-performance, open-source time-series database optimized
    for fast ingest and SQL queries.
    """

    # Identifier used to match this spec to a SQLAlchemy URI scheme.
    engine = "questdb"
    # Human-readable name shown to users.
    engine_name = "QuestDB"
    default_driver = "questdb"

    # Connection metadata — NOTE(review): presumably consumed by the database
    # connection UI; confirm against the base class consumers.
    metadata = {
        "description": (
            "QuestDB is a high-performance, open-source time-series database "
            "optimized for fast ingest and SQL queries."
        ),
        "logo": "questdb.png",
        "homepage_url": "https://questdb.io/",
        "categories": [
            DatabaseCategory.ANALYTICAL_DATABASES,
            DatabaseCategory.OPEN_SOURCE,
        ],
        "pypi_packages": ["questdb-connect"],
        "connection_string": "questdb://{username}:{password}@{host}:{port}/{database}",
        "default_port": 8812,
        "notes": (
            "QuestDB is optimized for time-series data. Install questdb-connect "
            "for SQLAlchemy support."
        ),
    }

    # Maps Superset time grains to QuestDB's timestamp_floor() expressions.
    # {col} is substituted with the temporal column. QUARTER is intentionally
    # absent — presumably unsupported by timestamp_floor; verify before adding.
    _time_grain_expressions = {
        None: "{col}",
        TimeGrain.SECOND: "timestamp_floor('s', {col})",
        TimeGrain.MINUTE: "timestamp_floor('m', {col})",
        TimeGrain.HOUR: "timestamp_floor('h', {col})",
        TimeGrain.DAY: "timestamp_floor('d', {col})",
        TimeGrain.WEEK: "timestamp_floor('w', {col})",
        TimeGrain.MONTH: "timestamp_floor('M', {col})",
        TimeGrain.YEAR: "timestamp_floor('y', {col})",
    }
|
||||
@@ -1,74 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from superset.db_engine_specs.base import DatabaseCategory
|
||||
from superset.db_engine_specs.mysql import MySQLEngineSpec
|
||||
|
||||
|
||||
class TiDBEngineSpec(MySQLEngineSpec):
    """
    Engine spec for TiDB.

    TiDB is an open-source, cloud-native, distributed SQL database designed for
    hybrid transactional and analytical processing (HTAP) workloads. It is
    MySQL-compatible.

    Inherits query/dialect behavior from MySQLEngineSpec; only identity and
    connection metadata are defined here.
    """

    # Identifier used to match this spec to a SQLAlchemy URI scheme.
    engine = "tidb"
    # Human-readable name shown to users.
    engine_name = "TiDB"

    # Connection metadata — NOTE(review): presumably consumed by the database
    # connection UI; confirm against the base class consumers.
    metadata = {
        "description": (
            "TiDB is an open-source, cloud-native, distributed SQL database "
            "designed for hybrid transactional and analytical processing (HTAP) "
            "workloads. It is MySQL-compatible."
        ),
        "logo": "tidb.svg",
        "homepage_url": "https://www.pingcap.com/tidb/",
        "categories": [
            DatabaseCategory.TRADITIONAL_RDBMS,
            DatabaseCategory.OPEN_SOURCE,
        ],
        "pypi_packages": ["mysqlclient", "sqlalchemy-tidb"],
        "connection_string": "mysql://{username}:{password}@{host}:{port}/{database}",
        # TiDB listens on 4000 by default, unlike stock MySQL's 3306.
        "default_port": 4000,
        # Two driver options: plain MySQL protocol (recommended) or the native
        # sqlalchemy-tidb dialect.
        "drivers": [
            {
                "name": "mysqlclient",
                "pypi_package": "mysqlclient",
                "connection_string": (
                    "mysql://{username}:{password}@{host}:{port}/{database}"
                ),
                "is_recommended": True,
                "notes": (
                    "Standard MySQL driver, works with TiDB's MySQL compatibility."
                ),
            },
            {
                "name": "tidb",
                "pypi_package": "sqlalchemy-tidb",
                "connection_string": (
                    "tidb://{username}:{password}@{host}:{port}/{database}"
                ),
                "is_recommended": False,
                "notes": "Native TiDB dialect with TiDB-specific optimizations.",
            },
        ],
        "notes": (
            "TiDB is MySQL-compatible. Use the standard MySQL driver or the "
            "native sqlalchemy-tidb dialect."
        ),
    }
|
||||
@@ -1,63 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from superset.constants import TimeGrain
|
||||
from superset.db_engine_specs.base import BaseEngineSpec, DatabaseCategory
|
||||
|
||||
|
||||
class TimeplusEngineSpec(BaseEngineSpec):
|
||||
"""
|
||||
Engine spec for Timeplus.
|
||||
|
||||
Timeplus is a streaming-first analytics platform that provides real-time
|
||||
data processing with SQL.
|
||||
"""
|
||||
|
||||
engine = "timeplus"
|
||||
engine_name = "Timeplus"
|
||||
default_driver = "timeplus"
|
||||
|
||||
metadata = {
|
||||
"description": (
|
||||
"Timeplus is a streaming-first analytics platform that provides "
|
||||
"real-time data processing with SQL."
|
||||
),
|
||||
"logo": "timeplus.svg",
|
||||
"homepage_url": "https://www.timeplus.com/",
|
||||
"categories": [
|
||||
DatabaseCategory.ANALYTICAL_DATABASES,
|
||||
DatabaseCategory.OPEN_SOURCE,
|
||||
],
|
||||
"pypi_packages": ["timeplus-connect"],
|
||||
"connection_string": "timeplus://{username}:{password}@{host}:{port}",
|
||||
"default_port": 8123,
|
||||
"notes": (
|
||||
"Timeplus provides real-time streaming SQL analytics. Install "
|
||||
"timeplus-connect for SQLAlchemy and Superset support."
|
||||
),
|
||||
}
|
||||
|
||||
_time_grain_expressions = {
|
||||
None: "{col}",
|
||||
TimeGrain.SECOND: "date_trunc('second', {col})",
|
||||
TimeGrain.MINUTE: "date_trunc('minute', {col})",
|
||||
TimeGrain.HOUR: "date_trunc('hour', {col})",
|
||||
TimeGrain.DAY: "date_trunc('day', {col})",
|
||||
TimeGrain.WEEK: "date_trunc('week', {col})",
|
||||
TimeGrain.MONTH: "date_trunc('month', {col})",
|
||||
TimeGrain.QUARTER: "date_trunc('quarter', {col})",
|
||||
TimeGrain.YEAR: "date_trunc('year', {col})",
|
||||
}
|
||||
@@ -14,12 +14,11 @@
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Tests for the examples importer: orchestration, transpilation, normalization."""
|
||||
"""Tests for the examples importer, specifically SQL transpilation."""
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from superset.commands.importers.v1.examples import transpile_virtual_dataset_sql
|
||||
from superset.examples.utils import _normalize_dataset_schema
|
||||
|
||||
|
||||
def test_transpile_virtual_dataset_sql_no_sql():
|
||||
@@ -243,133 +242,3 @@ def test_transpile_virtual_dataset_sql_postgres_to_sqlite(mock_transpile, mock_d
|
||||
|
||||
assert config["sql"] == transpiled_sql
|
||||
mock_transpile.assert_called_once_with(original_sql, "sqlite", "postgresql")
|
||||
|
||||
|
||||
@patch(
|
||||
"superset.commands.importers.v1.examples.safe_insert_dashboard_chart_relationships"
|
||||
)
|
||||
@patch("superset.commands.importers.v1.examples.import_dashboard")
|
||||
@patch("superset.commands.importers.v1.examples.import_chart")
|
||||
@patch("superset.commands.importers.v1.examples.import_dataset")
|
||||
@patch("superset.commands.importers.v1.examples.import_database")
|
||||
def test_import_passes_ignore_permissions_to_all_importers(
|
||||
mock_import_db,
|
||||
mock_import_dataset,
|
||||
mock_import_chart,
|
||||
mock_import_dashboard,
|
||||
mock_safe_insert,
|
||||
):
|
||||
"""_import() must pass ignore_permissions=True to all importers.
|
||||
|
||||
This is the key wiring test: the security bypass for system imports
|
||||
only works if _import() passes ignore_permissions=True to each
|
||||
sub-importer. Without this, SQLite example databases are blocked
|
||||
by PREVENT_UNSAFE_DB_CONNECTIONS.
|
||||
"""
|
||||
from superset.commands.importers.v1.examples import ImportExamplesCommand
|
||||
|
||||
db_uuid = "a2dc77af-e654-49bb-b321-40f6b559a1ee"
|
||||
dataset_uuid = "14f48794-ebfa-4f60-a26a-582c49132f1b"
|
||||
chart_uuid = "cccccccc-cccc-cccc-cccc-cccccccccccc"
|
||||
dashboard_uuid = "dddddddd-dddd-dddd-dddd-dddddddddddd"
|
||||
|
||||
# Mock database import
|
||||
mock_db_obj = MagicMock()
|
||||
mock_db_obj.uuid = db_uuid
|
||||
mock_db_obj.id = 1
|
||||
mock_import_db.return_value = mock_db_obj
|
||||
|
||||
# Mock dataset import
|
||||
mock_dataset_obj = MagicMock()
|
||||
mock_dataset_obj.uuid = dataset_uuid
|
||||
mock_dataset_obj.id = 10
|
||||
mock_dataset_obj.table_name = "test_table"
|
||||
mock_import_dataset.return_value = mock_dataset_obj
|
||||
|
||||
# Mock chart import
|
||||
mock_chart_obj = MagicMock()
|
||||
mock_chart_obj.uuid = chart_uuid
|
||||
mock_chart_obj.id = 100
|
||||
mock_import_chart.return_value = mock_chart_obj
|
||||
|
||||
# Mock dashboard import
|
||||
mock_dashboard_obj = MagicMock()
|
||||
mock_dashboard_obj.id = 1000
|
||||
mock_import_dashboard.return_value = mock_dashboard_obj
|
||||
|
||||
configs = {
|
||||
"databases/examples.yaml": {
|
||||
"uuid": db_uuid,
|
||||
"database_name": "examples",
|
||||
"sqlalchemy_uri": "sqlite:///test.db",
|
||||
},
|
||||
"datasets/examples/test.yaml": {
|
||||
"uuid": dataset_uuid,
|
||||
"table_name": "test_table",
|
||||
"database_uuid": db_uuid,
|
||||
"schema": None,
|
||||
"sql": None,
|
||||
},
|
||||
"charts/test/chart.yaml": {
|
||||
"uuid": chart_uuid,
|
||||
"dataset_uuid": dataset_uuid,
|
||||
},
|
||||
"dashboards/test.yaml": {
|
||||
"uuid": dashboard_uuid,
|
||||
"position": {},
|
||||
},
|
||||
}
|
||||
|
||||
with patch(
|
||||
"superset.commands.importers.v1.examples.get_example_default_schema",
|
||||
return_value=None,
|
||||
):
|
||||
with patch(
|
||||
"superset.commands.importers.v1.examples.find_chart_uuids",
|
||||
return_value=[],
|
||||
):
|
||||
with patch(
|
||||
"superset.commands.importers.v1.examples.update_id_refs",
|
||||
return_value=configs["dashboards/test.yaml"],
|
||||
):
|
||||
ImportExamplesCommand._import(configs)
|
||||
|
||||
# Verify ALL importers received ignore_permissions=True
|
||||
mock_import_db.assert_called_once()
|
||||
assert mock_import_db.call_args[1].get("ignore_permissions") is True
|
||||
|
||||
mock_import_dataset.assert_called_once()
|
||||
assert mock_import_dataset.call_args[1].get("ignore_permissions") is True
|
||||
|
||||
mock_import_chart.assert_called_once()
|
||||
assert mock_import_chart.call_args[1].get("ignore_permissions") is True
|
||||
|
||||
mock_import_dashboard.assert_called_once()
|
||||
assert mock_import_dashboard.call_args[1].get("ignore_permissions") is True
|
||||
|
||||
|
||||
def test_normalize_dataset_schema_converts_main_to_null():
|
||||
"""SQLite 'main' schema must be normalized to null in YAML content.
|
||||
|
||||
This normalization happens in the YAML import path (utils.py), which is
|
||||
separate from the data_loading.py normalization. Both paths must handle
|
||||
SQLite's default 'main' schema correctly.
|
||||
"""
|
||||
content = "table_name: test\nschema: main\nuuid: abc-123"
|
||||
result = _normalize_dataset_schema(content)
|
||||
assert "schema: null" in result
|
||||
assert "schema: main" not in result
|
||||
|
||||
|
||||
def test_normalize_dataset_schema_preserves_other_schemas():
|
||||
"""Non-'main' schemas should be left unchanged."""
|
||||
content = "table_name: test\nschema: public\nuuid: abc-123"
|
||||
result = _normalize_dataset_schema(content)
|
||||
assert "schema: public" in result
|
||||
|
||||
|
||||
def test_normalize_dataset_schema_preserves_null_schema():
|
||||
"""Already-null schemas should remain null."""
|
||||
content = "table_name: test\nschema: null\nuuid: abc-123"
|
||||
result = _normalize_dataset_schema(content)
|
||||
assert "schema: null" in result
|
||||
|
||||
@@ -120,39 +120,6 @@ def test_import_database_sqlite_invalid(
|
||||
current_app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True
|
||||
|
||||
|
||||
def test_import_database_sqlite_allowed_with_ignore_permissions(
|
||||
mocker: MockerFixture, session: Session
|
||||
) -> None:
|
||||
"""
|
||||
Test that SQLite imports succeed when ignore_permissions=True.
|
||||
|
||||
System imports (like examples) use URIs from server config, not user input,
|
||||
so they should bypass the PREVENT_UNSAFE_DB_CONNECTIONS check. This is the
|
||||
key fix from PR #37577 that allows example loading to work in CI/showtime
|
||||
environments where PREVENT_UNSAFE_DB_CONNECTIONS is enabled.
|
||||
"""
|
||||
from superset.commands.database.importers.v1.utils import import_database
|
||||
from superset.models.core import Database
|
||||
from tests.integration_tests.fixtures.importexport import database_config_sqlite
|
||||
|
||||
mocker.patch.dict(current_app.config, {"PREVENT_UNSAFE_DB_CONNECTIONS": True})
|
||||
mocker.patch("superset.commands.database.importers.v1.utils.add_permissions")
|
||||
|
||||
engine = db.session.get_bind()
|
||||
Database.metadata.create_all(engine) # pylint: disable=no-member
|
||||
|
||||
config = copy.deepcopy(database_config_sqlite)
|
||||
# With ignore_permissions=True, the security check should be skipped
|
||||
database = import_database(config, ignore_permissions=True)
|
||||
|
||||
assert database.database_name == "imported_database"
|
||||
assert "sqlite" in database.sqlalchemy_uri
|
||||
|
||||
# Cleanup
|
||||
db.session.delete(database)
|
||||
db.session.flush()
|
||||
|
||||
|
||||
def test_import_database_managed_externally(
|
||||
mocker: MockerFixture,
|
||||
session: Session,
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,204 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Tests for data_loading.py UUID extraction functionality."""
|
||||
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from unittest.mock import patch
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
def test_get_dataset_config_from_yaml_extracts_uuid():
|
||||
"""Test that UUID is extracted from dataset.yaml."""
|
||||
from superset.examples.data_loading import get_dataset_config_from_yaml
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
example_dir = Path(tmpdir)
|
||||
dataset_yaml = example_dir / "dataset.yaml"
|
||||
dataset_yaml.write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"table_name": "test_table",
|
||||
"uuid": "12345678-1234-1234-1234-123456789012",
|
||||
"schema": "public",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
config = get_dataset_config_from_yaml(example_dir)
|
||||
|
||||
assert config["uuid"] == "12345678-1234-1234-1234-123456789012"
|
||||
assert config["table_name"] == "test_table"
|
||||
assert config["schema"] == "public"
|
||||
|
||||
|
||||
def test_get_dataset_config_from_yaml_without_uuid():
|
||||
"""Test that missing UUID returns None."""
|
||||
from superset.examples.data_loading import get_dataset_config_from_yaml
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
example_dir = Path(tmpdir)
|
||||
dataset_yaml = example_dir / "dataset.yaml"
|
||||
dataset_yaml.write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"table_name": "test_table",
|
||||
"schema": "public",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
config = get_dataset_config_from_yaml(example_dir)
|
||||
|
||||
assert config["uuid"] is None
|
||||
assert config["table_name"] == "test_table"
|
||||
|
||||
|
||||
def test_get_dataset_config_from_yaml_no_file():
|
||||
"""Test behavior when dataset.yaml doesn't exist."""
|
||||
from superset.examples.data_loading import get_dataset_config_from_yaml
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
example_dir = Path(tmpdir)
|
||||
|
||||
config = get_dataset_config_from_yaml(example_dir)
|
||||
|
||||
assert config["uuid"] is None
|
||||
assert config["table_name"] is None
|
||||
assert config["schema"] is None
|
||||
|
||||
|
||||
def test_get_dataset_config_from_yaml_treats_main_schema_as_none():
|
||||
"""Test that SQLite's 'main' schema is treated as None."""
|
||||
from superset.examples.data_loading import get_dataset_config_from_yaml
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
example_dir = Path(tmpdir)
|
||||
dataset_yaml = example_dir / "dataset.yaml"
|
||||
dataset_yaml.write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"table_name": "test_table",
|
||||
"schema": "main", # SQLite default schema
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
config = get_dataset_config_from_yaml(example_dir)
|
||||
|
||||
assert config["schema"] is None
|
||||
|
||||
|
||||
def test_get_multi_dataset_config_extracts_uuid():
|
||||
"""Test that UUID is extracted from datasets/{name}.yaml."""
|
||||
from superset.examples.data_loading import _get_multi_dataset_config
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
example_dir = Path(tmpdir)
|
||||
datasets_dir = example_dir / "datasets"
|
||||
datasets_dir.mkdir()
|
||||
dataset_yaml = datasets_dir / "test_dataset.yaml"
|
||||
dataset_yaml.write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"table_name": "custom_table_name",
|
||||
"uuid": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
|
||||
"schema": "public",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
data_file = example_dir / "data" / "test_dataset.parquet"
|
||||
config = _get_multi_dataset_config(example_dir, "test_dataset", data_file)
|
||||
|
||||
assert config["uuid"] == "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
|
||||
assert config["table_name"] == "custom_table_name"
|
||||
|
||||
|
||||
def test_get_multi_dataset_config_without_yaml():
|
||||
"""Test behavior when datasets/{name}.yaml doesn't exist."""
|
||||
from superset.examples.data_loading import _get_multi_dataset_config
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
example_dir = Path(tmpdir)
|
||||
data_file = example_dir / "data" / "test_dataset.parquet"
|
||||
|
||||
config = _get_multi_dataset_config(example_dir, "test_dataset", data_file)
|
||||
|
||||
assert config.get("uuid") is None
|
||||
assert config["table_name"] == "test_dataset"
|
||||
|
||||
|
||||
def test_get_multi_dataset_config_treats_main_schema_as_none():
|
||||
"""Test that SQLite's 'main' schema is treated as None in multi-dataset config."""
|
||||
from superset.examples.data_loading import _get_multi_dataset_config
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
example_dir = Path(tmpdir)
|
||||
datasets_dir = example_dir / "datasets"
|
||||
datasets_dir.mkdir()
|
||||
dataset_yaml = datasets_dir / "test_dataset.yaml"
|
||||
dataset_yaml.write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"table_name": "test_table",
|
||||
"schema": "main",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
data_file = example_dir / "data" / "test_dataset.parquet"
|
||||
config = _get_multi_dataset_config(example_dir, "test_dataset", data_file)
|
||||
|
||||
assert config["schema"] is None
|
||||
|
||||
|
||||
def test_discover_datasets_passes_uuid_to_loader():
|
||||
"""Test that discover_datasets passes UUID from YAML to create_generic_loader."""
|
||||
from superset.examples.data_loading import discover_datasets
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
examples_dir = Path(tmpdir)
|
||||
|
||||
# Create a simple example with data.parquet and dataset.yaml
|
||||
example_dir = examples_dir / "test_example"
|
||||
example_dir.mkdir()
|
||||
(example_dir / "data.parquet").touch()
|
||||
(example_dir / "dataset.yaml").write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"table_name": "test_table",
|
||||
"uuid": "12345678-1234-1234-1234-123456789012",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
with patch(
|
||||
"superset.examples.data_loading.get_examples_directory",
|
||||
return_value=examples_dir,
|
||||
):
|
||||
with patch(
|
||||
"superset.examples.data_loading.create_generic_loader"
|
||||
) as mock_create:
|
||||
mock_create.return_value = lambda: None
|
||||
|
||||
discover_datasets()
|
||||
|
||||
mock_create.assert_called_once()
|
||||
call_kwargs = mock_create.call_args[1]
|
||||
assert call_kwargs["uuid"] == "12345678-1234-1234-1234-123456789012"
|
||||
@@ -1,233 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Tests for generic_loader.py UUID threading functionality."""
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
|
||||
@patch("superset.examples.generic_loader.get_example_database")
|
||||
@patch("superset.examples.generic_loader.db")
|
||||
def test_load_parquet_table_sets_uuid_on_new_table(mock_db, mock_get_db):
|
||||
"""Test that load_parquet_table sets UUID on newly created SqlaTable."""
|
||||
from superset.examples.generic_loader import load_parquet_table
|
||||
|
||||
mock_database = MagicMock()
|
||||
mock_database.id = 1
|
||||
mock_database.has_table.return_value = True
|
||||
mock_get_db.return_value = mock_database
|
||||
|
||||
mock_engine = MagicMock()
|
||||
mock_inspector = MagicMock()
|
||||
mock_inspector.default_schema_name = "public"
|
||||
mock_database.get_sqla_engine.return_value.__enter__ = MagicMock(
|
||||
return_value=mock_engine
|
||||
)
|
||||
mock_database.get_sqla_engine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
# Simulate table not found in metadata
|
||||
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
|
||||
|
||||
test_uuid = "12345678-1234-1234-1234-123456789012"
|
||||
|
||||
with patch("superset.examples.generic_loader.inspect") as mock_inspect:
|
||||
mock_inspect.return_value = mock_inspector
|
||||
|
||||
tbl = load_parquet_table(
|
||||
parquet_file="test_data",
|
||||
table_name="test_table",
|
||||
database=mock_database,
|
||||
only_metadata=True,
|
||||
uuid=test_uuid,
|
||||
)
|
||||
|
||||
assert tbl.uuid == test_uuid
|
||||
|
||||
|
||||
@patch("superset.examples.generic_loader.get_example_database")
|
||||
@patch("superset.examples.generic_loader.db")
|
||||
def test_load_parquet_table_early_return_does_not_modify_existing_uuid(
|
||||
mock_db, mock_get_db
|
||||
):
|
||||
"""Test early return path when table exists - UUID is not modified.
|
||||
|
||||
When the physical table exists and force=False, the function returns early
|
||||
without going through the full load path. The existing table's UUID is
|
||||
preserved as-is (not modified even if different from the provided uuid).
|
||||
"""
|
||||
from superset.examples.generic_loader import load_parquet_table
|
||||
|
||||
mock_database = MagicMock()
|
||||
mock_database.id = 1
|
||||
mock_database.has_table.return_value = True # Triggers early return
|
||||
mock_get_db.return_value = mock_database
|
||||
|
||||
mock_engine = MagicMock()
|
||||
mock_inspector = MagicMock()
|
||||
mock_inspector.default_schema_name = "public"
|
||||
mock_database.get_sqla_engine.return_value.__enter__ = MagicMock(
|
||||
return_value=mock_engine
|
||||
)
|
||||
mock_database.get_sqla_engine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
# Simulate existing table without UUID
|
||||
existing_table = MagicMock()
|
||||
existing_table.uuid = None
|
||||
mock_db.session.query.return_value.filter_by.return_value.first.return_value = (
|
||||
existing_table
|
||||
)
|
||||
|
||||
test_uuid = "12345678-1234-1234-1234-123456789012"
|
||||
|
||||
with patch("superset.examples.generic_loader.inspect") as mock_inspect:
|
||||
mock_inspect.return_value = mock_inspector
|
||||
|
||||
tbl = load_parquet_table(
|
||||
parquet_file="test_data",
|
||||
table_name="test_table",
|
||||
database=mock_database,
|
||||
only_metadata=True,
|
||||
uuid=test_uuid,
|
||||
)
|
||||
|
||||
# Early return path returns existing table as-is
|
||||
assert tbl is existing_table
|
||||
# UUID was not modified (still None)
|
||||
assert tbl.uuid is None
|
||||
|
||||
|
||||
@patch("superset.examples.generic_loader.get_example_database")
|
||||
@patch("superset.examples.generic_loader.db")
|
||||
def test_load_parquet_table_preserves_existing_uuid(mock_db, mock_get_db):
|
||||
"""Test that load_parquet_table does not overwrite existing UUID."""
|
||||
from superset.examples.generic_loader import load_parquet_table
|
||||
|
||||
mock_database = MagicMock()
|
||||
mock_database.id = 1
|
||||
mock_database.has_table.return_value = True
|
||||
mock_get_db.return_value = mock_database
|
||||
|
||||
mock_engine = MagicMock()
|
||||
mock_inspector = MagicMock()
|
||||
mock_inspector.default_schema_name = "public"
|
||||
mock_database.get_sqla_engine.return_value.__enter__ = MagicMock(
|
||||
return_value=mock_engine
|
||||
)
|
||||
mock_database.get_sqla_engine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
# Simulate existing table with different UUID
|
||||
existing_uuid = "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
|
||||
existing_table = MagicMock()
|
||||
existing_table.uuid = existing_uuid
|
||||
mock_db.session.query.return_value.filter_by.return_value.first.return_value = (
|
||||
existing_table
|
||||
)
|
||||
|
||||
new_uuid = "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"
|
||||
|
||||
with patch("superset.examples.generic_loader.inspect") as mock_inspect:
|
||||
mock_inspect.return_value = mock_inspector
|
||||
|
||||
tbl = load_parquet_table(
|
||||
parquet_file="test_data",
|
||||
table_name="test_table",
|
||||
database=mock_database,
|
||||
only_metadata=True,
|
||||
uuid=new_uuid,
|
||||
)
|
||||
|
||||
# Should preserve original UUID
|
||||
assert tbl.uuid == existing_uuid
|
||||
|
||||
|
||||
@patch("superset.examples.generic_loader.get_example_database")
|
||||
@patch("superset.examples.generic_loader.db")
|
||||
def test_load_parquet_table_works_without_uuid(mock_db, mock_get_db):
|
||||
"""Test that load_parquet_table works correctly when no UUID is provided."""
|
||||
from superset.examples.generic_loader import load_parquet_table
|
||||
|
||||
mock_database = MagicMock()
|
||||
mock_database.id = 1
|
||||
mock_database.has_table.return_value = True
|
||||
mock_get_db.return_value = mock_database
|
||||
|
||||
mock_engine = MagicMock()
|
||||
mock_inspector = MagicMock()
|
||||
mock_inspector.default_schema_name = "public"
|
||||
mock_database.get_sqla_engine.return_value.__enter__ = MagicMock(
|
||||
return_value=mock_engine
|
||||
)
|
||||
mock_database.get_sqla_engine.return_value.__exit__ = MagicMock(return_value=False)
|
||||
|
||||
# Simulate table not found
|
||||
mock_db.session.query.return_value.filter_by.return_value.first.return_value = None
|
||||
|
||||
with patch("superset.examples.generic_loader.inspect") as mock_inspect:
|
||||
mock_inspect.return_value = mock_inspector
|
||||
|
||||
tbl = load_parquet_table(
|
||||
parquet_file="test_data",
|
||||
table_name="test_table",
|
||||
database=mock_database,
|
||||
only_metadata=True,
|
||||
# No uuid parameter
|
||||
)
|
||||
|
||||
# UUID should remain None
|
||||
assert tbl.uuid is None
|
||||
|
||||
|
||||
def test_create_generic_loader_passes_uuid():
|
||||
"""Test that create_generic_loader passes UUID to load_parquet_table."""
|
||||
from superset.examples.generic_loader import create_generic_loader
|
||||
|
||||
test_uuid = "12345678-1234-1234-1234-123456789012"
|
||||
loader = create_generic_loader(
|
||||
parquet_file="test_data",
|
||||
table_name="test_table",
|
||||
uuid=test_uuid,
|
||||
)
|
||||
|
||||
# Verify loader was created with UUID in closure
|
||||
with patch("superset.examples.generic_loader.load_parquet_table") as mock_load:
|
||||
mock_load.return_value = MagicMock()
|
||||
|
||||
loader(only_metadata=True)
|
||||
|
||||
# Verify UUID was passed through
|
||||
mock_load.assert_called_once()
|
||||
call_kwargs = mock_load.call_args[1]
|
||||
assert call_kwargs["uuid"] == test_uuid
|
||||
|
||||
|
||||
def test_create_generic_loader_without_uuid():
|
||||
"""Test that create_generic_loader works without UUID (backward compat)."""
|
||||
from superset.examples.generic_loader import create_generic_loader
|
||||
|
||||
loader = create_generic_loader(
|
||||
parquet_file="test_data",
|
||||
table_name="test_table",
|
||||
# No uuid
|
||||
)
|
||||
|
||||
with patch("superset.examples.generic_loader.load_parquet_table") as mock_load:
|
||||
mock_load.return_value = MagicMock()
|
||||
|
||||
loader(only_metadata=True)
|
||||
|
||||
mock_load.assert_called_once()
|
||||
call_kwargs = mock_load.call_args[1]
|
||||
assert call_kwargs["uuid"] is None
|
||||
@@ -1,206 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""Tests for examples/utils.py - YAML config loading and content assembly."""
|
||||
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
def _create_example_tree(base_dir: Path) -> Path:
|
||||
"""Create a minimal example directory tree under base_dir/superset/examples/.
|
||||
|
||||
Returns the 'superset' directory (what files("superset") would return).
|
||||
"""
|
||||
superset_dir = base_dir / "superset"
|
||||
examples_dir = superset_dir / "examples"
|
||||
|
||||
# _shared configs
|
||||
shared_dir = examples_dir / "_shared"
|
||||
shared_dir.mkdir(parents=True)
|
||||
(shared_dir / "database.yaml").write_text(
|
||||
"database_name: examples\n"
|
||||
"sqlalchemy_uri: __SQLALCHEMY_EXAMPLES_URI__\n"
|
||||
"uuid: a2dc77af-e654-49bb-b321-40f6b559a1ee\n"
|
||||
"version: '1.0.0'\n"
|
||||
)
|
||||
(shared_dir / "metadata.yaml").write_text(
|
||||
"version: '1.0.0'\ntimestamp: '2020-12-11T22:52:56.534241+00:00'\n"
|
||||
)
|
||||
|
||||
# An example with dataset, dashboard, and chart
|
||||
example_dir = examples_dir / "test_example"
|
||||
example_dir.mkdir()
|
||||
(example_dir / "dataset.yaml").write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"table_name": "test_table",
|
||||
"schema": "main",
|
||||
"uuid": "14f48794-ebfa-4f60-a26a-582c49132f1b",
|
||||
"database_uuid": "a2dc77af-e654-49bb-b321-40f6b559a1ee",
|
||||
"version": "1.0.0",
|
||||
}
|
||||
)
|
||||
)
|
||||
(example_dir / "dashboard.yaml").write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"dashboard_title": "Test Dashboard",
|
||||
"uuid": "dddddddd-dddd-dddd-dddd-dddddddddddd",
|
||||
"version": "1.0.0",
|
||||
}
|
||||
)
|
||||
)
|
||||
charts_dir = example_dir / "charts"
|
||||
charts_dir.mkdir()
|
||||
(charts_dir / "test_chart.yaml").write_text(
|
||||
yaml.dump(
|
||||
{
|
||||
"slice_name": "Test Chart",
|
||||
"uuid": "cccccccc-cccc-cccc-cccc-cccccccccccc",
|
||||
"dataset_uuid": "14f48794-ebfa-4f60-a26a-582c49132f1b",
|
||||
"version": "1.0.0",
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
return superset_dir
|
||||
|
||||
|
||||
def test_load_contents_builds_correct_import_structure():
|
||||
"""load_contents() must produce the key structure ImportExamplesCommand expects.
|
||||
|
||||
This tests the orchestration entry point: YAML files are discovered from
|
||||
the examples directory, the shared database config has its URI placeholder
|
||||
replaced, and the result has the correct key prefixes (databases/, datasets/,
|
||||
metadata.yaml).
|
||||
"""
|
||||
from superset.examples.utils import load_contents
|
||||
|
||||
with TemporaryDirectory() as tmpdir:
|
||||
superset_dir = _create_example_tree(Path(tmpdir))
|
||||
|
||||
test_examples_uri = "sqlite:///path/to/examples.db"
|
||||
mock_app = MagicMock()
|
||||
mock_app.config = {"SQLALCHEMY_EXAMPLES_URI": test_examples_uri}
|
||||
|
||||
with patch("superset.examples.utils.files", return_value=superset_dir):
|
||||
with patch("flask.current_app", mock_app):
|
||||
contents = load_contents()
|
||||
|
||||
# Verify database config is present with placeholder replaced
|
||||
assert "databases/examples.yaml" in contents
|
||||
db_content = contents["databases/examples.yaml"]
|
||||
assert "__SQLALCHEMY_EXAMPLES_URI__" not in db_content
|
||||
assert test_examples_uri in db_content
|
||||
|
||||
# Verify metadata is present
|
||||
assert "metadata.yaml" in contents
|
||||
|
||||
# Verify dataset is discovered with correct key prefix
|
||||
assert "datasets/examples/test_example.yaml" in contents
|
||||
|
||||
# Verify dashboard is discovered with correct key prefix
|
||||
assert "dashboards/test_example.yaml" in contents
|
||||
|
||||
# Verify chart is discovered with correct key prefix
|
||||
assert "charts/test_example/test_chart.yaml" in contents
|
||||
|
||||
# Verify schema normalization happened (main -> null)
|
||||
dataset_content = contents["datasets/examples/test_example.yaml"]
|
||||
assert "schema: main" not in dataset_content
|
||||
assert "schema: null" in dataset_content
|
||||
|
||||
|
||||
def test_load_contents_replaces_sqlalchemy_examples_uri_placeholder():
    """``_load_shared_configs`` must substitute the real examples URI.

    If the ``__SQLALCHEMY_EXAMPLES_URI__`` placeholder survives, the database
    import fails with an invalid connection string and no examples can load.
    """
    from superset.examples.utils import _load_shared_configs

    expected_uri = "postgresql://user:pass@host/db"

    with TemporaryDirectory() as tmpdir:
        fixture_dir = _create_example_tree(Path(tmpdir))

        # Minimal stand-in for the Flask app: only the config key that
        # _load_shared_configs reads.
        fake_app = MagicMock()
        fake_app.config = {"SQLALCHEMY_EXAMPLES_URI": expected_uri}

        with patch("superset.examples.utils.files", return_value=fixture_dir), \
                patch("flask.current_app", fake_app):
            contents = _load_shared_configs(Path("examples"))

        assert "databases/examples.yaml" in contents
        db_yaml = contents["databases/examples.yaml"]
        assert expected_uri in db_yaml
        assert "__SQLALCHEMY_EXAMPLES_URI__" not in db_yaml


@patch("superset.examples.utils.ImportExamplesCommand")
@patch("superset.examples.utils.load_contents")
def test_load_examples_from_configs_wires_command_correctly(
    load_contents_mock,
    command_cls_mock,
):
    """load_examples_from_configs() must construct ImportExamplesCommand
    with overwrite=True and thread force_data through.

    A wiring regression here would silently skip overwriting existing
    examples or ignore the force_data flag.
    """
    from superset.examples.utils import load_examples_from_configs

    load_contents_mock.return_value = {"databases/examples.yaml": "content"}

    load_examples_from_configs(force_data=True)

    # load_contents is invoked once with its (positional) flag left at False.
    load_contents_mock.assert_called_once_with(False)
    # The command receives the loaded contents, always overwrites, and the
    # force_data flag is forwarded unchanged.
    command_cls_mock.assert_called_once_with(
        {"databases/examples.yaml": "content"},
        overwrite=True,
        force_data=True,
    )
    # The constructed command instance must actually be executed.
    command_cls_mock.return_value.run.assert_called_once()


@patch("superset.examples.utils.ImportExamplesCommand")
@patch("superset.examples.utils.load_contents")
def test_load_examples_from_configs_defaults(
    load_contents_mock,
    command_cls_mock,
):
    """Default call should pass force_data=False and load_test_data=False."""
    from superset.examples.utils import load_examples_from_configs

    load_contents_mock.return_value = {}

    load_examples_from_configs()

    # Defaults: load_contents(False), command built with force_data=False.
    load_contents_mock.assert_called_once_with(False)
    command_cls_mock.assert_called_once_with(
        {},
        overwrite=True,
        force_data=False,
    )
    command_cls_mock.return_value.run.assert_called_once()
Reference in New Issue
Block a user