diff --git a/UPDATING.md b/UPDATING.md
index 27fc3428b9e..3d42f2b3d4e 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -46,6 +46,13 @@ The Deck.gl MapBox chart's **Opacity**, **Default longitude**, **Default latitud
**To restore fit-to-data behavior:** Open the chart in Explore, clear the **Default longitude**, **Default latitude**, and **Zoom** fields in the Viewport section, and re-save the chart.
+### Combined datasource list endpoint
+
+Added a new combined datasource list endpoint at `GET /api/v1/datasource/` to serve datasets and semantic views in one response.
+
+- The endpoint is available to users who have `can_read` permission on either `Dataset` or `SemanticView`.
+- Semantic views are included only when the `SEMANTIC_LAYERS` feature flag is enabled.
+- The endpoint enforces strict `order_column` validation and returns `400` for invalid sort columns.
### ClickHouse minimum driver version bump
The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.
diff --git a/superset-frontend/src/features/semanticViews/SemanticViewEditModal.test.tsx b/superset-frontend/src/features/semanticViews/SemanticViewEditModal.test.tsx
new file mode 100644
index 00000000000..0e80c9e6e7c
--- /dev/null
+++ b/superset-frontend/src/features/semanticViews/SemanticViewEditModal.test.tsx
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import userEvent from '@testing-library/user-event';
+import { render, screen, waitFor } from 'spec/helpers/testing-library';
+import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
+
+import SemanticViewEditModal from './SemanticViewEditModal';
+
+jest.mock('@superset-ui/core', () => ({
+ ...jest.requireActual('@superset-ui/core'),
+ SupersetClient: {
+ ...jest.requireActual('@superset-ui/core').SupersetClient,
+ put: jest.fn(),
+ },
+ getClientErrorObject: jest.fn(() => Promise.resolve({ error: '' })),
+}));
+
+const mockedPut = SupersetClient.put as jest.Mock;
+const mockedGetClientErrorObject = getClientErrorObject as jest.Mock;
+
+const createProps = () => ({
+ show: true,
+ onHide: jest.fn(),
+ onSave: jest.fn(),
+ addDangerToast: jest.fn(),
+ addSuccessToast: jest.fn(),
+ semanticView: {
+ id: 7,
+ table_name: 'orders_semantic_view',
+ description: 'old description',
+ cache_timeout: 60,
+ },
+});
+
+beforeEach(() => {
+ mockedPut.mockReset();
+ mockedGetClientErrorObject.mockReset();
+ mockedGetClientErrorObject.mockResolvedValue({ error: '' });
+});
+
+test('saves semantic view and refreshes list', async () => {
+ mockedPut.mockResolvedValue({});
+ const props = createProps();
+
+  render(<SemanticViewEditModal {...props} />);
+
+ await userEvent.click(screen.getByRole('button', { name: /save/i }));
+
+ await waitFor(() => {
+ expect(mockedPut).toHaveBeenCalledWith({
+ endpoint: '/api/v1/semantic_view/7',
+ jsonPayload: {
+ description: 'old description',
+ cache_timeout: 60,
+ },
+ });
+ });
+ expect(props.addSuccessToast).toHaveBeenCalledWith('Semantic view updated');
+ expect(props.onSave).toHaveBeenCalled();
+ expect(props.onHide).toHaveBeenCalled();
+});
+
+test('shows backend error toast when save fails', async () => {
+ mockedPut.mockRejectedValue(new Error('save failed'));
+ mockedGetClientErrorObject.mockResolvedValue({
+ error: 'Semantic view failed to save',
+ });
+ const props = createProps();
+
+  render(<SemanticViewEditModal {...props} />);
+
+ await userEvent.click(screen.getByRole('button', { name: /save/i }));
+
+ await waitFor(() => {
+ expect(props.addDangerToast).toHaveBeenCalledWith(
+ 'Semantic view failed to save',
+ );
+ });
+});
diff --git a/superset-frontend/src/features/semanticViews/SemanticViewEditModal.tsx b/superset-frontend/src/features/semanticViews/SemanticViewEditModal.tsx
index 0a08331dff1..1c51f77b719 100644
--- a/superset-frontend/src/features/semanticViews/SemanticViewEditModal.tsx
+++ b/superset-frontend/src/features/semanticViews/SemanticViewEditModal.tsx
@@ -18,8 +18,7 @@
*/
import { useState, useEffect } from 'react';
import { t } from '@apache-superset/core/translation';
-import { styled } from '@apache-superset/core/theme';
-import { SupersetClient } from '@superset-ui/core';
+import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
import { Input, InputNumber } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import {
@@ -28,9 +27,7 @@ import {
MODAL_STANDARD_WIDTH,
} from 'src/components/Modal';
-const ModalContent = styled.div`
- padding: ${({ theme }) => theme.sizeUnit * 4}px;
-`;
+type InputNumberValue = number | null;
interface SemanticViewEditModalProps {
show: boolean;
@@ -79,8 +76,12 @@ export default function SemanticViewEditModal({
addSuccessToast(t('Semantic view updated'));
onSave();
onHide();
- } catch {
- addDangerToast(t('An error occurred while saving the semantic view'));
+ } catch (error) {
+ const clientError = await getClientErrorObject(error);
+ addDangerToast(
+ clientError.error ||
+ t('An error occurred while saving the semantic view'),
+ );
} finally {
setSaving(false);
}
@@ -97,24 +98,22 @@ export default function SemanticViewEditModal({
width={MODAL_STANDARD_WIDTH}
saveLoading={saving}
>
-
-
- setDescription(e.target.value)}
- rows={4}
- />
-
-
- setCacheTimeout(value as number | null)}
- min={0}
- placeholder={t('Duration in seconds')}
- style={{ width: '100%' }}
- />
-
-
+
+ setDescription(e.target.value)}
+ rows={4}
+ />
+
+
+ setCacheTimeout(value as InputNumberValue)}
+ min={0}
+ placeholder={t('Duration in seconds')}
+ style={{ width: '100%' }}
+ />
+
);
}
diff --git a/superset-frontend/src/pages/DatasetList/index.tsx b/superset-frontend/src/pages/DatasetList/index.tsx
index be7c890ece8..c4b1f5f4aeb 100644
--- a/superset-frontend/src/pages/DatasetList/index.tsx
+++ b/superset-frontend/src/pages/DatasetList/index.tsx
@@ -25,6 +25,7 @@ import {
} from '@superset-ui/core';
import { styled, useTheme, css } from '@apache-superset/core/theme';
import { FunctionComponent, useState, useMemo, useCallback, Key } from 'react';
+import type { CellProps } from 'react-table';
import { Link, useHistory } from 'react-router-dom';
import rison from 'rison';
import {
@@ -46,8 +47,9 @@ import {
Loading,
List,
} from '@superset-ui/core/components';
-import { DatasourceModal, GenericLink } from 'src/components';
import {
+ DatasourceModal,
+ GenericLink,
FacePile,
ImportModal as ImportModelsModal,
ModifiedInfo,
@@ -78,6 +80,7 @@ import SemanticViewEditModal from 'src/features/semanticViews/SemanticViewEditMo
import { useSelector } from 'react-redux';
import { QueryObjectColumns } from 'src/views/CRUD/types';
import { WIDER_DROPDOWN_WIDTH } from 'src/components/ListView/utils';
+import type { BootstrapData } from 'src/types/bootstrapTypes';
const extensionsRegistry = getExtensionsRegistry();
const DatasetDeleteRelatedExtension = extensionsRegistry.get(
@@ -123,25 +126,28 @@ const Actions = styled.div`
type Dataset = {
changed_by_name: string;
- changed_by: string;
+ changed_by: Owner;
changed_on_delta_humanized: string;
database: {
id: string;
database_name: string;
} | null;
- kind: string;
+ kind: 'physical' | 'virtual' | 'semantic_view';
source_type?: 'database' | 'semantic_layer';
explore_url: string;
id: number;
owners: Array;
- schema: string;
+ schema: string | null;
table_name: string;
description?: string | null;
cache_timeout?: number | null;
+ extra?: string | Record | null;
+ sql?: string | null;
};
interface VirtualDataset extends Dataset {
- extra: Record;
+ kind: 'virtual';
+ extra: string | Record;
sql: string;
}
@@ -174,71 +180,67 @@ const DatasetList: FunctionComponent = ({
const [loading, setLoading] = useState(true);
const [lastFetchConfig, setLastFetchConfig] =
useState(null);
- const [currentSourceFilter, setCurrentSourceFilter] = useState('');
- const fetchData = useCallback((config: ListViewFetchDataConfig) => {
- setLastFetchConfig(config);
- setLoading(true);
- const { pageIndex, pageSize, sortBy, filters: filterValues } = config;
+ const fetchData = useCallback(
+ (config: ListViewFetchDataConfig) => {
+ setLastFetchConfig(config);
+ setLoading(true);
+ const { pageIndex, pageSize, sortBy, filters: filterValues } = config;
- // Separate source_type filter from other filters
- const sourceTypeFilter = filterValues.find(f => f.id === 'source_type');
+ // Separate source_type filter from other filters
+ const sourceTypeFilter = filterValues.find(f => f.id === 'source_type');
- // Track source filter for conditional Type filter visibility
- const sourceVal =
- sourceTypeFilter?.value && typeof sourceTypeFilter.value === 'object'
- ? (sourceTypeFilter.value as { value: string }).value
- : ((sourceTypeFilter?.value as string) ?? '');
- setCurrentSourceFilter(sourceVal);
- const otherFilters = filterValues
- .filter(f => f.id !== 'source_type')
- .filter(
- ({ value }) => value !== '' && value !== null && value !== undefined,
- )
- .map(({ id, operator: opr, value }) => ({
- col: id,
- opr,
- value:
- value && typeof value === 'object' && 'value' in value
- ? value.value
- : value,
- }));
+ const otherFilters = filterValues
+ .filter(f => f.id !== 'source_type')
+ .filter(
+ ({ value }) => value !== '' && value !== null && value !== undefined,
+ )
+ .map(({ id, operator: opr, value }) => ({
+ col: id,
+ opr,
+ value:
+ value && typeof value === 'object' && 'value' in value
+ ? value.value
+ : value,
+ }));
- // Add source_type filter for the combined endpoint
- const sourceTypeValue =
- sourceTypeFilter?.value && typeof sourceTypeFilter.value === 'object'
- ? (sourceTypeFilter.value as { value: string }).value
- : (sourceTypeFilter?.value as string | undefined);
- if (sourceTypeValue) {
- otherFilters.push({
- col: 'source_type',
- opr: 'eq',
- value: sourceTypeValue,
+ // Add source_type filter for the combined endpoint
+ const sourceTypeValue =
+ sourceTypeFilter?.value && typeof sourceTypeFilter.value === 'object'
+ ? (sourceTypeFilter.value as { value: string }).value
+ : (sourceTypeFilter?.value as string | undefined);
+ if (sourceTypeValue) {
+ otherFilters.push({
+ col: 'source_type',
+ opr: 'eq',
+ value: sourceTypeValue,
+ });
+ }
+
+ const queryParams = rison.encode_uri({
+ order_column: sortBy[0].id,
+ order_direction: sortBy[0].desc ? 'desc' : 'asc',
+ page: pageIndex,
+ page_size: pageSize,
+ ...(otherFilters.length ? { filters: otherFilters } : {}),
});
- }
- const queryParams = rison.encode_uri({
- order_column: sortBy[0].id,
- order_direction: sortBy[0].desc ? 'desc' : 'asc',
- page: pageIndex,
- page_size: pageSize,
- ...(otherFilters.length ? { filters: otherFilters } : {}),
- });
-
- return SupersetClient.get({
- endpoint: `/api/v1/datasource/?q=${queryParams}`,
- })
- .then(({ json = {} }) => {
- setDatasets(json.result);
- setDatasetCount(json.count);
+ return SupersetClient.get({
+ endpoint: `/api/v1/datasource/?q=${queryParams}`,
})
- .catch(() => {
- addDangerToast(t('An error occurred while fetching datasets'));
- })
- .finally(() => {
- setLoading(false);
- });
- }, [addDangerToast]);
+ .then(({ json = {} }) => {
+ setDatasets(json.result);
+ setDatasetCount(json.count);
+ })
+ .catch(() => {
+ addDangerToast(t('An error occurred while fetching datasets'));
+ })
+ .finally(() => {
+ setLoading(false);
+ });
+ },
+ [addDangerToast],
+ );
const refreshData = useCallback(() => {
if (lastFetchConfig) {
@@ -279,11 +281,28 @@ const DatasetList: FunctionComponent = ({
setSSHTunnelPrivateKeyPasswordFields,
] = useState([]);
- const PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET = useSelector(
+ const PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET = useSelector<
+ BootstrapData,
+ boolean
+ >(
state =>
state.common?.conf?.PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET || false,
);
+ const currentSourceFilter = useMemo(() => {
+ const sourceTypeFilter = lastFetchConfig?.filters.find(
+ filter => filter.id === 'source_type',
+ );
+ if (
+ sourceTypeFilter?.value &&
+ typeof sourceTypeFilter.value === 'object' &&
+ 'value' in sourceTypeFilter.value
+ ) {
+ return sourceTypeFilter.value.value as string;
+ }
+ return (sourceTypeFilter?.value as string | undefined) ?? '';
+ }, [lastFetchConfig]);
+
const openDatasetImportModal = () => {
showImportModal(true);
};
@@ -375,7 +394,7 @@ const DatasetList: FunctionComponent = ({
await handleResourceExport('dataset', ids, () => {
setPreparingExport(false);
});
- } catch (error) {
+ } catch {
setPreparingExport(false);
addDangerToast(t('There was an issue exporting the selected datasets'));
}
@@ -402,7 +421,7 @@ const DatasetList: FunctionComponent = ({
explore_url: exploreURL,
},
},
- }: any) => {
+ }: CellProps) => {
let titleLink: JSX.Element;
if (PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET) {
titleLink = (
@@ -418,7 +437,10 @@ const DatasetList: FunctionComponent = ({
);
}
try {
- const parsedExtra = JSON.parse(extra);
+ const parsedExtra =
+ typeof extra === 'string'
+ ? JSON.parse(extra)
+ : (extra as Record | null);
return (
{parsedExtra?.certification && (
@@ -451,7 +473,7 @@ const DatasetList: FunctionComponent = ({
row: {
original: { kind },
},
- }: any) => ,
+ }: CellProps) => ,
Header: t('Type'),
accessor: 'kind',
disableSortBy: true,
@@ -463,7 +485,7 @@ const DatasetList: FunctionComponent = ({
row: {
original: { database },
},
- }: any) => database?.database_name || '-',
+ }: CellProps) => database?.database_name || '-',
Header: t('Database'),
accessor: 'database.database_name',
size: 'xl',
@@ -474,7 +496,7 @@ const DatasetList: FunctionComponent = ({
row: {
original: { schema },
},
- }: any) => schema || '-',
+ }: CellProps) => schema || '-',
Header: t('Schema'),
accessor: 'schema',
size: 'lg',
@@ -491,7 +513,7 @@ const DatasetList: FunctionComponent = ({
row: {
original: { owners = [] },
},
- }: any) => ,
+ }: CellProps) => ,
Header: t('Owners'),
id: 'owners',
disableSortBy: true,
@@ -505,7 +527,9 @@ const DatasetList: FunctionComponent = ({
changed_by: changedBy,
},
},
- }: any) => ,
+ }: CellProps) => (
+
+ ),
Header: t('Last modified'),
accessor: 'changed_on_delta_humanized',
size: 'xl',
@@ -524,7 +548,7 @@ const DatasetList: FunctionComponent = ({
id: 'source_type',
},
{
- Cell: ({ row: { original } }: any) => {
+ Cell: ({ row: { original } }: CellProps) => {
const isSemanticView = original.kind === 'semantic_view';
// Semantic view: only show edit button
@@ -552,13 +576,18 @@ const DatasetList: FunctionComponent = ({
// Dataset: full set of actions
const allowEdit =
- original.owners.map((o: Owner) => o.id).includes(user.userId) ||
- isUserAdmin(user);
+ original.owners
+ .map((o: Owner) => o.id)
+ .includes(Number(user.userId)) || isUserAdmin(user);
const handleEdit = () => openDatasetEditModal(original);
const handleDelete = () => openDatasetDeleteModal(original);
const handleExport = () => handleBulkDatasetExport([original]);
- const handleDuplicate = () => openDatasetDuplicateModal(original);
+ const handleDuplicate = () => {
+ if (original.kind === 'virtual' && original.sql) {
+ openDatasetDuplicateModal(original as VirtualDataset);
+ }
+ };
if (!canEdit && !canDelete && !canExport && !canDuplicate) {
return null;
}
diff --git a/superset-frontend/src/types/Dataset.ts b/superset-frontend/src/types/Dataset.ts
index 1bb14207d6d..4d10b2da3f6 100644
--- a/superset-frontend/src/types/Dataset.ts
+++ b/superset-frontend/src/types/Dataset.ts
@@ -25,11 +25,18 @@ export default interface Dataset {
database: {
id: string;
database_name: string;
- };
+ } | null;
kind: string;
+ source_type?: 'database' | 'semantic_layer';
explore_url: string;
id: number;
owners: Array;
- schema: string;
+ schema: string | null;
+ catalog?: string | null;
table_name: string;
+ description?: string | null;
+ cache_timeout?: number | null;
+ default_endpoint?: string | null;
+ is_sqllab_view?: boolean;
+ is_managed_externally?: boolean;
}
diff --git a/superset/commands/datasource/list.py b/superset/commands/datasource/list.py
index 50ea765b4c3..75ead5516d8 100644
--- a/superset/commands/datasource/list.py
+++ b/superset/commands/datasource/list.py
@@ -141,6 +141,7 @@ class GetCombinedDatasourceListCommand(BaseCommand):
for f in filters:
col = f.get("col")
+ opr = f.get("opr")
value = f.get("value")
if col == "source_type":
@@ -148,9 +149,9 @@ class GetCombinedDatasourceListCommand(BaseCommand):
elif col == "table_name" and f.get("opr") == "ct":
name_filter = value
elif col == "sql":
- if value == "semantic_view":
+ if opr == "dataset_is_null_or_empty" and value == "semantic_view":
type_filter = "semantic_view"
- else:
+ elif opr == "dataset_is_null_or_empty" and isinstance(value, bool):
sql_filter = value
return source_type, name_filter, sql_filter, type_filter
diff --git a/superset/daos/datasource.py b/superset/daos/datasource.py
index 3d347332ce7..60531606971 100644
--- a/superset/daos/datasource.py
+++ b/superset/daos/datasource.py
@@ -42,6 +42,10 @@ logger = logging.getLogger(__name__)
Datasource = Union[SqlaTable, Query, SavedQuery, SemanticView]
+def _escape_ilike_fragment(value: str) -> str:
+ return value.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
+
+
class DatasourceDAO(BaseDAO[Datasource]):
sources: dict[Union[DatasourceType, str], type[Datasource]] = {
DatasourceType.TABLE: SqlaTable,
@@ -108,7 +112,8 @@ class DatasourceDAO(BaseDAO[Datasource]):
ds_q = ds_q.where(get_dataset_access_filters(SqlaTable))
if name_filter:
- ds_q = ds_q.where(SqlaTable.table_name.ilike(f"%{name_filter}%"))
+ escaped = _escape_ilike_fragment(name_filter)
+ ds_q = ds_q.where(SqlaTable.table_name.ilike(f"%{escaped}%", escape="\\"))
if sql_filter is not None:
if sql_filter:
@@ -129,7 +134,8 @@ class DatasourceDAO(BaseDAO[Datasource]):
).select_from(SemanticView.__table__)
if name_filter:
- sv_q = sv_q.where(SemanticView.name.ilike(f"%{name_filter}%"))
+ escaped = _escape_ilike_fragment(name_filter)
+ sv_q = sv_q.where(SemanticView.name.ilike(f"%{escaped}%", escape="\\"))
return sv_q
@@ -143,10 +149,13 @@ class DatasourceDAO(BaseDAO[Datasource]):
) -> tuple[int, list[Any]]:
"""Count, sort, and paginate the combined dataset/semantic-view query."""
sort_col_map = {
+ "changed_on": "changed_on",
"changed_on_delta_humanized": "changed_on",
"table_name": "table_name",
}
- sort_col_name = sort_col_map.get(order_column, "changed_on")
+ if order_column not in sort_col_map:
+ raise ValueError(f"Invalid order column: {order_column}")
+ sort_col_name = sort_col_map[order_column]
total_count = (
db.session.execute(select(func.count()).select_from(combined)).scalar() or 0
diff --git a/superset/datasource/api.py b/superset/datasource/api.py
index 6690d9d34f2..0a2ef99823c 100644
--- a/superset/datasource/api.py
+++ b/superset/datasource/api.py
@@ -346,10 +346,13 @@ class DatasourceRestApi(BaseSupersetApi):
if not can_read_datasets and not can_read_sv:
return self.response(403, message="Access denied")
- result = GetCombinedDatasourceListCommand(
- args=kwargs.get("rison", {}),
- can_read_datasets=can_read_datasets,
- can_read_semantic_views=can_read_sv,
- ).run()
+ try:
+ result = GetCombinedDatasourceListCommand(
+ args=kwargs.get("rison", {}),
+ can_read_datasets=can_read_datasets,
+ can_read_semantic_views=can_read_sv,
+ ).run()
+ except ValueError as ex:
+ return self.response(400, message=str(ex))
return self.response(200, **result)
diff --git a/superset/datasource/schemas.py b/superset/datasource/schemas.py
index bd9bf5326f9..c73d8a8e61b 100644
--- a/superset/datasource/schemas.py
+++ b/superset/datasource/schemas.py
@@ -51,9 +51,13 @@ class DatasetListSchema(Schema):
description = fields.String(allow_none=True)
explore_url = fields.String()
database = fields.Method("get_database")
+ catalog = fields.String(allow_none=True)
schema = fields.String(allow_none=True)
sql = fields.String(allow_none=True)
- extra = fields.String(allow_none=True)
+ extra = fields.Raw(allow_none=True)
+ default_endpoint = fields.String(allow_none=True)
+ is_sqllab_view = fields.Boolean(allow_none=True)
+ is_managed_externally = fields.Boolean(allow_none=True)
owners = fields.Method("get_owners")
changed_by_name = fields.String()
changed_by = fields.Method("get_changed_by")
@@ -106,9 +110,13 @@ class SemanticViewListSchema(Schema):
description = fields.String(allow_none=True)
explore_url = fields.String()
database = fields.Constant(None)
+ catalog = fields.Constant(None)
schema = fields.Constant(None)
sql = fields.Constant(None)
extra = fields.Constant(None)
+ default_endpoint = fields.Constant(None)
+ is_sqllab_view = fields.Constant(False)
+ is_managed_externally = fields.Constant(False)
owners = fields.Constant([])
changed_by_name = fields.String()
changed_by = fields.Method("get_changed_by")
diff --git a/tests/integration_tests/datasource/api_tests.py b/tests/integration_tests/datasource/api_tests.py
index 4c285caeb69..0dda05a41ac 100644
--- a/tests/integration_tests/datasource/api_tests.py
+++ b/tests/integration_tests/datasource/api_tests.py
@@ -204,3 +204,55 @@ class TestDatasourceApi(SupersetTestCase):
assert rv.status_code == 200
response = json.loads(rv.data.decode("utf-8"))
assert response["result"] == []
+
+ @patch("superset.datasource.api.security_manager.can_access")
+ @patch("superset.datasource.api.GetCombinedDatasourceListCommand.run")
+ def test_combined_list_invalid_order_column(
+ self,
+ run_mock,
+ can_access_mock,
+ ):
+ security_manager.add_permission_view_menu("can_combined_list", "Datasource")
+ perm = security_manager.find_permission_view_menu(
+ "can_combined_list", "Datasource"
+ )
+ admin_role = security_manager.find_role("Admin")
+ security_manager.add_permission_role(admin_role, perm)
+ can_access_mock.side_effect = [True, True]
+ run_mock.side_effect = ValueError("Invalid order column: invalid")
+ self.login(ADMIN_USERNAME)
+
+ rv = self.client.get(
+ "api/v1/datasource/?q=(order_column:invalid,order_direction:desc,page:0,page_size:25)"
+ )
+
+ assert rv.status_code == 400
+ response = json.loads(rv.data.decode("utf-8"))
+ assert response["message"] == "Invalid order column: invalid"
+
+ @patch("superset.datasource.api.security_manager.can_access")
+ @patch("superset.datasource.api.GetCombinedDatasourceListCommand.run")
+ def test_combined_list_semantic_layers_off(
+ self,
+ run_mock,
+ can_access_mock,
+ ):
+ security_manager.add_permission_view_menu("can_combined_list", "Datasource")
+ perm = security_manager.find_permission_view_menu(
+ "can_combined_list", "Datasource"
+ )
+ admin_role = security_manager.find_role("Admin")
+ security_manager.add_permission_role(admin_role, perm)
+ can_access_mock.return_value = True
+ run_mock.return_value = {"count": 1, "result": []}
+ self.login(ADMIN_USERNAME)
+
+ with patch("superset.datasource.api.is_feature_enabled", return_value=False):
+ rv = self.client.get(
+ "api/v1/datasource/?q=(order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)"
+ )
+
+ assert rv.status_code == 200
+ run_mock.assert_called_once()
+ _, kwargs = run_mock.call_args
+ assert kwargs == {}
diff --git a/tests/unit_tests/commands/datasource/__init__.py b/tests/unit_tests/commands/datasource/__init__.py
new file mode 100644
index 00000000000..13a83393a91
--- /dev/null
+++ b/tests/unit_tests/commands/datasource/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/unit_tests/commands/datasource/list_test.py b/tests/unit_tests/commands/datasource/list_test.py
new file mode 100644
index 00000000000..06294ee1b25
--- /dev/null
+++ b/tests/unit_tests/commands/datasource/list_test.py
@@ -0,0 +1,141 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from unittest.mock import patch
+
+import pytest
+from sqlalchemy import literal, select
+
+from superset.commands.datasource.list import GetCombinedDatasourceListCommand
+
+
+def test_parse_filters_semantic_view_requires_dataset_operator() -> None:
+ source_type, name_filter, sql_filter, type_filter = (
+ GetCombinedDatasourceListCommand._parse_filters(
+ [{"col": "sql", "opr": "eq", "value": "semantic_view"}]
+ )
+ )
+
+ assert source_type == "all"
+ assert name_filter is None
+ assert sql_filter is None
+ assert type_filter is None
+
+
+def test_parse_filters_semantic_view_with_dataset_operator() -> None:
+ source_type, name_filter, sql_filter, type_filter = (
+ GetCombinedDatasourceListCommand._parse_filters(
+ [
+ {
+ "col": "sql",
+ "opr": "dataset_is_null_or_empty",
+ "value": "semantic_view",
+ }
+ ]
+ )
+ )
+
+ assert source_type == "all"
+ assert name_filter is None
+ assert sql_filter is None
+ assert type_filter == "semantic_view"
+
+
+def test_parse_filters_sql_bool_requires_dataset_operator() -> None:
+ source_type, name_filter, sql_filter, type_filter = (
+ GetCombinedDatasourceListCommand._parse_filters(
+ [{"col": "sql", "opr": "eq", "value": True}]
+ )
+ )
+
+ assert source_type == "all"
+ assert name_filter is None
+ assert sql_filter is None
+ assert type_filter is None
+
+
+def test_resolve_source_type_semantic_view_filter_forces_semantic_layer() -> None:
+ command = GetCombinedDatasourceListCommand(
+ args={},
+ can_read_datasets=True,
+ can_read_semantic_views=True,
+ )
+
+ source_type = command._resolve_source_type(
+ source_type="all",
+ sql_filter=None,
+ type_filter="semantic_view",
+ )
+
+ assert source_type == "semantic_layer"
+
+
+def test_resolve_source_type_sql_filter_forces_database() -> None:
+ command = GetCombinedDatasourceListCommand(
+ args={},
+ can_read_datasets=True,
+ can_read_semantic_views=True,
+ )
+
+ source_type = command._resolve_source_type(
+ source_type="all",
+ sql_filter=True,
+ type_filter=None,
+ )
+
+ assert source_type == "database"
+
+
+@pytest.mark.parametrize(
+ "order_column",
+ ["unknown", "database.database_name", "id"],
+)
+def test_run_raises_for_invalid_sort_column(order_column: str) -> None:
+ command = GetCombinedDatasourceListCommand(
+ args={"order_column": order_column, "order_direction": "desc"},
+ can_read_datasets=True,
+ can_read_semantic_views=True,
+ )
+
+ ds_q = select(
+ literal(1).label("item_id"),
+ literal("database").label("source_type"),
+ literal("2026-01-01").label("changed_on"),
+ literal("name").label("table_name"),
+ )
+ sv_q = select(
+ literal(2).label("item_id"),
+ literal("semantic_layer").label("source_type"),
+ literal("2026-01-01").label("changed_on"),
+ literal("name").label("table_name"),
+ )
+
+ with (
+ patch(
+ "superset.commands.datasource.list.DatasourceDAO.build_dataset_query",
+ return_value=ds_q,
+ ),
+ patch(
+ "superset.commands.datasource.list.DatasourceDAO.build_semantic_view_query",
+ return_value=sv_q,
+ ),
+ patch(
+ "superset.commands.datasource.list.DatasourceDAO.paginate_combined_query",
+ side_effect=ValueError(f"Invalid order column: {order_column}"),
+ ),
+ ):
+ with pytest.raises(ValueError, match=f"Invalid order column: {order_column}"):
+ command.run()
diff --git a/tests/unit_tests/datasource/dao_tests.py b/tests/unit_tests/datasource/dao_tests.py
index 17e170f12b0..a55b8058b61 100644
--- a/tests/unit_tests/datasource/dao_tests.py
+++ b/tests/unit_tests/datasource/dao_tests.py
@@ -18,6 +18,7 @@
from collections.abc import Iterator
import pytest
+from sqlalchemy import literal, select
from sqlalchemy.orm.session import Session
from superset.utils.core import DatasourceType
@@ -138,3 +139,31 @@ def test_not_found_datasource(session_with_data: Session) -> None:
datasource_type="table",
database_id_or_uuid=500000,
)
+
+
+def test_escape_ilike_fragment() -> None:
+ from superset.daos.datasource import _escape_ilike_fragment
+
+ assert _escape_ilike_fragment("foo%bar_baz\\") == "foo\\%bar\\_baz\\\\"
+
+
+def test_paginate_combined_query_invalid_sort_column() -> None:
+ from superset.daos.datasource import DatasourceDAO
+
+ combined = (
+ select(
+ literal(1).label("item_id"),
+ literal("database").label("source_type"),
+ literal("2026-01-01").label("changed_on"),
+ literal("name").label("table_name"),
+ )
+ ).subquery()
+
+ with pytest.raises(ValueError, match="Invalid order column: invalid"):
+ DatasourceDAO.paginate_combined_query(
+ combined=combined,
+ order_column="invalid",
+ order_direction="desc",
+ page=0,
+ page_size=25,
+ )