diff --git a/superset-frontend/src/features/semanticLayers/SemanticLayerModal.test.tsx b/superset-frontend/src/features/semanticLayers/SemanticLayerModal.test.tsx
new file mode 100644
index 00000000000..3ac348fdc73
--- /dev/null
+++ b/superset-frontend/src/features/semanticLayers/SemanticLayerModal.test.tsx
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { SupersetClient } from '@superset-ui/core';
+import { render, waitFor } from 'spec/helpers/testing-library';
+
+import SemanticLayerModal from './SemanticLayerModal';
+
+let mockJsonFormsChangeTriggered = false;
+
+jest.mock('@jsonforms/react', () => ({
+  ...jest.requireActual('@jsonforms/react'),
+  JsonForms: ({ onChange }: { onChange: (value: unknown) => void }) => {
+    // eslint-disable-next-line react-hooks/rules-of-hooks
+    if (!mockJsonFormsChangeTriggered) {
+      mockJsonFormsChangeTriggered = true;
+      onChange({
+        data: { warehouse: 'wh1' },
+        errors: [],
+      });
+    }
+    return null;
+  },
+}));
+
+jest.mock('@superset-ui/core', () => ({
+  ...jest.requireActual('@superset-ui/core'),
+  SupersetClient: {
+    ...jest.requireActual('@superset-ui/core').SupersetClient,
+    get: jest.fn(),
+    post: jest.fn(),
+    put: jest.fn(),
+  },
+  getClientErrorObject: jest.fn(() => Promise.resolve({ error: '' })),
+}));
+
+const mockedGet = SupersetClient.get as jest.Mock;
+const mockedPost = SupersetClient.post as jest.Mock;
+
+const props = {
+  show: true,
+  onHide: jest.fn(),
+  addDangerToast: jest.fn(),
+  addSuccessToast: jest.fn(),
+  semanticLayerUuid: '11111111-1111-1111-1111-111111111111',
+};
+
+beforeEach(() => {
+  mockJsonFormsChangeTriggered = false;
+  jest.useFakeTimers();
+  mockedGet.mockReset();
+  mockedPost.mockReset();
+
+  mockedGet
+    .mockResolvedValueOnce({
+      json: {
+        result: [{ id: 'snowflake', name: 'Snowflake', description: '' }],
+      },
+    })
+    .mockResolvedValueOnce({
+      json: {
+        result: {
+          name: 'Layer 1',
+          type: 'snowflake',
+          configuration: { warehouse: 'wh0' },
+        },
+      },
+    });
+
+  mockedPost.mockResolvedValue({
+    json: {
+      result: {
+        type: 'object',
+        properties: {
+          warehouse: {
+            type: 'string',
+            'x-dynamic': true,
+            'x-dependsOn': ['warehouse'],
+          },
+        },
+      },
+    },
+  });
+});
+
+afterEach(() => {
+  jest.runOnlyPendingTimers();
+  jest.useRealTimers();
+});
+
+test('posts configuration schema refresh after debounce', async () => {
+  render(<SemanticLayerModal {...props} />);
+
+  await waitFor(() => {
+    expect(mockedPost).toHaveBeenCalledWith({
+      endpoint: '/api/v1/semantic_layer/schema/configuration',
+      jsonPayload: { type: 'snowflake' },
+    });
+  });
+
+  jest.advanceTimersByTime(501);
+
+  await waitFor(() => {
+    expect(mockedPost).toHaveBeenCalledWith({
+      endpoint: '/api/v1/semantic_layer/schema/configuration',
+      jsonPayload: {
+        type: 'snowflake',
+        configuration: { warehouse: 'wh1' },
+      },
+    });
+  });
+});
diff --git a/superset-frontend/src/features/semanticLayers/SemanticLayerModal.tsx b/superset-frontend/src/features/semanticLayers/SemanticLayerModal.tsx
index 7803138542f..e00eead4ba7 100644
--- a/superset-frontend/src/features/semanticLayers/SemanticLayerModal.tsx
+++ b/superset-frontend/src/features/semanticLayers/SemanticLayerModal.tsx
@@ -18,10 +18,8 @@
*/
import { useState, useEffect, useCallback, useRef } from 'react';
import { t } from '@apache-superset/core/translation';
-import { styled } from '@apache-superset/core/theme';
-import { SupersetClient } from '@superset-ui/core';
-import { Input, Spin } from 'antd';
-import { Select } from '@superset-ui/core/components';
+import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
+import { Input, Select, Button } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import { JsonForms, withJsonFormsControlProps } from '@jsonforms/react';
import type {
@@ -131,7 +129,7 @@ function DynamicFieldControl(props: ControlProps) {
options: {
...props.uischema.options,
placeholderText: t('Loading...'),
- inputProps: { suffix: },
+ inputProps: { suffix: },
},
};
return TextControl({ ...props, uischema, enabled: false });
@@ -271,27 +269,6 @@ function serializeDependencyValues(
return JSON.stringify(snapshot);
}
-const ModalContent = styled.div`
- padding: ${({ theme }) => theme.sizeUnit * 4}px;
-`;
-
-const BackLink = styled.button`
- background: none;
- border: none;
- color: ${({ theme }) => theme.colorPrimary};
- cursor: pointer;
- padding: 0;
- font-size: ${({ theme }) => theme.fontSize}px;
- margin-bottom: ${({ theme }) => theme.sizeUnit * 2}px;
- display: inline-flex;
- align-items: center;
- gap: ${({ theme }) => theme.sizeUnit}px;
-
- &:hover {
- text-decoration: underline;
- }
-`;
-
interface SemanticLayerType {
id: string;
name: string;
@@ -368,11 +345,21 @@ export default function SemanticLayerModal({
jsonPayload: { type, configuration },
});
applySchema(json.result);
+ if (json.warning) {
+ addDangerToast(String(json.warning));
+ }
if (isInitialFetch) setStep('config');
- } catch {
+ } catch (error) {
+ const clientError = await getClientErrorObject(error);
if (isInitialFetch) {
addDangerToast(
- t('An error occurred while fetching the configuration schema'),
+ clientError.error ||
+ t('An error occurred while fetching the configuration schema'),
+ );
+ } else {
+ addDangerToast(
+ clientError.error ||
+ t('An error occurred while refreshing the configuration schema'),
);
}
} finally {
@@ -405,9 +392,11 @@ export default function SemanticLayerModal({
});
applySchema(schemaJson.result);
setStep('config');
- } catch {
+ } catch (error) {
+ const clientError = await getClientErrorObject(error);
addDangerToast(
- t('An error occurred while fetching the semantic layer'),
+ clientError.error ||
+ t('An error occurred while fetching the semantic layer'),
);
} finally {
setLoading(false);
@@ -477,11 +466,13 @@ export default function SemanticLayerModal({
addSuccessToast(t('Semantic layer created'));
}
onHide();
- } catch {
+ } catch (error) {
+ const clientError = await getClientErrorObject(error);
addDangerToast(
- isEditMode
- ? t('An error occurred while updating the semantic layer')
- : t('An error occurred while creating the semantic layer'),
+ clientError.error ||
+ (isEditMode
+ ? t('An error occurred while updating the semantic layer')
+ : t('An error occurred while creating the semantic layer')),
);
} finally {
setSaving(false);
@@ -536,15 +527,9 @@ export default function SemanticLayerModal({
setFormData(data);
errorsRef.current = errors ?? [];
setHasErrors(errorsRef.current.length > 0);
- if (
- validationMode === 'ValidateAndShow' &&
- errorsRef.current.length === 0
- ) {
- handleCreate();
- }
maybeRefreshSchema(data);
},
- [validationMode, handleCreate, maybeRefreshSchema],
+ [maybeRefreshSchema],
);
const selectedTypeName =
@@ -574,7 +559,7 @@ export default function SemanticLayerModal({
contentLoading={loading}
>
{step === 'type' ? (
-
+ <>
-
+ >
) : (
-
+ <>
{!isEditMode && (
-
-
+ }
+ onClick={handleBack}
+ >
{t('Back')}
-
+
)}
)}
-
+ >
)}
);
diff --git a/superset-frontend/src/pages/DatabaseList/index.tsx b/superset-frontend/src/pages/DatabaseList/index.tsx
index a263fe2d2b4..2d7ee93b0ca 100644
--- a/superset-frontend/src/pages/DatabaseList/index.tsx
+++ b/superset-frontend/src/pages/DatabaseList/index.tsx
@@ -25,6 +25,7 @@ import {
} from '@superset-ui/core';
import { css, styled, useTheme } from '@apache-superset/core/theme';
import { useState, useMemo, useEffect, useCallback } from 'react';
+import type { CellProps } from 'react-table';
import rison from 'rison';
import { useSelector } from 'react-redux';
import { useQueryParams, BooleanParam } from 'use-query-params';
@@ -68,6 +69,7 @@ import { DatabaseObject } from 'src/features/databases/types';
import { QueryObjectColumns } from 'src/views/CRUD/types';
import { WIDER_DROPDOWN_WIDTH } from 'src/components/ListView/utils';
import { ModalTitleWithIcon } from 'src/components/ModalTitleWithIcon';
+import type Owner from 'src/types/Owner';
const extensionsRegistry = getExtensionsRegistry();
const DatabaseDeleteRelatedExtension = extensionsRegistry.get(
@@ -82,6 +84,8 @@ const PAGE_SIZE = 25;
type ConnectionItem = DatabaseObject & {
source_type?: 'database' | 'semantic_layer';
sl_type?: string;
+ changed_by?: Owner;
+ changed_on_delta_humanized?: string;
};
interface DatabaseDeleteObject extends DatabaseObject {
@@ -497,7 +501,7 @@ function DatabaseList({
await handleResourceExport('database', [database.id], () => {
setPreparingExport(false);
});
- } catch (error) {
+ } catch {
setPreparingExport(false);
addDangerToast(t('There was an issue exporting the database'));
}
@@ -596,11 +600,11 @@ function DatabaseList({
{t('AQE')}
),
- Cell: ({ row: { original } }: any) =>
+ Cell: ({ row: { original } }: CellProps) =>
original.source_type === 'semantic_layer' ? (
–
) : (
-
+
),
size: 'sm',
id: 'allow_run_async',
@@ -616,11 +620,11 @@ function DatabaseList({
{t('DML')}
),
- Cell: ({ row: { original } }: any) =>
+ Cell: ({ row: { original } }: CellProps) =>
original.source_type === 'semantic_layer' ? (
–
) : (
-
+
),
size: 'sm',
id: 'allow_dml',
@@ -628,11 +632,11 @@ function DatabaseList({
{
accessor: 'allow_file_upload',
Header: t('File upload'),
- Cell: ({ row: { original } }: any) =>
+ Cell: ({ row: { original } }: CellProps) =>
original.source_type === 'semantic_layer' ? (
–
) : (
-
+
),
size: 'md',
id: 'allow_file_upload',
@@ -640,11 +644,11 @@ function DatabaseList({
{
accessor: 'expose_in_sqllab',
Header: t('Expose in SQL Lab'),
- Cell: ({ row: { original } }: any) =>
+ Cell: ({ row: { original } }: CellProps) =>
original.source_type === 'semantic_layer' ? (
–
) : (
-
+
),
size: 'md',
id: 'expose_in_sqllab',
@@ -657,7 +661,9 @@ function DatabaseList({
changed_on_delta_humanized: changedOn,
},
},
- }: any) => ,
+ }: CellProps) => (
+
+ ),
Header: t('Last modified'),
accessor: 'changed_on_delta_humanized',
size: 'xl',
diff --git a/superset/semantic_layers/api.py b/superset/semantic_layers/api.py
index 14527ffcf8a..d74decb1a82 100644
--- a/superset/semantic_layers/api.py
+++ b/superset/semantic_layers/api.py
@@ -25,6 +25,7 @@ from flask_appbuilder.api.schemas import get_list_schema
from flask_appbuilder.models.sqla.interface import SQLAInterface
from marshmallow import ValidationError
from pydantic import ValidationError as PydanticValidationError
+from sqlalchemy.orm import load_only
from superset import db, event_logger, is_feature_enabled
from superset.commands.semantic_layer.create import CreateSemanticLayerCommand
@@ -314,14 +315,23 @@ class SemanticLayerRestApi(BaseSupersetApi):
if config := body.get("configuration"):
parsed_config = _parse_partial_config(cls, config)
+ warning: str | None = None
try:
schema = cls.get_configuration_schema(parsed_config)
- except Exception: # pylint: disable=broad-except
+ except Exception as ex: # pylint: disable=broad-except
+ warning = str(ex)
+ logger.exception(
+ "Error enriching semantic layer configuration schema for type %s",
+ sl_type,
+ )
# Connection or query failures during schema enrichment should not
# prevent the form from rendering — return the base schema instead.
schema = cls.get_configuration_schema(None)
- resp = make_response(json.dumps({"result": schema}, sort_keys=False), 200)
+ payload: dict[str, Any] = {"result": schema}
+ if warning:
+ payload["warning"] = warning
+ resp = make_response(json.dumps(payload, sort_keys=False), 200)
resp.headers["Content-Type"] = "application/json; charset=utf-8"
return resp
@@ -569,7 +579,7 @@ class SemanticLayerRestApi(BaseSupersetApi):
source_type, name_filter = self._parse_connection_filters(filters)
if not is_feature_enabled("SEMANTIC_LAYERS"):
- source_type = "database"
+ return self.response_404()
all_items = self._fetch_connection_items(source_type, name_filter)
@@ -611,18 +621,41 @@ class SemanticLayerRestApi(BaseSupersetApi):
"""Fetch database and semantic layer items based on filters."""
db_items: list[tuple[str, Database]] = []
if source_type in ("all", "database"):
- db_q = db.session.query(Database)
+ db_q = db.session.query(Database).options(
+ load_only(
+ Database.id,
+ Database.uuid,
+ Database.database_name,
+ Database.backend,
+ Database.allow_run_async,
+ Database.allow_dml,
+ Database.allow_file_upload,
+ Database.expose_in_sqllab,
+ Database.changed_on,
+ Database.changed_by_fk,
+ )
+ )
if name_filter:
db_q = db_q.filter(Database.database_name.ilike(f"%{name_filter}%"))
db_items = [("database", obj) for obj in db_q.all()]
sl_items: list[tuple[str, SemanticLayer]] = []
if source_type in ("all", "semantic_layer"):
- sl_q = db.session.query(SemanticLayer)
+ sl_q = db.session.query(SemanticLayer).options(
+ load_only(
+ SemanticLayer.uuid,
+ SemanticLayer.name,
+ SemanticLayer.type,
+ SemanticLayer.description,
+ SemanticLayer.changed_on,
+ SemanticLayer.changed_by_fk,
+ )
+ )
if name_filter:
sl_q = sl_q.filter(SemanticLayer.name.ilike(f"%{name_filter}%"))
sl_items = [("semantic_layer", obj) for obj in sl_q.all()]
+ # TODO: move sort + pagination to SQL before GA.
return db_items + sl_items # type: ignore
@staticmethod
diff --git a/tests/unit_tests/semantic_layers/api_test.py b/tests/unit_tests/semantic_layers/api_test.py
index c6482c0f139..48a5ab64e57 100644
--- a/tests/unit_tests/semantic_layers/api_test.py
+++ b/tests/unit_tests/semantic_layers/api_test.py
@@ -1181,6 +1181,7 @@ def test_configuration_schema_enrichment_error_fallback(
assert response.status_code == 200
assert response.json["result"] == {"type": "object"}
+ assert response.json["warning"] == "connection failed"
assert mock_cls.get_configuration_schema.call_count == 2
@@ -1218,9 +1219,11 @@ def test_connections_list(
mock_db_session = mocker.patch("superset.semantic_layers.api.db.session")
db_query = MagicMock()
+ db_query.options.return_value = db_query
db_query.all.return_value = [mock_db]
db_query.filter.return_value = db_query
sl_query = MagicMock()
+ sl_query.options.return_value = sl_query
sl_query.all.return_value = [mock_layer]
sl_query.filter.return_value = sl_query
mock_db_session.query.side_effect = [db_query, sl_query]
@@ -1252,28 +1255,7 @@ def test_connections_database_only(
full_api_access: None,
mocker: MockerFixture,
) -> None:
- """Test GET /connections/ with semantic layers disabled."""
- from datetime import datetime
-
- mock_db = MagicMock()
- mock_db.id = 1
- mock_db.uuid = uuid_lib.uuid4()
- mock_db.database_name = "PostgreSQL"
- mock_db.backend = "postgresql"
- mock_db.allow_run_async = False
- mock_db.allow_dml = False
- mock_db.allow_file_upload = False
- mock_db.expose_in_sqllab = True
- mock_db.changed_on = datetime(2026, 1, 1)
- mock_db.changed_on_delta_humanized.return_value = "1 month ago"
- mock_db.changed_by = MagicMock()
- mock_db.changed_by.first_name = "John"
- mock_db.changed_by.last_name = "Doe"
-
- mock_db_session = mocker.patch("superset.semantic_layers.api.db.session")
- db_query = MagicMock()
- db_query.all.return_value = [mock_db]
- mock_db_session.query.return_value = db_query
+ """Test GET /connections/ returns 404 when feature flag is disabled."""
mocker.patch(
"superset.semantic_layers.api.is_feature_enabled",
@@ -1282,12 +1264,7 @@ def test_connections_database_only(
response = client.get("/api/v1/semantic_layer/connections/")
- assert response.status_code == 200
- assert response.json["count"] == 1
- result = response.json["result"][0]
- assert result["source_type"] == "database"
- assert result["database_name"] == "PostgreSQL"
- assert result["changed_by"]["first_name"] == "John"
+ assert response.status_code == 404
@SEMANTIC_LAYERS_APP
@@ -1299,9 +1276,11 @@ def test_connections_name_filter(
"""Test GET /connections/ with name filter."""
mock_db_session = mocker.patch("superset.semantic_layers.api.db.session")
db_query = MagicMock()
+ db_query.options.return_value = db_query
db_query.all.return_value = []
db_query.filter.return_value = db_query
sl_query = MagicMock()
+ sl_query.options.return_value = sl_query
sl_query.all.return_value = []
sl_query.filter.return_value = sl_query
mock_db_session.query.side_effect = [db_query, sl_query]
@@ -1359,8 +1338,10 @@ def test_connections_sort_by_name(
mock_db_session = mocker.patch("superset.semantic_layers.api.db.session")
db_query = MagicMock()
+ db_query.options.return_value = db_query
db_query.all.return_value = [mock_db]
sl_query = MagicMock()
+ sl_query.options.return_value = sl_query
sl_query.all.return_value = [mock_layer]
mock_db_session.query.side_effect = [db_query, sl_query]
@@ -1412,6 +1393,7 @@ def test_connections_source_type_filter(
mock_db_session = mocker.patch("superset.semantic_layers.api.db.session")
db_query = MagicMock()
+ db_query.options.return_value = db_query
db_query.all.return_value = [mock_db]
mock_db_session.query.return_value = db_query
@@ -1454,6 +1436,7 @@ def test_connections_source_type_semantic_layer_only(
mock_db_session = mocker.patch("superset.semantic_layers.api.db.session")
sl_query = MagicMock()
+ sl_query.options.return_value = sl_query
sl_query.all.return_value = [mock_layer]
mock_db_session.query.return_value = sl_query