Compare commits

...

3 Commits

Author SHA1 Message Date
Beto Dealmeida
0f08f016d2 Address semantic layer review nits
Improve semantic layer schema refresh error handling and connections endpoint behavior to reduce noisy failures while keeping this feature branch focused. Also restore frontend typing consistency and add debounce coverage for dynamic schema refresh.
2026-04-23 14:00:59 -04:00
Beto Dealmeida
65fb2ff834 Fix rebase 2026-04-23 13:33:47 -04:00
Beto Dealmeida
d659089c59 feat: UI for semantic layers 2026-04-23 13:33:47 -04:00
13 changed files with 2309 additions and 129 deletions

View File

@@ -0,0 +1,73 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from pydantic import BaseModel
def build_configuration_schema(
    config_class: type[BaseModel],
    configuration: BaseModel | None = None,
) -> dict[str, Any]:
    """
    Build a JSON schema for ``config_class`` suitable for the frontend.

    Two generic fix-ups are applied on top of ``model_json_schema()``:

    - Properties are reordered to follow the declared model field order,
      since Pydantic emits them alphabetically.
    - When no ``configuration`` is supplied, every ``x-dynamic`` property
      gets ``enum: []`` so the frontend renders it as an empty dropdown.

    Semantic layer implementations should call this helper instead of
    ``model_json_schema()`` directly, then add only their own dynamic
    population logic on top.
    """
    schema = config_class.model_json_schema()

    # Restore declared field order, honouring aliases when present.
    ordered_keys = [
        field_info.alias or field_name
        for field_name, field_info in config_class.model_fields.items()
    ]
    original_properties = schema["properties"]
    schema["properties"] = {
        name: original_properties[name]
        for name in ordered_keys
        if name in original_properties
    }

    if configuration is None:
        # No connection info yet: dynamic dropdowns should render empty.
        for property_schema in schema["properties"].values():
            if property_schema.get("x-dynamic"):
                property_schema["enum"] = []

    return schema
def check_dependencies(
    prop_schema: dict[str, Any],
    configuration: BaseModel,
) -> bool:
    """
    Return whether a dynamic property's dependencies are satisfied.

    The property schema may declare an ``x-dependsOn`` list of attribute
    names; the check passes only when each named attribute exists on
    ``configuration`` and holds a truthy value. A property with no
    declared dependencies is always satisfied.
    """
    for dependency in prop_schema.get("x-dependsOn", []):
        # Missing attributes resolve to None, which fails the check.
        if not getattr(configuration, dependency, None):
            return False
    return True

View File

@@ -32,6 +32,8 @@ class SemanticLayer(ABC, Generic[ConfigT, SemanticViewT]):
Abstract base class for semantic layers.
"""
configuration_class: type[BaseModel]
@classmethod
@abstractmethod
def from_configuration(

View File

@@ -28,8 +28,14 @@
"@emotion/cache": "^11.4.0",
"@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.1",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/ibm-plex-mono": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@googleapis/sheets": "^13.0.1",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@luma.gl/constants": "~9.2.5",
"@luma.gl/core": "~9.2.5",
"@luma.gl/engine": "~9.2.5",
@@ -37,6 +43,7 @@
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",
@@ -4042,6 +4049,15 @@
}
}
},
"node_modules/@fontsource/fira-code": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/fira-code/-/fira-code-5.2.7.tgz",
"integrity": "sha512-tnB9NNund9TwIym8/7DMJe573nlPEQb+fKUV5GL8TBYXjIhDvL0D7mgmNVNQUPhXp+R7RylQeiBdkA4EbOHPGQ==",
"license": "OFL-1.1",
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
},
"node_modules/@fontsource/ibm-plex-mono": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/ibm-plex-mono/-/ibm-plex-mono-5.2.7.tgz",
@@ -4052,15 +4068,34 @@
}
},
"node_modules/@fontsource/inter": {
"version": "5.2.6",
"resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.6.tgz",
"integrity": "sha512-CZs9S1CrjD0jPwsNy9W6j0BhsmRSQrgwlTNkgQXTsAeDRM42LBRLo3eo9gCzfH4GvV7zpyf78Ozfl773826csw==",
"version": "5.2.8",
"resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.8.tgz",
"integrity": "sha512-P6r5WnJoKiNVV+zvW2xM13gNdFhAEpQ9dQJHt3naLvfg+LkF2ldgSLiF4T41lf1SQCM9QmkqPTn4TH568IRagg==",
"license": "OFL-1.1",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
},
"node_modules/@great-expectations/jsonforms-antd-renderers": {
"version": "2.2.11",
"resolved": "https://registry.npmjs.org/@great-expectations/jsonforms-antd-renderers/-/jsonforms-antd-renderers-2.2.11.tgz",
"integrity": "sha512-QeKI6RP+vZo5Bf5WX5Mx6CPEBYvR83bIyeezHoyVVc1+pGsDqO9lsFdbaFKpqozV+s/TRB1KmVAW4GxpMzLuAw==",
"license": "MIT",
"dependencies": {
"lodash.isempty": "^4.4.0",
"lodash.merge": "^4.6.2",
"lodash.range": "^3.2.0",
"lodash.startcase": "^4.4.0"
},
"peerDependencies": {
"@ant-design/icons": "^5.3.0",
"@jsonforms/core": "^3.3.0",
"@jsonforms/react": "^3.3.0",
"antd": "^5.14.0",
"dayjs": "^1",
"react": "^17 || ^18"
}
},
"node_modules/@googleapis/sheets": {
"version": "13.0.1",
"resolved": "https://registry.npmjs.org/@googleapis/sheets/-/sheets-13.0.1.tgz",
@@ -6120,6 +6155,45 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@jsonforms/core": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/core/-/core-3.7.0.tgz",
"integrity": "sha512-CE9viWtwi9QWLqlWLeOul1/R1GRAyOA9y6OoUpsCc0FhyR+g5p29F3k0fUExHWxL0Sf4KHcXYkfhtqfRBPS8ww==",
"license": "MIT",
"dependencies": {
"@types/json-schema": "^7.0.3",
"ajv": "^8.6.1",
"ajv-formats": "^2.1.0",
"lodash": "^4.17.21"
}
},
"node_modules/@jsonforms/react": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/react/-/react-3.7.0.tgz",
"integrity": "sha512-HkY7qAx8vW97wPEgZ7GxCB3iiXG1c95GuObxtcDHGPBJWMwnxWBnVYJmv5h7nthrInKsQKHZL5OusnC/sj/1GQ==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonforms/vanilla-renderers": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/vanilla-renderers/-/vanilla-renderers-3.7.0.tgz",
"integrity": "sha512-RdXQGsheARUJVbaTe6SqGw9W4/yrm0BgUok6OKUj8krp1NF4fqXc5UbYGHFksMR/p7LCuoYHCtQzKLXEfxJbDw==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"@jsonforms/react": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonjoy.com/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz",
@@ -9487,6 +9561,89 @@
"integrity": "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ==",
"license": "MIT"
},
"node_modules/@rjsf/antd": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/antd/-/antd-5.24.13.tgz",
"integrity": "sha512-UiWE8xoBxxCoe/SEkdQEmL5E6z3I1pw0+y0dTyGt8SHfAxxFc4/OWn7tKOAiNsKCXgf83t0JKn6CHWLD01sAdQ==",
"license": "Apache-2.0",
"dependencies": {
"classnames": "^2.5.1",
"lodash": "^4.17.21",
"lodash-es": "^4.17.21",
"rc-picker": "2.7.6"
},
"engines": {
"node": ">=14"
},
"peerDependencies": {
"@ant-design/icons": "^4.0.0 || ^5.0.0",
"@rjsf/core": "^5.24.x",
"@rjsf/utils": "^5.24.x",
"antd": "^4.24.0 || ^5.8.5",
"dayjs": "^1.8.0",
"react": "^16.14.0 || >=17"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker": {
"version": "2.7.6",
"resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-2.7.6.tgz",
"integrity": "sha512-H9if/BUJUZBOhPfWcPeT15JUI3/ntrG9muzERrXDkSoWmDj4yzmBvumozpxYrHwjcKnjyDGAke68d+whWwvhHA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "^2.2.1",
"date-fns": "2.x",
"dayjs": "1.x",
"moment": "^2.24.0",
"rc-trigger": "^5.0.4",
"rc-util": "^5.37.0",
"shallowequal": "^1.1.0"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger": {
"version": "5.3.4",
"resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.4.tgz",
"integrity": "sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.18.3",
"classnames": "^2.2.6",
"rc-align": "^4.0.0",
"rc-motion": "^2.0.0",
"rc-util": "^5.19.2"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger/node_modules/rc-align": {
"version": "4.0.15",
"resolved": "https://registry.npmjs.org/rc-align/-/rc-align-4.0.15.tgz",
"integrity": "sha512-wqJtVH60pka/nOX7/IspElA8gjPNQKIx/ZqJ6heATCkXpe1Zg4cPVrMD2vC96wjsFFL8WsmhPbx9tdMo1qqlIA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "2.x",
"dom-align": "^1.7.0",
"rc-util": "^5.26.0",
"resize-observer-polyfill": "^1.5.1"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/core": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/core/-/core-5.24.13.tgz",
@@ -22435,6 +22592,22 @@
"integrity": "sha512-O/gRkjWULp3xVX8K85V0H3tsSGole0WYt77KVpGZO2xTGLuVFuvE6JIsIli3fvFHCYBhGFn/8OHEEyMYF+QehA==",
"license": "MIT"
},
"node_modules/date-fns": {
"version": "2.30.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz",
"integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.21.0"
},
"engines": {
"node": ">=0.11"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/date-fns"
}
},
"node_modules/dateformat": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.2.tgz",
@@ -23040,6 +23213,12 @@
"integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==",
"license": "MIT"
},
"node_modules/dom-align": {
"version": "1.12.4",
"resolved": "https://registry.npmjs.org/dom-align/-/dom-align-1.12.4.tgz",
"integrity": "sha512-R8LUSEay/68zE5c8/3BDxiTEvgb4xZTF0RKmAHfiEVN3klfIpXfi2/QCoiWPccVQ0J/ZGdz9OjzL4uJEP/MRAw==",
"license": "MIT"
},
"node_modules/dom-converter": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz",
@@ -35677,6 +35856,12 @@
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==",
"license": "MIT"
},
"node_modules/lodash.isempty": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz",
"integrity": "sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==",
"license": "MIT"
},
"node_modules/lodash.isequal": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
@@ -35708,7 +35893,6 @@
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.once": {
@@ -35719,6 +35903,18 @@
"license": "MIT",
"peer": true
},
"node_modules/lodash.range": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/lodash.range/-/lodash.range-3.2.0.tgz",
"integrity": "sha512-Fgkb7SinmuzqgIhNhAElo0BL/R1rHCnhwSZf78omqSwvWqD0kD2ssOAutQonDKH/ldS8BxA72ORYI09qAY9CYg==",
"license": "MIT"
},
"node_modules/lodash.startcase": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
"integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
"license": "MIT"
},
"node_modules/lodash.uniq": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
@@ -37240,7 +37436,6 @@
"resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz",
"integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==",
"license": "MIT",
"peer": true,
"engines": {
"node": "*"
}
@@ -44766,6 +44961,12 @@
"integrity": "sha512-b6i4ZpVuUxB9h5gfCxPiusKYkqTMOjEbBs4wMaFbkfia4yFv92UKZ6Df8WXcKbn08JNL/abvg3FnMAOfakDvUw==",
"license": "MIT"
},
"node_modules/shallowequal": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz",
"integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==",
"license": "MIT"
},
"node_modules/shapefile": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/shapefile/-/shapefile-0.3.1.tgz",
@@ -51505,7 +51706,7 @@
"react-js-cron": "^5.2.0",
"react-markdown": "^8.0.7",
"react-resize-detector": "^7.1.2",
"react-syntax-highlighter": "^16.1.0",
"react-syntax-highlighter": "^16.1.1",
"react-ultimate-pagination": "^1.3.2",
"regenerator-runtime": "^0.14.1",
"rehype-raw": "^7.0.0",

View File

@@ -117,7 +117,14 @@
"@luma.gl/gltf": "~9.2.5",
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",

View File

@@ -150,6 +150,7 @@ export interface ButtonProps {
buttonStyle: 'primary' | 'secondary' | 'dashed' | 'link' | 'tertiary';
loading?: boolean;
icon?: IconType;
component?: ReactNode;
}
export interface SubMenuProps {
@@ -300,18 +301,22 @@ const SubMenuComponent: FunctionComponent<SubMenuProps> = props => {
</SubMenu>
))}
</Menu>
{props.buttons?.map((btn, i) => (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
))}
{props.buttons?.map((btn, i) =>
btn.component ? (
<span key={i}>{btn.component}</span>
) : (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
),
)}
</div>
</Row>
{props.children}

View File

@@ -0,0 +1,127 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { SupersetClient } from '@superset-ui/core';
import { render, waitFor } from 'spec/helpers/testing-library';
import SemanticLayerModal from './SemanticLayerModal';

// Ensures the mocked JsonForms fires its onChange exactly once per test
// run (reset in beforeEach), simulating a single user edit.
let mockJsonFormsChangeTriggered = false;

// Replace JsonForms with a stub that immediately reports a form change,
// as if the user had filled in the "warehouse" field.
jest.mock('@jsonforms/react', () => ({
  ...jest.requireActual('@jsonforms/react'),
  JsonForms: ({ onChange }: { onChange: (value: unknown) => void }) => {
    // eslint-disable-next-line react-hooks/rules-of-hooks
    if (!mockJsonFormsChangeTriggered) {
      mockJsonFormsChangeTriggered = true;
      onChange({
        data: { warehouse: 'wh1' },
        errors: [],
      });
    }
    return null;
  },
}));

// Stub the network layer: GET/POST/PUT become jest mocks configured below.
jest.mock('@superset-ui/core', () => ({
  ...jest.requireActual('@superset-ui/core'),
  SupersetClient: {
    ...jest.requireActual('@superset-ui/core').SupersetClient,
    get: jest.fn(),
    post: jest.fn(),
    put: jest.fn(),
  },
  getClientErrorObject: jest.fn(() => Promise.resolve({ error: '' })),
}));

const mockedGet = SupersetClient.get as jest.Mock;
const mockedPost = SupersetClient.post as jest.Mock;

// Supplying semanticLayerUuid puts the modal in edit mode, which fetches
// the existing layer on open.
const props = {
  show: true,
  onHide: jest.fn(),
  addDangerToast: jest.fn(),
  addSuccessToast: jest.fn(),
  semanticLayerUuid: '11111111-1111-1111-1111-111111111111',
};

beforeEach(() => {
  mockJsonFormsChangeTriggered = false;
  jest.useFakeTimers();
  mockedGet.mockReset();
  mockedPost.mockReset();
  // First GET: the list of available semantic layer types.
  // Second GET: the layer being edited.
  mockedGet
    .mockResolvedValueOnce({
      json: {
        result: [{ id: 'snowflake', name: 'Snowflake', description: '' }],
      },
    })
    .mockResolvedValueOnce({
      json: {
        result: {
          name: 'Layer 1',
          type: 'snowflake',
          configuration: { warehouse: 'wh0' },
        },
      },
    });
  // Every schema POST returns a single dynamic field that depends on
  // itself, so any edit to it should schedule a debounced refresh.
  mockedPost.mockResolvedValue({
    json: {
      result: {
        type: 'object',
        properties: {
          warehouse: {
            type: 'string',
            'x-dynamic': true,
            'x-dependsOn': ['warehouse'],
          },
        },
      },
    },
  });
});

afterEach(() => {
  jest.runOnlyPendingTimers();
  jest.useRealTimers();
});

test('posts configuration schema refresh after debounce', async () => {
  render(<SemanticLayerModal {...props} />);
  // The initial schema fetch happens immediately, without configuration.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: { type: 'snowflake' },
    });
  });
  // Advancing past the 500ms debounce window triggers the enriched fetch
  // that includes the edited configuration from the mocked JsonForms.
  jest.advanceTimersByTime(501);
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: {
        type: 'snowflake',
        configuration: { warehouse: 'wh1' },
      },
    });
  });
});

View File

@@ -0,0 +1,616 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useState, useEffect, useCallback, useRef } from 'react';
import { t } from '@apache-superset/core/translation';
import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
import { Input, Select, Button } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import { JsonForms, withJsonFormsControlProps } from '@jsonforms/react';
import type {
JsonSchema,
UISchemaElement,
ControlProps,
} from '@jsonforms/core';
import {
rankWith,
and,
isStringControl,
formatIs,
schemaMatches,
} from '@jsonforms/core';
import {
rendererRegistryEntries,
cellRegistryEntries,
TextControl,
} from '@great-expectations/jsonforms-antd-renderers';
import type { ErrorObject } from 'ajv';
import {
StandardModal,
ModalFormField,
MODAL_STANDARD_WIDTH,
MODAL_MEDIUM_WIDTH,
} from 'src/components/Modal';
/**
 * Custom renderer that renders `Input.Password` for fields with
 * `format: "password"` in the JSON Schema (e.g. Pydantic `SecretStr`).
 */
function PasswordControl(props: ControlProps) {
  // Forward to the stock text control with the input type forced to
  // "password" via the UI schema options.
  const uischema = {
    ...props.uischema,
    options: { ...props.uischema.options, type: 'password' },
  };
  return TextControl({ ...props, uischema });
}
const PasswordRenderer = withJsonFormsControlProps(PasswordControl);
// Rank 3 outranks the default string renderer only for password-formatted
// string fields.
const passwordEntry = {
  tester: rankWith(3, and(isStringControl, formatIs('password'))),
  renderer: PasswordRenderer,
};

/**
 * Renderer for `const` properties (e.g. Pydantic discriminator fields).
 * Renders nothing visually but ensures the const value is set in form data,
 * so discriminated unions resolve correctly on the backend.
 */
function ConstControl({ data, handleChange, path, schema }: ControlProps) {
  const constValue = (schema as Record<string, unknown>).const;
  // Write the const value into form data whenever it is missing or stale.
  useEffect(() => {
    if (constValue !== undefined && data !== constValue) {
      handleChange(path, constValue);
    }
  }, [constValue, data, handleChange, path]);
  return null;
}
const ConstRenderer = withJsonFormsControlProps(ConstControl);
// High rank (10) so const fields never fall through to a visible control.
const constEntry = {
  tester: rankWith(
    10,
    schemaMatches(s => s !== undefined && 'const' in s),
  ),
  renderer: ConstRenderer,
};
/**
 * Returns true when every dependency field holds a non-empty value.
 * Nested objects (like auth) count as filled only when they contain at
 * least one key.
 */
function areDependenciesSatisfied(
  dependencies: string[],
  data: Record<string, unknown>,
): boolean {
  const isFilled = (value: unknown): boolean => {
    if (value === null || value === undefined || value === '') {
      return false;
    }
    // Empty nested objects (e.g. an untouched auth section) are not filled.
    return !(typeof value === 'object' && Object.keys(value).length === 0);
  };
  return dependencies.every(dep => isFilled(data[dep]));
}
/**
 * Renderer for fields marked `x-dynamic` in the JSON Schema.
 * Shows a loading spinner inside the input while the schema is being
 * refreshed with dynamic values from the backend.
 */
function DynamicFieldControl(props: ControlProps) {
  // `refreshingSchema` and `formData` are threaded through the JsonForms
  // `config` prop by the enclosing modal.
  const { refreshingSchema, formData: cfgData } = props.config ?? {};
  const deps = (props.schema as Record<string, unknown>)?.['x-dependsOn'];
  // Only show the spinner for fields whose dependencies are satisfied —
  // those are the ones the in-flight refresh can actually populate.
  const refreshing =
    refreshingSchema &&
    Array.isArray(deps) &&
    areDependenciesSatisfied(
      deps as string[],
      (cfgData as Record<string, unknown>) ?? {},
    );
  if (!refreshing) {
    return TextControl(props);
  }
  const uischema = {
    ...props.uischema,
    options: {
      ...props.uischema.options,
      placeholderText: t('Loading...'),
      inputProps: { suffix: <Icons.LoadingOutlined iconSize="s" /> },
    },
  };
  // Disable the input while its dynamic values are being fetched.
  return TextControl({ ...props, uischema, enabled: false });
}
const DynamicFieldRenderer = withJsonFormsControlProps(DynamicFieldControl);
// Rank 3 outranks the default string renderer for `x-dynamic` fields.
const dynamicFieldEntry = {
  tester: rankWith(
    3,
    and(
      isStringControl,
      schemaMatches(
        s => (s as Record<string, unknown>)?.['x-dynamic'] === true,
      ),
    ),
  ),
  renderer: DynamicFieldRenderer,
};
// Stock antd renderers plus the three custom entries defined above.
const renderers = [
  ...rendererRegistryEntries,
  passwordEntry,
  constEntry,
  dynamicFieldEntry,
];

// Modal wizard steps: pick a type, then configure it.
type Step = 'type' | 'config';
type ValidationMode = 'ValidateAndHide' | 'ValidateAndShow';
// Delay before re-fetching the schema after dependency values change.
const SCHEMA_REFRESH_DEBOUNCE_MS = 500;
/**
 * Strips empty `enum` arrays from schema properties. The JSON Schema spec
 * requires `enum` to have at least one item, and AJV rejects empty arrays.
 * Fields with empty enums are rendered as plain text inputs instead.
 */
function sanitizeSchema(schema: JsonSchema): JsonSchema {
  if (!schema.properties) {
    return schema;
  }
  const hasEmptyEnum = (prop: unknown): boolean =>
    typeof prop === 'object' &&
    prop !== null &&
    'enum' in prop &&
    Array.isArray((prop as { enum?: unknown }).enum) &&
    (prop as { enum: unknown[] }).enum.length === 0;
  const properties: Record<string, JsonSchema> = {};
  Object.entries(schema.properties).forEach(([key, prop]) => {
    if (hasEmptyEnum(prop)) {
      // Drop only the offending `enum` key; keep the rest of the property.
      const { enum: _empty, ...rest } = prop as { enum: unknown[] };
      properties[key] = rest as JsonSchema;
    } else {
      properties[key] = prop as JsonSchema;
    }
  });
  return { ...schema, properties } as JsonSchema;
}
/**
 * Derives a JSON Forms UI schema from a JSON Schema. Each property becomes
 * a vertical-layout Control; the first `examples` entry supplies its
 * placeholder text and `description` its tooltip. Property order follows
 * the backend-provided `x-propertyOrder` when present, otherwise the JSON
 * object key order.
 */
function buildUiSchema(schema: JsonSchema): UISchemaElement | undefined {
  const { properties } = schema;
  if (!properties) {
    return undefined;
  }
  const propertyOrder: string[] =
    ((schema as Record<string, unknown>)['x-propertyOrder'] as string[]) ??
    Object.keys(properties);
  const elements = propertyOrder
    .filter(key => key in properties)
    .map(key => {
      const prop = properties[key];
      const control: Record<string, unknown> = {
        type: 'Control',
        scope: `#/properties/${key}`,
      };
      if (typeof prop !== 'object' || prop === null) {
        return control;
      }
      const options: Record<string, unknown> = {};
      if (
        'examples' in prop &&
        Array.isArray(prop.examples) &&
        prop.examples.length > 0
      ) {
        options.placeholderText = String(prop.examples[0]);
      }
      if ('description' in prop && typeof prop.description === 'string') {
        options.tooltip = prop.description;
      }
      if (Object.keys(options).length > 0) {
        control.options = options;
      }
      return control;
    });
  return { type: 'VerticalLayout', elements } as UISchemaElement;
}
/**
 * Collects dynamic-field dependency declarations from the schema.
 * Maps each `x-dynamic` field name to its `x-dependsOn` list of
 * dependency field names.
 */
function getDynamicDependencies(schema: JsonSchema): Record<string, string[]> {
  const result: Record<string, string[]> = {};
  Object.entries(schema.properties ?? {}).forEach(([fieldName, prop]) => {
    if (typeof prop !== 'object' || prop === null) {
      return;
    }
    const raw = prop as Record<string, unknown>;
    if (
      'x-dynamic' in raw &&
      'x-dependsOn' in raw &&
      Array.isArray(raw['x-dependsOn'])
    ) {
      result[fieldName] = raw['x-dependsOn'] as string[];
    }
  });
  return result;
}
/**
 * Produces a stable string snapshot of every dependency field's current
 * value. Keys are sorted so two snapshots compare equal exactly when the
 * dependency values are equal, letting the caller skip redundant fetches.
 */
function serializeDependencyValues(
  dynamicDeps: Record<string, string[]>,
  data: Record<string, unknown>,
): string {
  // Union of all dependency keys across every dynamic field.
  const depKeys = new Set<string>(Object.values(dynamicDeps).flat());
  const snapshot: Record<string, unknown> = {};
  [...depKeys].sort().forEach(key => {
    snapshot[key] = data[key];
  });
  return JSON.stringify(snapshot);
}
// A semantic layer implementation advertised by the backend types endpoint.
interface SemanticLayerType {
  id: string;
  name: string;
  description: string;
}

interface SemanticLayerModalProps {
  show: boolean;
  onHide: () => void;
  addDangerToast: (msg: string) => void;
  addSuccessToast: (msg: string) => void;
  // When present, the modal opens in edit mode for this layer.
  semanticLayerUuid?: string;
}
export default function SemanticLayerModal({
show,
onHide,
addDangerToast,
addSuccessToast,
semanticLayerUuid,
}: SemanticLayerModalProps) {
const isEditMode = !!semanticLayerUuid;
const [step, setStep] = useState<Step>('type');
const [name, setName] = useState('');
const [selectedType, setSelectedType] = useState<string | null>(null);
const [types, setTypes] = useState<SemanticLayerType[]>([]);
const [loading, setLoading] = useState(false);
const [configSchema, setConfigSchema] = useState<JsonSchema | null>(null);
const [uiSchema, setUiSchema] = useState<UISchemaElement | undefined>(
undefined,
);
const [formData, setFormData] = useState<Record<string, unknown>>({});
const [saving, setSaving] = useState(false);
const [hasErrors, setHasErrors] = useState(true);
const [refreshingSchema, setRefreshingSchema] = useState(false);
const [validationMode, setValidationMode] =
useState<ValidationMode>('ValidateAndHide');
const errorsRef = useRef<ErrorObject[]>([]);
const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const lastDepSnapshotRef = useRef<string>('');
const dynamicDepsRef = useRef<Record<string, string[]>>({});
const fetchTypes = useCallback(async () => {
setLoading(true);
try {
const { json } = await SupersetClient.get({
endpoint: '/api/v1/semantic_layer/types',
});
setTypes(json.result ?? []);
} catch {
addDangerToast(
t('An error occurred while fetching semantic layer types'),
);
} finally {
setLoading(false);
}
}, [addDangerToast]);
const applySchema = useCallback((rawSchema: JsonSchema) => {
const schema = sanitizeSchema(rawSchema);
setConfigSchema(schema);
setUiSchema(buildUiSchema(schema));
dynamicDepsRef.current = getDynamicDependencies(rawSchema);
}, []);
const fetchConfigSchema = useCallback(
async (type: string, configuration?: Record<string, unknown>) => {
const isInitialFetch = !configuration;
if (isInitialFetch) setLoading(true);
else setRefreshingSchema(true);
try {
const { json } = await SupersetClient.post({
endpoint: '/api/v1/semantic_layer/schema/configuration',
jsonPayload: { type, configuration },
});
applySchema(json.result);
if (json.warning) {
addDangerToast(String(json.warning));
}
if (isInitialFetch) setStep('config');
} catch (error) {
const clientError = await getClientErrorObject(error);
if (isInitialFetch) {
addDangerToast(
clientError.error ||
t('An error occurred while fetching the configuration schema'),
);
} else {
addDangerToast(
clientError.error ||
t('An error occurred while refreshing the configuration schema'),
);
}
} finally {
if (isInitialFetch) setLoading(false);
else setRefreshingSchema(false);
}
},
[addDangerToast, applySchema],
);
const fetchExistingLayer = useCallback(
async (uuid: string) => {
setLoading(true);
try {
const { json } = await SupersetClient.get({
endpoint: `/api/v1/semantic_layer/${uuid}`,
});
const layer = json.result;
setName(layer.name ?? '');
setSelectedType(layer.type);
setFormData(layer.configuration ?? {});
setHasErrors(false);
// Fetch base schema (no configuration → no Snowflake connection) to
// show the form immediately. The existing maybeRefreshSchema machinery
// will trigger an enriched fetch in the background once deps are
// satisfied, and DynamicFieldControl will show per-field spinners.
const { json: schemaJson } = await SupersetClient.post({
endpoint: '/api/v1/semantic_layer/schema/configuration',
jsonPayload: { type: layer.type },
});
applySchema(schemaJson.result);
setStep('config');
} catch (error) {
const clientError = await getClientErrorObject(error);
addDangerToast(
clientError.error ||
t('An error occurred while fetching the semantic layer'),
);
} finally {
setLoading(false);
}
},
[addDangerToast, applySchema],
);
useEffect(() => {
if (show) {
if (isEditMode && semanticLayerUuid) {
fetchTypes();
fetchExistingLayer(semanticLayerUuid);
} else {
fetchTypes();
}
} else {
setStep('type');
setName('');
setSelectedType(null);
setTypes([]);
setConfigSchema(null);
setUiSchema(undefined);
setFormData({});
setHasErrors(true);
setRefreshingSchema(false);
setValidationMode('ValidateAndHide');
errorsRef.current = [];
lastDepSnapshotRef.current = '';
dynamicDepsRef.current = {};
if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
}
}, [show, fetchTypes, isEditMode, semanticLayerUuid, fetchExistingLayer]);
const handleStepAdvance = () => {
if (selectedType) {
fetchConfigSchema(selectedType);
}
};
const handleBack = () => {
setStep('type');
setConfigSchema(null);
setUiSchema(undefined);
setFormData({});
setValidationMode('ValidateAndHide');
errorsRef.current = [];
lastDepSnapshotRef.current = '';
dynamicDepsRef.current = {};
if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
};
const handleCreate = async () => {
setSaving(true);
try {
if (isEditMode && semanticLayerUuid) {
await SupersetClient.put({
endpoint: `/api/v1/semantic_layer/${semanticLayerUuid}`,
jsonPayload: { name, configuration: formData },
});
addSuccessToast(t('Semantic layer updated'));
} else {
await SupersetClient.post({
endpoint: '/api/v1/semantic_layer/',
jsonPayload: { name, type: selectedType, configuration: formData },
});
addSuccessToast(t('Semantic layer created'));
}
onHide();
} catch (error) {
const clientError = await getClientErrorObject(error);
addDangerToast(
clientError.error ||
(isEditMode
? t('An error occurred while updating the semantic layer')
: t('An error occurred while creating the semantic layer')),
);
} finally {
setSaving(false);
}
};
const handleSave = () => {
if (step === 'type') {
handleStepAdvance();
} else {
setValidationMode('ValidateAndShow');
if (errorsRef.current.length === 0) {
handleCreate();
}
}
};
// Debounced refresh of the configuration schema when values that dynamic
// (server-enriched) fields depend on have changed. A snapshot of the
// dependency values gates the fetch so unrelated edits don't refetch.
const maybeRefreshSchema = useCallback(
  (data: Record<string, unknown>) => {
    if (!selectedType) return;
    const dynamicDeps = dynamicDepsRef.current;
    // No dynamic fields in the current schema — nothing to refresh.
    if (Object.keys(dynamicDeps).length === 0) return;
    // Check if any dynamic field has all dependencies satisfied
    const hasSatisfiedDeps = Object.values(dynamicDeps).some(deps =>
      areDependenciesSatisfied(deps, data),
    );
    if (!hasSatisfiedDeps) return;
    // Only re-fetch if dependency values actually changed
    const snapshot = serializeDependencyValues(dynamicDeps, data);
    if (snapshot === lastDepSnapshotRef.current) return;
    lastDepSnapshotRef.current = snapshot;
    // Debounce so rapid typing results in a single schema fetch.
    if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
    debounceTimerRef.current = setTimeout(() => {
      fetchConfigSchema(selectedType, data);
    }, SCHEMA_REFRESH_DEBOUNCE_MS);
  },
  [selectedType, fetchConfigSchema],
);
// JsonForms onChange handler: record the latest form data and validation
// errors, then give the dynamic-schema logic a chance to refresh.
const handleFormChange = useCallback(
  ({
    data,
    errors,
  }: {
    data: Record<string, unknown>;
    errors?: ErrorObject[];
  }) => {
    const currentErrors = errors ?? [];
    errorsRef.current = currentErrors;
    setHasErrors(currentErrors.length > 0);
    setFormData(data);
    maybeRefreshSchema(data);
  },
  [maybeRefreshSchema],
);
// Display name of the currently selected type, used in the modal title.
const selectedTypeName =
  types.find(type => type.id === selectedType)?.name ?? '';
// Title varies by mode (edit vs. create) and by wizard step.
const title = isEditMode
  ? t('Edit %s', selectedTypeName || t('Semantic Layer'))
  : step === 'type'
  ? t('New Semantic Layer')
  : t('Configure %s', selectedTypeName);
// Step one renders the type picker; step two renders the name field plus a
// JsonForms-driven configuration form built from the fetched schema.
return (
  <StandardModal
    show={show}
    onHide={onHide}
    onSave={handleSave}
    title={title}
    icon={isEditMode ? <Icons.EditOutlined /> : <Icons.PlusOutlined />}
    width={step === 'type' ? MODAL_STANDARD_WIDTH : MODAL_MEDIUM_WIDTH}
    saveDisabled={
      step === 'type' ? !selectedType : saving || !name.trim() || hasErrors
    }
    saveText={
      step === 'type' ? undefined : isEditMode ? t('Save') : t('Create')
    }
    saveLoading={saving}
    contentLoading={loading}
  >
    {step === 'type' ? (
      <>
        <ModalFormField label={t('Type')}>
          <Select
            ariaLabel={t('Semantic layer type')}
            placeholder={t('Select a semantic layer type')}
            value={selectedType}
            onChange={value => setSelectedType(value as string)}
            options={types.map(type => ({
              value: type.id,
              label: type.name,
            }))}
            getPopupContainer={() => document.body}
            dropdownAlign={{
              points: ['tl', 'bl'],
              offset: [0, 4],
              overflow: { adjustX: 0, adjustY: 1 },
            }}
          />
        </ModalFormField>
      </>
    ) : (
      <>
        {!isEditMode && (
          <Button
            buttonStyle="link"
            icon={<Icons.CaretLeftOutlined iconSize="s" />}
            onClick={handleBack}
          >
            {t('Back')}
          </Button>
        )}
        <ModalFormField label={t('Name')} required>
          <Input
            value={name}
            onChange={e => setName(e.target.value)}
            placeholder={t('Name of the semantic layer')}
          />
        </ModalFormField>
        {configSchema && (
          <JsonForms
            schema={configSchema}
            uischema={uiSchema}
            data={formData}
            renderers={renderers}
            cells={cellRegistryEntries}
            config={{ refreshingSchema, formData }}
            validationMode={validationMode}
            onChange={handleFormChange}
          />
        )}
      </>
    )}
  </StandardModal>
);
}

View File

@@ -17,9 +17,15 @@
* under the License.
*/
import { t } from '@apache-superset/core/translation';
import { getExtensionsRegistry, SupersetClient } from '@superset-ui/core';
import { styled } from '@apache-superset/core/theme';
import {
getExtensionsRegistry,
SupersetClient,
isFeatureEnabled,
FeatureFlag,
} from '@superset-ui/core';
import { css, styled, useTheme } from '@apache-superset/core/theme';
import { useState, useMemo, useEffect, useCallback } from 'react';
import type { CellProps } from 'react-table';
import rison from 'rison';
import { useSelector } from 'react-redux';
import { useQueryParams, BooleanParam } from 'use-query-params';
@@ -33,7 +39,9 @@ import {
import withToasts from 'src/components/MessageToasts/withToasts';
import SubMenu, { SubMenuProps } from 'src/features/home/SubMenu';
import {
Button,
DeleteModal,
Dropdown,
Tooltip,
List,
Loading,
@@ -43,6 +51,7 @@ import {
ListView,
ListViewFilterOperator as FilterOperator,
ListViewFilters,
type ListViewFetchDataConfig,
} from 'src/components';
import { Typography } from '@superset-ui/core/components/Typography';
import { getUrlParam } from 'src/utils/urlUtils';
@@ -55,10 +64,12 @@ import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes';
import type { MenuObjectProps } from 'src/types/bootstrapTypes';
import DatabaseModal from 'src/features/databases/DatabaseModal';
import UploadDataModal from 'src/features/databases/UploadDataModel';
import SemanticLayerModal from 'src/features/semanticLayers/SemanticLayerModal';
import { DatabaseObject } from 'src/features/databases/types';
import { QueryObjectColumns } from 'src/views/CRUD/types';
import { WIDER_DROPDOWN_WIDTH } from 'src/components/ListView/utils';
import { ModalTitleWithIcon } from 'src/components/ModalTitleWithIcon';
import type Owner from 'src/types/Owner';
const extensionsRegistry = getExtensionsRegistry();
const DatabaseDeleteRelatedExtension = extensionsRegistry.get(
@@ -70,6 +81,13 @@ const dbConfigExtraExtension = extensionsRegistry.get(
const PAGE_SIZE = 25;
type ConnectionItem = DatabaseObject & {
source_type?: 'database' | 'semantic_layer';
sl_type?: string;
changed_by?: Owner;
changed_on_delta_humanized?: string;
};
interface DatabaseDeleteObject extends DatabaseObject {
charts: any;
dashboards: any;
@@ -108,20 +126,106 @@ function DatabaseList({
addSuccessToast,
user,
}: DatabaseListProps) {
const theme = useTheme();
const showSemanticLayers = isFeatureEnabled(FeatureFlag.SemanticLayers);
// Standard database list view resource (used when SL flag is OFF)
const {
state: {
loading,
resourceCount: databaseCount,
resourceCollection: databases,
loading: dbLoading,
resourceCount: dbCount,
resourceCollection: dbCollection,
},
hasPerm,
fetchData,
refreshData,
fetchData: dbFetchData,
refreshData: dbRefreshData,
} = useListViewResource<DatabaseObject>(
'database',
t('database'),
addDangerToast,
);
// Combined endpoint state (used when SL flag is ON)
const [combinedItems, setCombinedItems] = useState<ConnectionItem[]>([]);
const [combinedCount, setCombinedCount] = useState(0);
const [combinedLoading, setCombinedLoading] = useState(true);
const [lastFetchConfig, setLastFetchConfig] =
useState<ListViewFetchDataConfig | null>(null);
const combinedFetchData = useCallback(
(config: ListViewFetchDataConfig) => {
setLastFetchConfig(config);
setCombinedLoading(true);
const { pageIndex, pageSize, sortBy, filters: filterValues } = config;
const sourceTypeFilter = filterValues.find(f => f.id === 'source_type');
const otherFilters = filterValues
.filter(f => f.id !== 'source_type')
.filter(
({ value }) => value !== '' && value !== null && value !== undefined,
)
.map(({ id, operator: opr, value }) => ({
col: id,
opr,
value:
value && typeof value === 'object' && 'value' in value
? value.value
: value,
}));
const sourceTypeValue =
sourceTypeFilter?.value && typeof sourceTypeFilter.value === 'object'
? (sourceTypeFilter.value as { value: string }).value
: (sourceTypeFilter?.value as string | undefined);
if (sourceTypeValue) {
otherFilters.push({
col: 'source_type',
opr: 'eq',
value: sourceTypeValue,
});
}
const queryParams = rison.encode_uri({
order_column: sortBy[0].id,
order_direction: sortBy[0].desc ? 'desc' : 'asc',
page: pageIndex,
page_size: pageSize,
...(otherFilters.length ? { filters: otherFilters } : {}),
});
return SupersetClient.get({
endpoint: `/api/v1/semantic_layer/connections/?q=${queryParams}`,
})
.then(({ json = {} }) => {
setCombinedItems(json.result);
setCombinedCount(json.count);
})
.catch(() => {
addDangerToast(t('An error occurred while fetching connections'));
})
.finally(() => {
setCombinedLoading(false);
});
},
[addDangerToast],
);
const combinedRefreshData = useCallback(() => {
if (lastFetchConfig) {
return combinedFetchData(lastFetchConfig);
}
return undefined;
}, [lastFetchConfig, combinedFetchData]);
// Select the right data source based on feature flag
const loading = showSemanticLayers ? combinedLoading : dbLoading;
const databaseCount = showSemanticLayers ? combinedCount : dbCount;
const databases: ConnectionItem[] = showSemanticLayers
? combinedItems
: dbCollection;
const fetchData = showSemanticLayers ? combinedFetchData : dbFetchData;
const refreshData = showSemanticLayers ? combinedRefreshData : dbRefreshData;
const fullUser = useSelector<any, UserWithPermissionsAndRoles>(
state => state.user,
);
@@ -148,6 +252,13 @@ function DatabaseList({
useState<boolean>(false);
const [columnarUploadDataModalOpen, setColumnarUploadDataModalOpen] =
useState<boolean>(false);
const [semanticLayerModalOpen, setSemanticLayerModalOpen] =
useState<boolean>(false);
const [slCurrentlyEditing, setSlCurrentlyEditing] = useState<string | null>(
null,
);
const [slCurrentlyDeleting, setSlCurrentlyDeleting] =
useState<ConnectionItem | null>(null);
const [allowUploads, setAllowUploads] = useState<boolean>(false);
const isAdmin = isUserAdmin(fullUser);
@@ -320,18 +431,63 @@ function DatabaseList({
};
if (canCreate) {
menuData.buttons = [
{
'data-test': 'btn-create-database',
icon: <Icons.PlusOutlined iconSize="m" />,
name: t('Database'),
buttonStyle: 'primary',
onClick: () => {
// Ensure modal will be opened in add mode
handleDatabaseEditModal({ modalOpen: true });
const openDatabaseModal = () =>
handleDatabaseEditModal({ modalOpen: true });
if (isFeatureEnabled(FeatureFlag.SemanticLayers)) {
menuData.buttons = [
{
name: t('New'),
buttonStyle: 'primary',
component: (
<Dropdown
menu={{
items: [
{
key: 'database',
label: t('Database'),
onClick: openDatabaseModal,
},
{
key: 'semantic-layer',
label: t('Semantic Layer'),
onClick: () => {
setSemanticLayerModalOpen(true);
},
},
],
}}
trigger={['click']}
>
<Button
data-test="btn-create-new"
buttonStyle="primary"
icon={<Icons.PlusOutlined iconSize="m" />}
>
{t('New')}
<Icons.DownOutlined
iconSize="s"
css={css`
margin-left: ${theme.sizeUnit * 1.5}px;
margin-right: -${theme.sizeUnit * 2}px;
`}
/>
</Button>
</Dropdown>
),
},
},
];
];
} else {
menuData.buttons = [
{
'data-test': 'btn-create-database',
icon: <Icons.PlusOutlined iconSize="m" />,
name: t('Database'),
buttonStyle: 'primary',
onClick: openDatabaseModal,
},
];
}
}
const handleDatabaseExport = useCallback(
@@ -345,7 +501,7 @@ function DatabaseList({
await handleResourceExport('database', [database.id], () => {
setPreparingExport(false);
});
} catch (error) {
} catch {
setPreparingExport(false);
addDangerToast(t('There was an issue exporting the database'));
}
@@ -401,6 +557,23 @@ function DatabaseList({
const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }];
// Delete a semantic layer row and refresh the connections list. Uses the
// two-callback form of .then so a DELETE failure goes straight to the
// rejection handler (createErrorHandler formats the API error message);
// note item.database_name carries the layer's name for semantic-layer rows.
function handleSemanticLayerDelete(item: ConnectionItem) {
  SupersetClient.delete({
    endpoint: `/api/v1/semantic_layer/${item.uuid}`,
  }).then(
    () => {
      refreshData();
      addSuccessToast(t('Deleted: %s', item.database_name));
      setSlCurrentlyDeleting(null);
    },
    createErrorHandler(errMsg =>
      addDangerToast(
        t('There was an issue deleting %s: %s', item.database_name, errMsg),
      ),
    ),
  );
}
const columns = useMemo(
() => [
{
@@ -413,7 +586,7 @@ function DatabaseList({
accessor: 'backend',
Header: t('Backend'),
size: 'xl',
disableSortBy: true, // TODO: api support for sorting by 'backend'
disableSortBy: true,
id: 'backend',
},
{
@@ -427,13 +600,12 @@ function DatabaseList({
<span>{t('AQE')}</span>
</Tooltip>
),
Cell: ({
row: {
original: { allow_run_async: allowRunAsync },
},
}: {
row: { original: { allow_run_async: boolean } };
}) => <BooleanDisplay value={allowRunAsync} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_run_async)} />
),
size: 'sm',
id: 'allow_run_async',
},
@@ -448,33 +620,36 @@ function DatabaseList({
<span>{t('DML')}</span>
</Tooltip>
),
Cell: ({
row: {
original: { allow_dml: allowDML },
},
}: any) => <BooleanDisplay value={allowDML} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_dml)} />
),
size: 'sm',
id: 'allow_dml',
},
{
accessor: 'allow_file_upload',
Header: t('File upload'),
Cell: ({
row: {
original: { allow_file_upload: allowFileUpload },
},
}: any) => <BooleanDisplay value={allowFileUpload} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_file_upload)} />
),
size: 'md',
id: 'allow_file_upload',
},
{
accessor: 'expose_in_sqllab',
Header: t('Expose in SQL Lab'),
Cell: ({
row: {
original: { expose_in_sqllab: exposeInSqllab },
},
}: any) => <BooleanDisplay value={exposeInSqllab} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.expose_in_sqllab)} />
),
size: 'md',
id: 'expose_in_sqllab',
},
@@ -486,7 +661,9 @@ function DatabaseList({
changed_on_delta_humanized: changedOn,
},
},
}: any) => <ModifiedInfo date={changedOn} user={changedBy} />,
}: CellProps<ConnectionItem>) => (
<ModifiedInfo date={changedOn || ''} user={changedBy} />
),
Header: t('Last modified'),
accessor: 'changed_on_delta_humanized',
size: 'xl',
@@ -494,6 +671,48 @@ function DatabaseList({
},
{
Cell: ({ row: { original } }: any) => {
const isSemanticLayer = original.source_type === 'semantic_layer';
if (isSemanticLayer) {
if (!canEdit && !canDelete) return null;
return (
<Actions className="actions">
{canDelete && (
<Tooltip
id="delete-action-tooltip"
title={t('Delete')}
placement="bottom"
>
<span
role="button"
tabIndex={0}
className="action-button"
onClick={() => setSlCurrentlyDeleting(original)}
>
<Icons.DeleteOutlined iconSize="l" />
</span>
</Tooltip>
)}
{canEdit && (
<Tooltip
id="edit-action-tooltip"
title={t('Edit')}
placement="bottom"
>
<span
role="button"
tabIndex={0}
className="action-button"
onClick={() => setSlCurrentlyEditing(original.uuid)}
>
<Icons.EditOutlined iconSize="l" />
</span>
</Tooltip>
)}
</Actions>
);
}
const handleEdit = () =>
handleDatabaseEditModal({ database: original, modalOpen: true });
const handleDelete = () => openDatabaseDeleteModal(original);
@@ -579,6 +798,12 @@ function DatabaseList({
hidden: !canEdit && !canDelete,
disableSortBy: true,
},
{
accessor: 'source_type',
hidden: true,
disableSortBy: true,
id: 'source_type',
},
{
accessor: QueryObjectColumns.ChangedBy,
hidden: true,
@@ -596,8 +821,8 @@ function DatabaseList({
],
);
const filters: ListViewFilters = useMemo(
() => [
const filters: ListViewFilters = useMemo(() => {
const baseFilters: ListViewFilters = [
{
Header: t('Name'),
key: 'search',
@@ -605,62 +830,83 @@ function DatabaseList({
input: 'search',
operator: FilterOperator.Contains,
},
{
Header: t('Expose in SQL Lab'),
key: 'expose_in_sql_lab',
id: 'expose_in_sqllab',
];
if (showSemanticLayers) {
baseFilters.push({
Header: t('Source'),
key: 'source_type',
id: 'source_type',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
{ label: t('Database'), value: 'database' },
{ label: t('Semantic Layer'), value: 'semantic_layer' },
],
},
{
Header: (
<Tooltip
id="allow-run-async-filter-header-tooltip"
title={t('Asynchronous query execution')}
placement="top"
>
<span>{t('AQE')}</span>
</Tooltip>
),
key: 'allow_run_async',
id: 'allow_run_async',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: t('Modified by'),
key: 'changed_by',
id: 'changed_by',
input: 'select',
operator: FilterOperator.RelationOneMany,
unfilteredLabel: t('All'),
fetchSelects: createFetchRelated(
'database',
'changed_by',
createErrorHandler(errMsg =>
t(
'An error occurred while fetching dataset datasource values: %s',
errMsg,
),
});
}
if (!showSemanticLayers) {
baseFilters.push(
{
Header: t('Expose in SQL Lab'),
key: 'expose_in_sql_lab',
id: 'expose_in_sqllab',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: (
<Tooltip
id="allow-run-async-filter-header-tooltip"
title={t('Asynchronous query execution')}
placement="top"
>
<span>{t('AQE')}</span>
</Tooltip>
),
user,
),
paginate: true,
dropdownStyle: { minWidth: WIDER_DROPDOWN_WIDTH },
},
],
[user],
);
key: 'allow_run_async',
id: 'allow_run_async',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: t('Modified by'),
key: 'changed_by',
id: 'changed_by',
input: 'select',
operator: FilterOperator.RelationOneMany,
unfilteredLabel: t('All'),
fetchSelects: createFetchRelated(
'database',
'changed_by',
createErrorHandler(errMsg =>
t(
'An error occurred while fetching dataset datasource values: %s',
errMsg,
),
),
user,
),
paginate: true,
dropdownStyle: { minWidth: WIDER_DROPDOWN_WIDTH },
},
);
}
return baseFilters;
}, [showSemanticLayers]);
return (
<>
@@ -703,6 +949,48 @@ function DatabaseList({
allowedExtensions={COLUMNAR_EXTENSIONS}
type="columnar"
/>
<SemanticLayerModal
show={semanticLayerModalOpen}
onHide={() => {
setSemanticLayerModalOpen(false);
refreshData();
}}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
/>
<SemanticLayerModal
show={!!slCurrentlyEditing}
onHide={() => {
setSlCurrentlyEditing(null);
refreshData();
}}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
semanticLayerUuid={slCurrentlyEditing ?? undefined}
/>
{slCurrentlyDeleting && (
<DeleteModal
description={
<p>
{t('Are you sure you want to delete')}{' '}
<b>{slCurrentlyDeleting.database_name}</b>?
</p>
}
onConfirm={() => {
if (slCurrentlyDeleting) {
handleSemanticLayerDelete(slCurrentlyDeleting);
}
}}
onHide={() => setSlCurrentlyDeleting(null)}
open
title={
<ModalTitleWithIcon
icon={<Icons.DeleteOutlined />}
title={t('Delete Semantic Layer?')}
/>
}
/>
)}
{databaseCurrentlyDeleting && (
<DeleteModal
description={

View File

@@ -30,6 +30,7 @@ from superset.commands.semantic_layer.exceptions import (
)
from superset.daos.semantic_layer import SemanticLayerDAO
from superset.semantic_layers.registry import registry
from superset.utils import json
from superset.utils.decorators import on_error, transaction
logger = logging.getLogger(__name__)
@@ -48,6 +49,10 @@ class CreateSemanticLayerCommand(BaseCommand):
)
def run(self) -> Model:
    """
    Validate and create the semantic layer, returning the new model.

    The ``configuration`` property arrives from the API as a dict but is
    persisted as a JSON string, so it is serialized before the DAO call.
    """
    self.validate()
    if isinstance(self._properties.get("configuration"), dict):
        self._properties["configuration"] = json.dumps(
            self._properties["configuration"]
        )
    return SemanticLayerDAO.create(attributes=self._properties)
def validate(self) -> None:

View File

@@ -105,6 +105,10 @@ class UpdateSemanticLayerCommand(BaseCommand):
def run(self) -> Model:
    """
    Validate and apply the update to the existing semantic layer.

    Mirrors the create command: a dict ``configuration`` is serialized to a
    JSON string before being handed to the DAO. ``validate()`` is expected
    to have resolved ``self._model``; the assert guards that invariant.
    """
    self.validate()
    assert self._model
    if isinstance(self._properties.get("configuration"), dict):
        self._properties["configuration"] = json.dumps(
            self._properties["configuration"]
        )
    return SemanticLayerDAO.update(self._model, attributes=self._properties)
def validate(self) -> None:

View File

@@ -19,12 +19,15 @@ from __future__ import annotations
import logging
from typing import Any
from flask import request, Response
from flask_appbuilder.api import expose, protect, safe
from flask import make_response, request, Response
from flask_appbuilder.api import expose, protect, rison, safe
from flask_appbuilder.api.schemas import get_list_schema
from flask_appbuilder.models.sqla.interface import SQLAInterface
from marshmallow import ValidationError
from pydantic import ValidationError as PydanticValidationError
from sqlalchemy.orm import load_only
from superset import event_logger
from superset import db, event_logger, is_feature_enabled
from superset.commands.semantic_layer.create import CreateSemanticLayerCommand
from superset.commands.semantic_layer.delete import DeleteSemanticLayerCommand
from superset.commands.semantic_layer.exceptions import (
@@ -44,6 +47,7 @@ from superset.commands.semantic_layer.update import (
)
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
from superset.daos.semantic_layer import SemanticLayerDAO
from superset.models.core import Database
from superset.semantic_layers.models import SemanticLayer, SemanticView
from superset.semantic_layers.registry import registry
from superset.semantic_layers.schemas import (
@@ -52,6 +56,7 @@ from superset.semantic_layers.schemas import (
SemanticViewPutSchema,
)
from superset.superset_typing import FlaskResponse
from superset.utils import json
from superset.views.base_api import (
BaseSupersetApi,
BaseSupersetModelRestApi,
@@ -63,15 +68,93 @@ logger = logging.getLogger(__name__)
def _serialize_layer(layer: SemanticLayer) -> dict[str, Any]:
config = layer.configuration
if isinstance(config, str):
config = json.loads(config)
return {
"uuid": str(layer.uuid),
"name": layer.name,
"description": layer.description,
"type": layer.type,
"cache_timeout": layer.cache_timeout,
"configuration": config or {},
"changed_on_delta_humanized": layer.changed_on_delta_humanized(),
}
def _infer_discriminators(
schema: dict[str, Any],
data: dict[str, Any],
) -> dict[str, Any]:
"""
Infer discriminator values for union fields when the frontend omits them.
Walks the schema's properties looking for discriminated unions (fields with a
``discriminator.mapping``). For each one, tries to match the submitted data
against one of the variants by checking which variant's required fields are
present, then injects the discriminator value.
"""
defs = schema.get("$defs", {})
for prop_name, prop_schema in schema.get("properties", {}).items():
value = data.get(prop_name)
if not isinstance(value, dict):
continue
# Find discriminated union via discriminator mapping
mapping = (
prop_schema.get("discriminator", {}).get("mapping")
if "discriminator" in prop_schema
else None
)
if not mapping:
continue
discriminator_field = prop_schema["discriminator"].get("propertyName")
if not discriminator_field or discriminator_field in value:
continue
# Try each variant: match by required fields present in the data
for disc_value, ref in mapping.items():
ref_name = ref.rsplit("/", 1)[-1] if "/" in ref else ref
variant_def = defs.get(ref_name, {})
required = set(variant_def.get("required", []))
# Exclude the discriminator itself from the check
required.discard(discriminator_field)
if required and required.issubset(value.keys()):
data = {
**data,
prop_name: {**value, discriminator_field: disc_value},
}
break
return data
def _parse_partial_config(
    cls: Any,
    config: dict[str, Any],
) -> Any:
    """
    Best-effort parse of a (possibly partial) configuration dict.

    Discriminator values the frontend may have omitted are inferred first,
    then strict validation is attempted, followed by lenient validation with
    ``context={"partial": True}``. Returns ``None`` when both attempts fail.
    """
    config_class = cls.configuration_class
    # Fill in discriminators the frontend may have left out.
    enriched = _infer_discriminators(config_class.model_json_schema(), config)
    # Strict first, then lenient/partial; first success wins.
    for extra_kwargs in ({}, {"context": {"partial": True}}):
        try:
            return config_class.model_validate(enriched, **extra_kwargs)
        except (PydanticValidationError, ValueError):
            continue
    return None
class SemanticViewRestApi(BaseSupersetModelRestApi):
datamodel = SQLAInterface(SemanticView)
@@ -230,13 +313,27 @@ class SemanticLayerRestApi(BaseSupersetApi):
parsed_config = None
if config := body.get("configuration"):
try:
parsed_config = cls.from_configuration(config).configuration # type: ignore[attr-defined]
except Exception: # pylint: disable=broad-except
parsed_config = None
parsed_config = _parse_partial_config(cls, config)
schema = cls.get_configuration_schema(parsed_config)
return self.response(200, result=schema)
warning: str | None = None
try:
schema = cls.get_configuration_schema(parsed_config)
except Exception as ex: # pylint: disable=broad-except
warning = str(ex)
logger.exception(
"Error enriching semantic layer configuration schema for type %s",
sl_type,
)
# Connection or query failures during schema enrichment should not
# prevent the form from rendering — return the base schema instead.
schema = cls.get_configuration_schema(None)
payload: dict[str, Any] = {"result": schema}
if warning:
payload["warning"] = warning
resp = make_response(json.dumps(payload, sort_keys=False), 200)
resp.headers["Content-Type"] = "application/json; charset=utf-8"
return resp
@expose("/<uuid>/schema/runtime", methods=("POST",))
@protect()
@@ -443,6 +540,199 @@ class SemanticLayerRestApi(BaseSupersetApi):
)
return self.response_422(message=str(ex))
@expose("/connections/", methods=("GET",))
@protect()
@safe
@statsd_metrics
@rison(get_list_schema)
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.connections",
log_to_statsd=False,
)
def connections(self, **kwargs: Any) -> FlaskResponse:
"""List databases and semantic layers combined.
---
get:
summary: List databases and semantic layers combined
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_list_schema'
responses:
200:
description: Combined list of databases and semantic layers
401:
$ref: '#/components/responses/401'
500:
$ref: '#/components/responses/500'
"""
args = kwargs.get("rison", {})
page = args.get("page", 0)
page_size = args.get("page_size", 25)
order_column = args.get("order_column", "changed_on")
order_direction = args.get("order_direction", "desc")
filters = args.get("filters", [])
source_type, name_filter = self._parse_connection_filters(filters)
if not is_feature_enabled("SEMANTIC_LAYERS"):
return self.response_404()
all_items = self._fetch_connection_items(source_type, name_filter)
sort_key = self._get_connection_sort_key(order_column)
all_items.sort(key=sort_key, reverse=order_direction == "desc") # type: ignore
total_count = len(all_items)
start = page * page_size
page_items = all_items[start : start + page_size]
result = [
self._serialize_database(obj)
if item_type == "database"
else self._serialize_semantic_layer(obj)
for item_type, obj in page_items
]
return self.response(200, count=total_count, result=result)
@staticmethod
def _parse_connection_filters(
filters: list[dict[str, Any]],
) -> tuple[str, str | None]:
"""Parse filters into source_type and name_filter."""
source_type = "all"
name_filter = None
for f in filters:
if f.get("col") == "source_type":
source_type = f.get("value", "all")
elif f.get("col") == "database_name" and f.get("opr") == "ct":
name_filter = f.get("value")
return source_type, name_filter
@staticmethod
def _fetch_connection_items(
    source_type: str,
    name_filter: str | None,
) -> list[tuple[str, Any]]:
    """
    Fetch ``(type, object)`` pairs for databases and/or semantic layers.

    ``source_type`` is "all", "database", or "semantic_layer" and selects
    which tables are queried; ``name_filter`` applies a case-insensitive
    substring match against each item type's display name.
    """
    db_items: list[tuple[str, Database]] = []
    if source_type in ("all", "database"):
        # load_only narrows the query to the columns the connections list
        # view actually renders.
        db_q = db.session.query(Database).options(
            load_only(
                Database.id,
                Database.uuid,
                Database.database_name,
                Database.backend,
                Database.allow_run_async,
                Database.allow_dml,
                Database.allow_file_upload,
                Database.expose_in_sqllab,
                Database.changed_on,
                Database.changed_by_fk,
            )
        )
        if name_filter:
            db_q = db_q.filter(Database.database_name.ilike(f"%{name_filter}%"))
        db_items = [("database", obj) for obj in db_q.all()]
    sl_items: list[tuple[str, SemanticLayer]] = []
    if source_type in ("all", "semantic_layer"):
        sl_q = db.session.query(SemanticLayer).options(
            load_only(
                SemanticLayer.uuid,
                SemanticLayer.name,
                SemanticLayer.type,
                SemanticLayer.description,
                SemanticLayer.changed_on,
                SemanticLayer.changed_by_fk,
            )
        )
        if name_filter:
            sl_q = sl_q.filter(SemanticLayer.name.ilike(f"%{name_filter}%"))
        sl_items = [("semantic_layer", obj) for obj in sl_q.all()]
    # Both result sets are fully materialized and merged in memory; the
    # caller sorts and paginates the combined list.
    # TODO: move sort + pagination to SQL before GA.
    return db_items + sl_items  # type: ignore
@staticmethod
def _get_connection_sort_key(order_column: str) -> Any:
"""Return a sort key function for connection items."""
def _sort_key_changed_on(
item: tuple[str, Database | SemanticLayer],
) -> float:
changed_on = item[1].changed_on
return changed_on.timestamp() if changed_on else 0.0
def _sort_key_name(
item: tuple[str, Database | SemanticLayer],
) -> str:
obj = item[1]
raw = (
obj.database_name # type: ignore[union-attr]
if item[0] == "database"
else obj.name
)
return raw.lower()
sort_key_map = {
"changed_on_delta_humanized": _sort_key_changed_on,
"database_name": _sort_key_name,
}
return sort_key_map.get(order_column, _sort_key_changed_on)
@staticmethod
def _serialize_database(obj: Database) -> dict[str, Any]:
changed_by = obj.changed_by
return {
"source_type": "database",
"id": obj.id,
"uuid": str(obj.uuid),
"database_name": obj.database_name,
"backend": obj.backend,
"allow_run_async": obj.allow_run_async,
"allow_dml": obj.allow_dml,
"allow_file_upload": obj.allow_file_upload,
"expose_in_sqllab": obj.expose_in_sqllab,
"changed_on_delta_humanized": obj.changed_on_delta_humanized(),
"changed_by": {
"first_name": changed_by.first_name,
"last_name": changed_by.last_name,
}
if changed_by
else None,
}
@staticmethod
def _serialize_semantic_layer(obj: SemanticLayer) -> dict[str, Any]:
    """
    Serialize a semantic layer into the combined connections-list row shape.

    The row mirrors the database serialization so both kinds render in one
    table: ``database_name`` carries the layer's name, ``backend`` carries
    the registered type's display name (falling back to the raw type when
    it is not in the registry), and database-only flags are None.
    """
    modifier = obj.changed_by
    layer_type = obj.type
    registered = registry.get(layer_type)
    display_type = registered.name if registered else layer_type  # type: ignore[attr-defined]
    serialized_modifier = (
        {
            "first_name": modifier.first_name,
            "last_name": modifier.last_name,
        }
        if modifier
        else None
    )
    return {
        "source_type": "semantic_layer",
        "uuid": str(obj.uuid),
        "database_name": obj.name,
        "backend": display_type,
        "sl_type": layer_type,
        "description": obj.description,
        "allow_run_async": None,
        "allow_dml": None,
        "allow_file_upload": None,
        "expose_in_sqllab": None,
        "changed_on_delta_humanized": obj.changed_on_delta_humanized(),
        "changed_by": serialized_modifier,
    }
@expose("/", methods=("GET",))
@protect()
@safe

View File

@@ -51,7 +51,8 @@ def test_create_semantic_layer_success(mocker: MockerFixture) -> None:
result = CreateSemanticLayerCommand(data).run()
assert result == new_model
dao.create.assert_called_once_with(attributes=data)
expected = {**data, "configuration": '{"account": "test"}'}
dao.create.assert_called_once_with(attributes=expected)
mock_cls.from_configuration.assert_called_once_with({"account": "test"})

View File

@@ -353,11 +353,14 @@ def test_configuration_schema_with_partial_config(
mocker: MockerFixture,
) -> None:
"""Test POST /schema/configuration enriches schema with partial config."""
mock_instance = MagicMock()
mock_instance.configuration = {"account": "test"}
mock_config_obj = MagicMock()
mock_cls = MagicMock()
mock_cls.from_configuration.return_value = mock_instance
mock_cls.configuration_class.model_json_schema.return_value = {
"type": "object",
"properties": {"account": {"type": "string"}},
}
mock_cls.configuration_class.model_validate.return_value = mock_config_obj
mock_cls.get_configuration_schema.return_value = {
"type": "object",
"properties": {"database": {"enum": ["db1", "db2"]}},
@@ -375,7 +378,7 @@ def test_configuration_schema_with_partial_config(
)
assert response.status_code == 200
mock_cls.get_configuration_schema.assert_called_once_with({"account": "test"})
mock_cls.get_configuration_schema.assert_called_once_with(mock_config_obj)
@SEMANTIC_LAYERS_APP
@@ -385,8 +388,19 @@ def test_configuration_schema_with_invalid_partial_config(
mocker: MockerFixture,
) -> None:
"""Test /schema/configuration returns schema when partial config fails."""
from pydantic import ValidationError as PydanticValidationError
mock_cls = MagicMock()
mock_cls.from_configuration.side_effect = ValueError("bad config")
mock_cls.configuration_class.model_json_schema.return_value = {
"type": "object",
"properties": {},
}
mock_cls.configuration_class.model_validate.side_effect = (
PydanticValidationError.from_exception_data(
title="test",
line_errors=[],
)
)
mock_cls.get_configuration_schema.return_value = {"type": "object"}
mocker.patch.dict(
@@ -832,6 +846,8 @@ def test_get_list_semantic_layers(
layer1.description = "First"
layer1.type = "snowflake"
layer1.cache_timeout = None
layer1.configuration = "{}"
layer1.changed_on_delta_humanized.return_value = "1 day ago"
layer2 = MagicMock()
layer2.uuid = uuid_lib.uuid4()
@@ -839,6 +855,8 @@ def test_get_list_semantic_layers(
layer2.description = None
layer2.type = "snowflake"
layer2.cache_timeout = 300
layer2.configuration = '{"account": "test"}'
layer2.changed_on_delta_humanized.return_value = "2 hours ago"
mock_dao = mocker.patch("superset.semantic_layers.api.SemanticLayerDAO")
mock_dao.find_all.return_value = [layer1, layer2]
@@ -851,6 +869,7 @@ def test_get_list_semantic_layers(
assert result[0]["name"] == "Layer 1"
assert result[1]["name"] == "Layer 2"
assert result[1]["cache_timeout"] == 300
assert result[1]["configuration"] == {"account": "test"}
@SEMANTIC_LAYERS_APP
@@ -883,6 +902,8 @@ def test_get_semantic_layer(
layer.description = "A layer"
layer.type = "snowflake"
layer.cache_timeout = 600
layer.configuration = '{"account": "test"}'
layer.changed_on_delta_humanized.return_value = "1 day ago"
mock_dao = mocker.patch("superset.semantic_layers.api.SemanticLayerDAO")
mock_dao.find_by_uuid.return_value = layer
@@ -895,6 +916,7 @@ def test_get_semantic_layer(
assert result["name"] == "My Layer"
assert result["type"] == "snowflake"
assert result["cache_timeout"] == 600
assert result["configuration"] == {"account": "test"}
@SEMANTIC_LAYERS_APP
@@ -910,3 +932,542 @@ def test_get_semantic_layer_not_found(
response = client.get(f"/api/v1/semantic_layer/{uuid_lib.uuid4()}")
assert response.status_code == 404
@SEMANTIC_LAYERS_APP
def test_serialize_layer_string_config(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test _serialize_layer handles string configuration (JSON)."""
    semantic_layer = MagicMock()
    semantic_layer.uuid = uuid_lib.uuid4()
    semantic_layer.name = "Layer"
    semantic_layer.description = None
    semantic_layer.type = "snowflake"
    semantic_layer.cache_timeout = None
    semantic_layer.configuration = '{"account": "test"}'
    semantic_layer.changed_on_delta_humanized.return_value = "1 day ago"

    dao = mocker.patch("superset.semantic_layers.api.SemanticLayerDAO")
    dao.find_by_uuid.return_value = semantic_layer

    response = client.get(f"/api/v1/semantic_layer/{semantic_layer.uuid}")

    assert response.status_code == 200
    # A JSON-encoded string configuration should be parsed into a dict.
    assert response.json["result"]["configuration"] == {"account": "test"}
@SEMANTIC_LAYERS_APP
def test_serialize_layer_dict_config(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test _serialize_layer handles dict configuration."""
    semantic_layer = MagicMock()
    semantic_layer.uuid = uuid_lib.uuid4()
    semantic_layer.name = "Layer"
    semantic_layer.description = None
    semantic_layer.type = "snowflake"
    semantic_layer.cache_timeout = None
    semantic_layer.configuration = {"account": "test"}
    semantic_layer.changed_on_delta_humanized.return_value = "1 day ago"

    dao = mocker.patch("superset.semantic_layers.api.SemanticLayerDAO")
    dao.find_by_uuid.return_value = semantic_layer

    response = client.get(f"/api/v1/semantic_layer/{semantic_layer.uuid}")

    assert response.status_code == 200
    # A configuration that is already a dict is returned verbatim.
    assert response.json["result"]["configuration"] == {"account": "test"}
@SEMANTIC_LAYERS_APP
def test_serialize_layer_none_config(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test _serialize_layer handles None configuration."""
    semantic_layer = MagicMock()
    semantic_layer.uuid = uuid_lib.uuid4()
    semantic_layer.name = "Layer"
    semantic_layer.description = None
    semantic_layer.type = "snowflake"
    semantic_layer.cache_timeout = None
    semantic_layer.configuration = None
    semantic_layer.changed_on_delta_humanized.return_value = "1 day ago"

    dao = mocker.patch("superset.semantic_layers.api.SemanticLayerDAO")
    dao.find_by_uuid.return_value = semantic_layer

    response = client.get(f"/api/v1/semantic_layer/{semantic_layer.uuid}")

    assert response.status_code == 200
    # A missing configuration is serialized as an empty dict, not null.
    assert response.json["result"]["configuration"] == {}
def test_infer_discriminators_injects_discriminator() -> None:
    """Test _infer_discriminators injects discriminator values."""
    from superset.semantic_layers.api import _infer_discriminators

    schema = {
        "$defs": {"VariantA": {"required": ["disc", "field_a"]}},
        "properties": {
            "auth": {
                "discriminator": {
                    "propertyName": "disc",
                    "mapping": {"a": "#/$defs/VariantA"},
                },
            },
        },
    }
    payload = {"auth": {"field_a": "value"}}

    enriched = _infer_discriminators(schema, payload)

    # The variant whose required fields match the payload should have its
    # discriminator value injected.
    assert enriched["auth"]["disc"] == "a"
def test_infer_discriminators_no_match() -> None:
    """Test _infer_discriminators returns data unchanged when no match."""
    from superset.semantic_layers.api import _infer_discriminators

    schema = {
        "$defs": {"VariantA": {"required": ["disc", "field_a"]}},
        "properties": {
            "auth": {
                "discriminator": {
                    "propertyName": "disc",
                    "mapping": {"a": "#/$defs/VariantA"},
                },
            },
        },
    }
    payload = {"auth": {"other": "value"}}

    enriched = _infer_discriminators(schema, payload)

    # No variant's required fields match, so no discriminator is injected.
    assert "disc" not in enriched["auth"]
def test_infer_discriminators_skips_non_dict() -> None:
    """Test _infer_discriminators skips non-dict values."""
    from superset.semantic_layers.api import _infer_discriminators

    schema = {
        "$defs": {},
        "properties": {"auth": {"discriminator": {"propertyName": "disc"}}},
    }
    payload = {"auth": "a string"}

    # Scalar values cannot carry a discriminator and pass through untouched.
    assert _infer_discriminators(schema, payload) == payload
def test_infer_discriminators_skips_if_discriminator_present() -> None:
    """Test _infer_discriminators skips when discriminator already set."""
    from superset.semantic_layers.api import _infer_discriminators

    schema = {
        "$defs": {},
        "properties": {
            "auth": {
                "discriminator": {
                    "propertyName": "disc",
                    "mapping": {"a": "#/$defs/VariantA"},
                },
            },
        },
    }
    payload = {"auth": {"disc": "a", "field_a": "value"}}

    enriched = _infer_discriminators(schema, payload)

    # An explicitly supplied discriminator value is left as-is.
    assert enriched["auth"]["disc"] == "a"
def test_infer_discriminators_no_discriminator() -> None:
    """Test _infer_discriminators skips properties without discriminator."""
    from superset.semantic_layers.api import _infer_discriminators

    schema = {
        "$defs": {},
        "properties": {"auth": {"type": "object"}},
    }
    payload = {"auth": {"key": "val"}}

    # Properties with no discriminator spec are passed through unchanged.
    assert _infer_discriminators(schema, payload) == payload
def test_parse_partial_config_strict_success() -> None:
    """Test _parse_partial_config returns config on strict validation."""
    from superset.semantic_layers.api import _parse_partial_config

    layer_cls = MagicMock()
    layer_cls.configuration_class.model_json_schema.return_value = {
        "properties": {},
    }
    strict_config = MagicMock()
    layer_cls.configuration_class.model_validate.return_value = strict_config

    # Strict validation succeeds, so its result is returned directly.
    assert _parse_partial_config(layer_cls, {"key": "val"}) == strict_config
def test_parse_partial_config_falls_back_to_partial() -> None:
    """Test _parse_partial_config falls back to partial validation."""
    from pydantic import ValidationError as PydanticValidationError

    from superset.semantic_layers.api import _parse_partial_config

    layer_cls = MagicMock()
    layer_cls.configuration_class.model_json_schema.return_value = {
        "properties": {},
    }
    partial_config = MagicMock()
    # First (strict) validation raises; the second (partial) call succeeds.
    layer_cls.configuration_class.model_validate.side_effect = [
        PydanticValidationError.from_exception_data(title="test", line_errors=[]),
        partial_config,
    ]

    assert _parse_partial_config(layer_cls, {"key": "val"}) == partial_config
def test_parse_partial_config_returns_none_on_failure() -> None:
    """Test _parse_partial_config returns None when all validation fails."""
    from pydantic import ValidationError as PydanticValidationError

    from superset.semantic_layers.api import _parse_partial_config

    layer_cls = MagicMock()
    layer_cls.configuration_class.model_json_schema.return_value = {
        "properties": {},
    }
    validation_error = PydanticValidationError.from_exception_data(
        title="test",
        line_errors=[],
    )
    # Every validation attempt raises, so parsing gives up and returns None.
    layer_cls.configuration_class.model_validate.side_effect = validation_error

    assert _parse_partial_config(layer_cls, {"key": "val"}) is None
@SEMANTIC_LAYERS_APP
def test_configuration_schema_enrichment_error_fallback(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test configuration_schema falls back when enrichment raises."""
    layer_cls = MagicMock()
    layer_cls.configuration_class.model_json_schema.return_value = {
        "properties": {},
    }
    layer_cls.configuration_class.model_validate.return_value = MagicMock()
    # The first schema call (with config) blows up; the retry succeeds.
    layer_cls.get_configuration_schema.side_effect = [
        RuntimeError("connection failed"),
        {"type": "object"},
    ]
    mocker.patch.dict(
        "superset.semantic_layers.api.registry",
        {"snowflake": layer_cls},
        clear=True,
    )

    response = client.post(
        "/api/v1/semantic_layer/schema/configuration",
        json={"type": "snowflake", "configuration": {"account": "test"}},
    )

    assert response.status_code == 200
    assert response.json["result"] == {"type": "object"}
    # The enrichment failure is surfaced as a warning rather than an error.
    assert response.json["warning"] == "connection failed"
    assert layer_cls.get_configuration_schema.call_count == 2
@SEMANTIC_LAYERS_APP
def test_connections_list(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test GET /connections/ returns combined database and layer list."""
    from datetime import datetime

    database = MagicMock()
    database.id = 1
    database.uuid = uuid_lib.uuid4()
    database.database_name = "PostgreSQL"
    database.backend = "postgresql"
    database.allow_run_async = False
    database.allow_dml = False
    database.allow_file_upload = False
    database.expose_in_sqllab = True
    database.changed_on = datetime(2026, 1, 1)
    database.changed_on_delta_humanized.return_value = "1 month ago"
    database.changed_by = None

    layer = MagicMock()
    layer.uuid = uuid_lib.uuid4()
    layer.name = "My Layer"
    layer.type = "snowflake"
    layer.description = "A layer"
    layer.cache_timeout = None
    layer.changed_on = datetime(2026, 2, 1)
    layer.changed_on_delta_humanized.return_value = "1 day ago"
    layer.changed_by = None

    database_query = MagicMock()
    database_query.options.return_value = database_query
    database_query.all.return_value = [database]
    database_query.filter.return_value = database_query

    layer_query = MagicMock()
    layer_query.options.return_value = layer_query
    layer_query.all.return_value = [layer]
    layer_query.filter.return_value = layer_query

    session = mocker.patch("superset.semantic_layers.api.db.session")
    # The endpoint queries Database first, then SemanticLayer.
    session.query.side_effect = [database_query, layer_query]

    registry_entry = MagicMock()
    registry_entry.name = "Snowflake"
    mocker.patch.dict(
        "superset.semantic_layers.api.registry",
        {"snowflake": registry_entry},
        clear=True,
    )
    mocker.patch(
        "superset.semantic_layers.api.is_feature_enabled",
        return_value=True,
    )

    response = client.get("/api/v1/semantic_layer/connections/")

    assert response.status_code == 200
    # One database plus one semantic layer are merged into a single listing.
    assert response.json["count"] == 2
    assert len(response.json["result"]) == 2
@SEMANTIC_LAYERS_APP
def test_connections_database_only(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test GET /connections/ returns 404 when feature flag is disabled."""
    # NOTE(review): the name suggests a database-only listing, but this test
    # exercises the disabled-feature-flag path; consider renaming it.
    mocker.patch(
        "superset.semantic_layers.api.is_feature_enabled",
        return_value=False,
    )

    resp = client.get("/api/v1/semantic_layer/connections/")

    assert resp.status_code == 404
@SEMANTIC_LAYERS_APP
def test_connections_name_filter(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test GET /connections/ with name filter."""
    import prison as rison_lib

    database_query = MagicMock()
    database_query.options.return_value = database_query
    database_query.all.return_value = []
    database_query.filter.return_value = database_query

    layer_query = MagicMock()
    layer_query.options.return_value = layer_query
    layer_query.all.return_value = []
    layer_query.filter.return_value = layer_query

    session = mocker.patch("superset.semantic_layers.api.db.session")
    session.query.side_effect = [database_query, layer_query]
    mocker.patch(
        "superset.semantic_layers.api.is_feature_enabled",
        return_value=True,
    )

    query_params = rison_lib.dumps(
        {"filters": [{"col": "database_name", "opr": "ct", "value": "post"}]}
    )
    response = client.get(f"/api/v1/semantic_layer/connections/?q={query_params}")

    assert response.status_code == 200
    assert response.json["count"] == 0
    # The name filter must be applied to both the database and layer queries.
    database_query.filter.assert_called_once()
    layer_query.filter.assert_called_once()
@SEMANTIC_LAYERS_APP
def test_connections_sort_by_name(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test GET /connections/ sorts by database_name."""
    from datetime import datetime

    import prison as rison_lib

    database = MagicMock()
    database.id = 1
    database.uuid = uuid_lib.uuid4()
    database.database_name = "Zebra DB"
    database.backend = "postgresql"
    database.allow_run_async = False
    database.allow_dml = False
    database.allow_file_upload = False
    database.expose_in_sqllab = True
    database.changed_on = datetime(2026, 1, 1)
    database.changed_on_delta_humanized.return_value = "1 month ago"
    database.changed_by = None

    layer = MagicMock()
    layer.uuid = uuid_lib.uuid4()
    layer.name = "Alpha Layer"
    layer.type = "snowflake"
    layer.description = None
    layer.cache_timeout = None
    layer.changed_on = datetime(2026, 2, 1)
    layer.changed_on_delta_humanized.return_value = "1 day ago"
    layer.changed_by = None

    database_query = MagicMock()
    database_query.options.return_value = database_query
    database_query.all.return_value = [database]

    layer_query = MagicMock()
    layer_query.options.return_value = layer_query
    layer_query.all.return_value = [layer]

    session = mocker.patch("superset.semantic_layers.api.db.session")
    session.query.side_effect = [database_query, layer_query]

    registry_entry = MagicMock()
    registry_entry.name = "Snowflake"
    mocker.patch.dict(
        "superset.semantic_layers.api.registry",
        {"snowflake": registry_entry},
        clear=True,
    )
    mocker.patch(
        "superset.semantic_layers.api.is_feature_enabled",
        return_value=True,
    )

    query_params = rison_lib.dumps(
        {"order_column": "database_name", "order_direction": "asc"}
    )
    response = client.get(f"/api/v1/semantic_layer/connections/?q={query_params}")

    assert response.status_code == 200
    rows = response.json["result"]
    # Databases and semantic layers are sorted together by display name.
    assert rows[0]["database_name"] == "Alpha Layer"
    assert rows[1]["database_name"] == "Zebra DB"
@SEMANTIC_LAYERS_APP
def test_connections_source_type_filter(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test GET /connections/ with source_type filter."""
    from datetime import datetime

    import prison as rison_lib

    database = MagicMock()
    database.id = 1
    database.uuid = uuid_lib.uuid4()
    database.database_name = "PostgreSQL"
    database.backend = "postgresql"
    database.allow_run_async = False
    database.allow_dml = False
    database.allow_file_upload = False
    database.expose_in_sqllab = True
    database.changed_on = datetime(2026, 1, 1)
    database.changed_on_delta_humanized.return_value = "1 month ago"
    database.changed_by = None

    database_query = MagicMock()
    database_query.options.return_value = database_query
    database_query.all.return_value = [database]

    session = mocker.patch("superset.semantic_layers.api.db.session")
    session.query.return_value = database_query
    mocker.patch(
        "superset.semantic_layers.api.is_feature_enabled",
        return_value=True,
    )

    query_params = rison_lib.dumps(
        {"filters": [{"col": "source_type", "opr": "eq", "value": "database"}]}
    )
    response = client.get(f"/api/v1/semantic_layer/connections/?q={query_params}")

    assert response.status_code == 200
    assert response.json["count"] == 1
    # With source_type=database, only the Database model is queried.
    session.query.assert_called_once()
@SEMANTIC_LAYERS_APP
def test_connections_source_type_semantic_layer_only(
    client: Any,
    full_api_access: None,
    mocker: MockerFixture,
) -> None:
    """Test GET /connections/ with source_type=semantic_layer filter."""
    from datetime import datetime

    import prison as rison_lib

    layer = MagicMock()
    layer.uuid = uuid_lib.uuid4()
    layer.name = "My Layer"
    layer.type = "snowflake"
    layer.description = None
    layer.cache_timeout = None
    layer.changed_on = datetime(2026, 1, 1)
    layer.changed_on_delta_humanized.return_value = "1 day ago"
    layer.changed_by = None

    layer_query = MagicMock()
    layer_query.options.return_value = layer_query
    layer_query.all.return_value = [layer]

    session = mocker.patch("superset.semantic_layers.api.db.session")
    session.query.return_value = layer_query

    registry_entry = MagicMock()
    registry_entry.name = "Snowflake"
    mocker.patch.dict(
        "superset.semantic_layers.api.registry",
        {"snowflake": registry_entry},
        clear=True,
    )
    mocker.patch(
        "superset.semantic_layers.api.is_feature_enabled",
        return_value=True,
    )

    # The unknown "other_col" filter should be ignored by the endpoint.
    query_params = rison_lib.dumps(
        {
            "filters": [
                {"col": "source_type", "opr": "eq", "value": "semantic_layer"},
                {"col": "other_col", "opr": "eq", "value": "ignored"},
            ]
        }
    )
    response = client.get(f"/api/v1/semantic_layer/connections/?q={query_params}")

    assert response.status_code == 200
    assert response.json["count"] == 1
    assert response.json["result"][0]["source_type"] == "semantic_layer"
    # Only the SemanticLayer model is queried; Database is skipped entirely.
    session.query.assert_called_once()