Compare commits

...

6 Commits

Author SHA1 Message Date
Beto Dealmeida
0f08f016d2 Address semantic layer review nits
Improve semantic layer schema refresh error handling and connections endpoint behavior to reduce noisy failures while keeping this feature branch focused. Also restore frontend typing consistency and add debounce coverage for dynamic schema refresh.
2026-04-23 14:00:59 -04:00
Beto Dealmeida
65fb2ff834 Fix rebase 2026-04-23 13:33:47 -04:00
Beto Dealmeida
d659089c59 feat: UI for semantic layers 2026-04-23 13:33:47 -04:00
Beto Dealmeida
5e046a857c Update permissions 2026-04-23 13:33:47 -04:00
Beto Dealmeida
36554237aa Address comments 2026-04-23 13:33:47 -04:00
Beto Dealmeida
6f93e1cbb1 feat: API for semantic layers 2026-04-23 13:33:47 -04:00
23 changed files with 4026 additions and 125 deletions

View File

@@ -285,6 +285,7 @@ module = [
"superset.tags.filters",
"superset.commands.security.update",
"superset.commands.security.create",
"superset.semantic_layers.api",
]
warn_unused_ignores = false

View File

@@ -0,0 +1,73 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from pydantic import BaseModel
def build_configuration_schema(
    config_class: type[BaseModel],
    configuration: BaseModel | None = None,
) -> dict[str, Any]:
    """
    Build a JSON schema from a Pydantic configuration class.

    Handles generic boilerplate that any semantic layer with dynamic fields
    needs:

    - Reorders properties to match model field order (Pydantic sorts them
      alphabetically)
    - When ``configuration`` is None, sets ``enum: []`` on all ``x-dynamic``
      properties so the frontend renders them as empty dropdowns

    Semantic layer implementations call this instead of
    ``model_json_schema()`` directly, then only need to add their own dynamic
    population logic.
    """
    schema = config_class.model_json_schema()
    emitted_properties = schema["properties"]

    # Pydantic emits properties sorted alphabetically; rebuild the mapping so
    # it follows the declaration order of the model fields instead. Fields use
    # their alias as the schema key when one is declared.
    ordered: dict[str, Any] = {}
    for field_name, field_info in config_class.model_fields.items():
        key = field_info.alias if field_info.alias else field_name
        if key in emitted_properties:
            ordered[key] = emitted_properties[key]
    schema["properties"] = ordered

    # Without a saved configuration there is nothing to populate the dynamic
    # dropdowns from, so expose them as empty enums.
    if configuration is None:
        for prop in schema["properties"].values():
            if prop.get("x-dynamic"):
                prop["enum"] = []

    return schema
def check_dependencies(
    prop_schema: dict[str, Any],
    configuration: BaseModel,
) -> bool:
    """
    Check whether a dynamic property's dependencies are satisfied.

    Reads the ``x-dependsOn`` list from the property schema and returns
    ``True`` when every referenced attribute on ``configuration`` is truthy.
    """
    required = prop_schema.get("x-dependsOn", [])
    # A missing attribute resolves to None and counts as unsatisfied.
    for attribute in required:
        if not getattr(configuration, attribute, None):
            return False
    return True

View File

@@ -32,6 +32,8 @@ class SemanticLayer(ABC, Generic[ConfigT, SemanticViewT]):
Abstract base class for semantic layers.
"""
configuration_class: type[BaseModel]
@classmethod
@abstractmethod
def from_configuration(

View File

@@ -28,8 +28,14 @@
"@emotion/cache": "^11.4.0",
"@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.1",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/ibm-plex-mono": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@googleapis/sheets": "^13.0.1",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@luma.gl/constants": "~9.2.5",
"@luma.gl/core": "~9.2.5",
"@luma.gl/engine": "~9.2.5",
@@ -37,6 +43,7 @@
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",
@@ -4042,6 +4049,15 @@
}
}
},
"node_modules/@fontsource/fira-code": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/fira-code/-/fira-code-5.2.7.tgz",
"integrity": "sha512-tnB9NNund9TwIym8/7DMJe573nlPEQb+fKUV5GL8TBYXjIhDvL0D7mgmNVNQUPhXp+R7RylQeiBdkA4EbOHPGQ==",
"license": "OFL-1.1",
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
},
"node_modules/@fontsource/ibm-plex-mono": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/ibm-plex-mono/-/ibm-plex-mono-5.2.7.tgz",
@@ -4052,15 +4068,34 @@
}
},
"node_modules/@fontsource/inter": {
"version": "5.2.6",
"resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.6.tgz",
"integrity": "sha512-CZs9S1CrjD0jPwsNy9W6j0BhsmRSQrgwlTNkgQXTsAeDRM42LBRLo3eo9gCzfH4GvV7zpyf78Ozfl773826csw==",
"version": "5.2.8",
"resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.8.tgz",
"integrity": "sha512-P6r5WnJoKiNVV+zvW2xM13gNdFhAEpQ9dQJHt3naLvfg+LkF2ldgSLiF4T41lf1SQCM9QmkqPTn4TH568IRagg==",
"license": "OFL-1.1",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
},
"node_modules/@great-expectations/jsonforms-antd-renderers": {
"version": "2.2.11",
"resolved": "https://registry.npmjs.org/@great-expectations/jsonforms-antd-renderers/-/jsonforms-antd-renderers-2.2.11.tgz",
"integrity": "sha512-QeKI6RP+vZo5Bf5WX5Mx6CPEBYvR83bIyeezHoyVVc1+pGsDqO9lsFdbaFKpqozV+s/TRB1KmVAW4GxpMzLuAw==",
"license": "MIT",
"dependencies": {
"lodash.isempty": "^4.4.0",
"lodash.merge": "^4.6.2",
"lodash.range": "^3.2.0",
"lodash.startcase": "^4.4.0"
},
"peerDependencies": {
"@ant-design/icons": "^5.3.0",
"@jsonforms/core": "^3.3.0",
"@jsonforms/react": "^3.3.0",
"antd": "^5.14.0",
"dayjs": "^1",
"react": "^17 || ^18"
}
},
"node_modules/@googleapis/sheets": {
"version": "13.0.1",
"resolved": "https://registry.npmjs.org/@googleapis/sheets/-/sheets-13.0.1.tgz",
@@ -6120,6 +6155,45 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@jsonforms/core": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/core/-/core-3.7.0.tgz",
"integrity": "sha512-CE9viWtwi9QWLqlWLeOul1/R1GRAyOA9y6OoUpsCc0FhyR+g5p29F3k0fUExHWxL0Sf4KHcXYkfhtqfRBPS8ww==",
"license": "MIT",
"dependencies": {
"@types/json-schema": "^7.0.3",
"ajv": "^8.6.1",
"ajv-formats": "^2.1.0",
"lodash": "^4.17.21"
}
},
"node_modules/@jsonforms/react": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/react/-/react-3.7.0.tgz",
"integrity": "sha512-HkY7qAx8vW97wPEgZ7GxCB3iiXG1c95GuObxtcDHGPBJWMwnxWBnVYJmv5h7nthrInKsQKHZL5OusnC/sj/1GQ==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonforms/vanilla-renderers": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/vanilla-renderers/-/vanilla-renderers-3.7.0.tgz",
"integrity": "sha512-RdXQGsheARUJVbaTe6SqGw9W4/yrm0BgUok6OKUj8krp1NF4fqXc5UbYGHFksMR/p7LCuoYHCtQzKLXEfxJbDw==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"@jsonforms/react": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonjoy.com/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz",
@@ -9487,6 +9561,89 @@
"integrity": "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ==",
"license": "MIT"
},
"node_modules/@rjsf/antd": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/antd/-/antd-5.24.13.tgz",
"integrity": "sha512-UiWE8xoBxxCoe/SEkdQEmL5E6z3I1pw0+y0dTyGt8SHfAxxFc4/OWn7tKOAiNsKCXgf83t0JKn6CHWLD01sAdQ==",
"license": "Apache-2.0",
"dependencies": {
"classnames": "^2.5.1",
"lodash": "^4.17.21",
"lodash-es": "^4.17.21",
"rc-picker": "2.7.6"
},
"engines": {
"node": ">=14"
},
"peerDependencies": {
"@ant-design/icons": "^4.0.0 || ^5.0.0",
"@rjsf/core": "^5.24.x",
"@rjsf/utils": "^5.24.x",
"antd": "^4.24.0 || ^5.8.5",
"dayjs": "^1.8.0",
"react": "^16.14.0 || >=17"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker": {
"version": "2.7.6",
"resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-2.7.6.tgz",
"integrity": "sha512-H9if/BUJUZBOhPfWcPeT15JUI3/ntrG9muzERrXDkSoWmDj4yzmBvumozpxYrHwjcKnjyDGAke68d+whWwvhHA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "^2.2.1",
"date-fns": "2.x",
"dayjs": "1.x",
"moment": "^2.24.0",
"rc-trigger": "^5.0.4",
"rc-util": "^5.37.0",
"shallowequal": "^1.1.0"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger": {
"version": "5.3.4",
"resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.4.tgz",
"integrity": "sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.18.3",
"classnames": "^2.2.6",
"rc-align": "^4.0.0",
"rc-motion": "^2.0.0",
"rc-util": "^5.19.2"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger/node_modules/rc-align": {
"version": "4.0.15",
"resolved": "https://registry.npmjs.org/rc-align/-/rc-align-4.0.15.tgz",
"integrity": "sha512-wqJtVH60pka/nOX7/IspElA8gjPNQKIx/ZqJ6heATCkXpe1Zg4cPVrMD2vC96wjsFFL8WsmhPbx9tdMo1qqlIA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "2.x",
"dom-align": "^1.7.0",
"rc-util": "^5.26.0",
"resize-observer-polyfill": "^1.5.1"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/core": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/core/-/core-5.24.13.tgz",
@@ -22435,6 +22592,22 @@
"integrity": "sha512-O/gRkjWULp3xVX8K85V0H3tsSGole0WYt77KVpGZO2xTGLuVFuvE6JIsIli3fvFHCYBhGFn/8OHEEyMYF+QehA==",
"license": "MIT"
},
"node_modules/date-fns": {
"version": "2.30.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz",
"integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.21.0"
},
"engines": {
"node": ">=0.11"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/date-fns"
}
},
"node_modules/dateformat": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.2.tgz",
@@ -23040,6 +23213,12 @@
"integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==",
"license": "MIT"
},
"node_modules/dom-align": {
"version": "1.12.4",
"resolved": "https://registry.npmjs.org/dom-align/-/dom-align-1.12.4.tgz",
"integrity": "sha512-R8LUSEay/68zE5c8/3BDxiTEvgb4xZTF0RKmAHfiEVN3klfIpXfi2/QCoiWPccVQ0J/ZGdz9OjzL4uJEP/MRAw==",
"license": "MIT"
},
"node_modules/dom-converter": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz",
@@ -35677,6 +35856,12 @@
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==",
"license": "MIT"
},
"node_modules/lodash.isempty": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz",
"integrity": "sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==",
"license": "MIT"
},
"node_modules/lodash.isequal": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
@@ -35708,7 +35893,6 @@
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.once": {
@@ -35719,6 +35903,18 @@
"license": "MIT",
"peer": true
},
"node_modules/lodash.range": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/lodash.range/-/lodash.range-3.2.0.tgz",
"integrity": "sha512-Fgkb7SinmuzqgIhNhAElo0BL/R1rHCnhwSZf78omqSwvWqD0kD2ssOAutQonDKH/ldS8BxA72ORYI09qAY9CYg==",
"license": "MIT"
},
"node_modules/lodash.startcase": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
"integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
"license": "MIT"
},
"node_modules/lodash.uniq": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
@@ -37240,7 +37436,6 @@
"resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz",
"integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==",
"license": "MIT",
"peer": true,
"engines": {
"node": "*"
}
@@ -44766,6 +44961,12 @@
"integrity": "sha512-b6i4ZpVuUxB9h5gfCxPiusKYkqTMOjEbBs4wMaFbkfia4yFv92UKZ6Df8WXcKbn08JNL/abvg3FnMAOfakDvUw==",
"license": "MIT"
},
"node_modules/shallowequal": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz",
"integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==",
"license": "MIT"
},
"node_modules/shapefile": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/shapefile/-/shapefile-0.3.1.tgz",
@@ -51505,7 +51706,7 @@
"react-js-cron": "^5.2.0",
"react-markdown": "^8.0.7",
"react-resize-detector": "^7.1.2",
"react-syntax-highlighter": "^16.1.0",
"react-syntax-highlighter": "^16.1.1",
"react-ultimate-pagination": "^1.3.2",
"regenerator-runtime": "^0.14.1",
"rehype-raw": "^7.0.0",

View File

@@ -117,7 +117,14 @@
"@luma.gl/gltf": "~9.2.5",
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",

View File

@@ -150,6 +150,7 @@ export interface ButtonProps {
buttonStyle: 'primary' | 'secondary' | 'dashed' | 'link' | 'tertiary';
loading?: boolean;
icon?: IconType;
component?: ReactNode;
}
export interface SubMenuProps {
@@ -300,18 +301,22 @@ const SubMenuComponent: FunctionComponent<SubMenuProps> = props => {
</SubMenu>
))}
</Menu>
{props.buttons?.map((btn, i) => (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
))}
{props.buttons?.map((btn, i) =>
btn.component ? (
<span key={i}>{btn.component}</span>
) : (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
),
)}
</div>
</Row>
{props.children}

View File

@@ -0,0 +1,127 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { SupersetClient } from '@superset-ui/core';
import { render, waitFor } from 'spec/helpers/testing-library';
import SemanticLayerModal from './SemanticLayerModal';
// Tracks whether the JsonForms stub has already fired its onChange, so the
// simulated user edit happens exactly once across re-renders.
let mockJsonFormsChangeTriggered = false;

// Stub out JsonForms: on first render it immediately reports a form change
// ({ warehouse: 'wh1' }) with no validation errors, simulating a user edit
// without rendering the real form.
jest.mock('@jsonforms/react', () => ({
  ...jest.requireActual('@jsonforms/react'),
  JsonForms: ({ onChange }: { onChange: (value: unknown) => void }) => {
    // eslint-disable-next-line react-hooks/rules-of-hooks
    if (!mockJsonFormsChangeTriggered) {
      mockJsonFormsChangeTriggered = true;
      onChange({
        data: { warehouse: 'wh1' },
        errors: [],
      });
    }
    return null;
  },
}));
// Keep the real @superset-ui/core module shape but replace the HTTP verbs
// with jest mocks so each test controls the network responses;
// getClientErrorObject resolves to an empty error object.
jest.mock('@superset-ui/core', () => ({
  ...jest.requireActual('@superset-ui/core'),
  SupersetClient: {
    ...jest.requireActual('@superset-ui/core').SupersetClient,
    get: jest.fn(),
    post: jest.fn(),
    put: jest.fn(),
  },
  getClientErrorObject: jest.fn(() => Promise.resolve({ error: '' })),
}));

const mockedGet = SupersetClient.get as jest.Mock;
const mockedPost = SupersetClient.post as jest.Mock;

// Baseline modal props; the uuid puts the modal in edit mode.
const props = {
  show: true,
  onHide: jest.fn(),
  addDangerToast: jest.fn(),
  addSuccessToast: jest.fn(),
  semanticLayerUuid: '11111111-1111-1111-1111-111111111111',
};
beforeEach(() => {
  mockJsonFormsChangeTriggered = false;
  // Fake timers let the test advance the schema-refresh debounce window
  // deterministically.
  jest.useFakeTimers();
  mockedGet.mockReset();
  mockedPost.mockReset();
  // First GET: the list of available semantic layer types.
  // Second GET: the existing layer being edited (edit mode).
  mockedGet
    .mockResolvedValueOnce({
      json: {
        result: [{ id: 'snowflake', name: 'Snowflake', description: '' }],
      },
    })
    .mockResolvedValueOnce({
      json: {
        result: {
          name: 'Layer 1',
          type: 'snowflake',
          configuration: { warehouse: 'wh0' },
        },
      },
    });
  // Every POST returns a configuration schema whose `warehouse` field is
  // dynamic and depends on itself, so editing it triggers a schema refresh.
  mockedPost.mockResolvedValue({
    json: {
      result: {
        type: 'object',
        properties: {
          warehouse: {
            type: 'string',
            'x-dynamic': true,
            'x-dependsOn': ['warehouse'],
          },
        },
      },
    },
  });
});

afterEach(() => {
  // Flush any debounce timers still pending before restoring real timers.
  jest.runOnlyPendingTimers();
  jest.useRealTimers();
});
// Opening the modal in edit mode fetches the base schema immediately; once
// the stubbed JsonForms reports a change and the debounce window elapses,
// the schema is re-fetched with the edited configuration attached.
test('posts configuration schema refresh after debounce', async () => {
  render(<SemanticLayerModal {...props} />);
  // Initial fetch: base schema without any configuration payload.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: { type: 'snowflake' },
    });
  });
  // Advance just past the 500ms debounce window.
  jest.advanceTimersByTime(501);
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: {
        type: 'snowflake',
        configuration: { warehouse: 'wh1' },
      },
    });
  });
});

View File

@@ -0,0 +1,616 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useState, useEffect, useCallback, useRef } from 'react';
import { t } from '@apache-superset/core/translation';
import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
import { Input, Select, Button } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import { JsonForms, withJsonFormsControlProps } from '@jsonforms/react';
import type {
JsonSchema,
UISchemaElement,
ControlProps,
} from '@jsonforms/core';
import {
rankWith,
and,
isStringControl,
formatIs,
schemaMatches,
} from '@jsonforms/core';
import {
rendererRegistryEntries,
cellRegistryEntries,
TextControl,
} from '@great-expectations/jsonforms-antd-renderers';
import type { ErrorObject } from 'ajv';
import {
StandardModal,
ModalFormField,
MODAL_STANDARD_WIDTH,
MODAL_MEDIUM_WIDTH,
} from 'src/components/Modal';
/**
 * Custom renderer for schema fields declared with `format: "password"`
 * (the shape Pydantic emits for `SecretStr`): delegates to the antd text
 * control with the input type forced to `password`.
 */
function PasswordControl(props: ControlProps) {
  const maskedUiSchema = {
    ...props.uischema,
    options: { ...props.uischema.options, type: 'password' },
  };
  return TextControl({ ...props, uischema: maskedUiSchema });
}

const PasswordRenderer = withJsonFormsControlProps(PasswordControl);

// Applies to string controls whose schema format is "password".
const passwordEntry = {
  tester: rankWith(3, and(isStringControl, formatIs('password'))),
  renderer: PasswordRenderer,
};
/**
 * Invisible renderer for `const` properties (e.g. Pydantic discriminator
 * fields): emits no UI, but writes the const value into the form data so
 * discriminated unions resolve correctly on the backend.
 */
function ConstControl({ data, handleChange, path, schema }: ControlProps) {
  const constValue = (schema as Record<string, unknown>).const;
  useEffect(() => {
    const outOfSync = constValue !== undefined && data !== constValue;
    if (outOfSync) {
      handleChange(path, constValue);
    }
  }, [constValue, data, handleChange, path]);
  return null;
}

const ConstRenderer = withJsonFormsControlProps(ConstControl);

// Rank 10 so schemas containing `const` always resolve to this renderer.
const constEntry = {
  tester: rankWith(
    10,
    schemaMatches(s => s !== undefined && 'const' in s),
  ),
  renderer: ConstRenderer,
};
/**
 * Checks whether all dependency values are filled (non-empty).
 * Handles nested objects (like auth) by checking they have at least one key.
 */
function areDependenciesSatisfied(
  dependencies: string[],
  data: Record<string, unknown>,
): boolean {
  for (const dep of dependencies) {
    const current = data[dep];
    // null / undefined / empty string count as missing.
    if (current === null || current === undefined || current === '') {
      return false;
    }
    // An object with no keys (e.g. an unfilled auth section) also counts
    // as missing.
    if (typeof current === 'object' && Object.keys(current).length === 0) {
      return false;
    }
  }
  return true;
}
/**
 * Renderer for fields marked `x-dynamic` in the JSON Schema. While the
 * schema is being refreshed with dynamic values from the backend, the field
 * is disabled and shows a loading spinner inside the input; otherwise it
 * renders as a plain text control.
 */
function DynamicFieldControl(props: ControlProps) {
  const { refreshingSchema, formData: cfgData } = props.config ?? {};
  const deps = (props.schema as Record<string, unknown>)?.['x-dependsOn'];
  // Only show the loading state when a refresh is in flight AND every
  // dependency of this field has been filled in.
  const showSpinner =
    refreshingSchema &&
    Array.isArray(deps) &&
    areDependenciesSatisfied(
      deps as string[],
      (cfgData as Record<string, unknown>) ?? {},
    );
  if (!showSpinner) {
    return TextControl(props);
  }
  const loadingUiSchema = {
    ...props.uischema,
    options: {
      ...props.uischema.options,
      placeholderText: t('Loading...'),
      inputProps: { suffix: <Icons.LoadingOutlined iconSize="s" /> },
    },
  };
  return TextControl({ ...props, uischema: loadingUiSchema, enabled: false });
}
const DynamicFieldRenderer = withJsonFormsControlProps(DynamicFieldControl);

// Registry entry for string controls explicitly tagged `x-dynamic: true`
// in the JSON Schema.
const dynamicFieldEntry = {
  tester: rankWith(
    3,
    and(
      isStringControl,
      schemaMatches(
        s => (s as Record<string, unknown>)?.['x-dynamic'] === true,
      ),
    ),
  ),
  renderer: DynamicFieldRenderer,
};

// Renderer registry passed to JsonForms: the antd defaults plus the custom
// password, const, and dynamic-field renderers defined above.
const renderers = [
  ...rendererRegistryEntries,
  passwordEntry,
  constEntry,
  dynamicFieldEntry,
];

// Modal flow: pick a semantic layer type first, then configure it.
type Step = 'type' | 'config';
// JSON Forms validation display modes toggled by the save handler.
type ValidationMode = 'ValidateAndHide' | 'ValidateAndShow';

// Delay before re-fetching the configuration schema after dependency
// values change.
const SCHEMA_REFRESH_DEBOUNCE_MS = 500;
/**
 * Removes empty `enum` arrays from schema properties. The JSON Schema spec
 * requires `enum` to have at least one item, and AJV rejects empty arrays.
 * Fields with empty enums are rendered as plain text inputs instead.
 */
function sanitizeSchema(schema: JsonSchema): JsonSchema {
  if (!schema.properties) return schema;
  const cleaned = Object.fromEntries(
    Object.entries(schema.properties).map(([key, prop]) => {
      const hasEmptyEnum =
        typeof prop === 'object' &&
        prop !== null &&
        'enum' in prop &&
        Array.isArray(prop.enum) &&
        prop.enum.length === 0;
      if (!hasEmptyEnum) {
        return [key, prop as JsonSchema];
      }
      // Strip the invalid empty enum, keep everything else.
      const { enum: _dropped, ...withoutEnum } = prop;
      return [key, withoutEnum];
    }),
  );
  return { ...schema, properties: cleaned } as JsonSchema;
}
/**
 * Builds a JSON Forms UI schema from a JSON Schema, using the first
 * `examples` entry as placeholder text for each string property and the
 * `description` as a tooltip.
 */
function buildUiSchema(schema: JsonSchema): UISchemaElement | undefined {
  const { properties } = schema;
  if (!properties) return undefined;
  // Prefer the backend-supplied explicit ordering; fall back to the JSON
  // object key order.
  const order =
    ((schema as Record<string, unknown>)['x-propertyOrder'] as string[]) ??
    Object.keys(properties);
  const elements: Record<string, unknown>[] = [];
  for (const key of order) {
    if (!(key in properties)) continue;
    const prop = properties[key];
    const control: Record<string, unknown> = {
      type: 'Control',
      scope: `#/properties/${key}`,
    };
    if (typeof prop === 'object' && prop !== null) {
      const options: Record<string, unknown> = {};
      const { examples, description } = prop as {
        examples?: unknown;
        description?: unknown;
      };
      if (Array.isArray(examples) && examples.length > 0) {
        options.placeholderText = String(examples[0]);
      }
      if (typeof description === 'string') {
        options.tooltip = description;
      }
      if (Object.keys(options).length > 0) {
        control.options = options;
      }
    }
    elements.push(control);
  }
  return { type: 'VerticalLayout', elements } as UISchemaElement;
}
/**
 * Extracts dynamic field dependency mappings from the schema.
 * Returns a map of field name → list of dependency field names.
 */
function getDynamicDependencies(schema: JsonSchema): Record<string, string[]> {
  const result: Record<string, string[]> = {};
  const properties = schema.properties ?? {};
  Object.entries(properties).forEach(([field, prop]) => {
    if (typeof prop !== 'object' || prop === null) return;
    const raw = prop as Record<string, unknown>;
    // Only fields tagged dynamic with a well-formed dependency list count.
    if ('x-dynamic' in raw && Array.isArray(raw['x-dependsOn'])) {
      result[field] = raw['x-dependsOn'] as string[];
    }
  });
  return result;
}
/**
 * Serializes the dependency values for a set of fields into a stable string
 * for comparison, so we only re-fetch when dependency values actually change.
 */
function serializeDependencyValues(
  dynamicDeps: Record<string, string[]>,
  data: Record<string, unknown>,
): string {
  // Collect every dependency key across all dynamic fields, deduplicated
  // and sorted so the snapshot is order-independent.
  const depKeys = [
    ...new Set(Object.values(dynamicDeps).flat()),
  ].sort();
  const snapshot: Record<string, unknown> = {};
  depKeys.forEach(key => {
    snapshot[key] = data[key];
  });
  return JSON.stringify(snapshot);
}
// A semantic layer implementation as returned by the backend
// `/api/v1/semantic_layer/types` endpoint.
interface SemanticLayerType {
  id: string;
  name: string;
  description: string;
}

// Props for the create/edit modal; a present `semanticLayerUuid` switches
// the modal into edit mode.
interface SemanticLayerModalProps {
  show: boolean;
  onHide: () => void;
  addDangerToast: (msg: string) => void;
  addSuccessToast: (msg: string) => void;
  semanticLayerUuid?: string;
}
export default function SemanticLayerModal({
show,
onHide,
addDangerToast,
addSuccessToast,
semanticLayerUuid,
}: SemanticLayerModalProps) {
const isEditMode = !!semanticLayerUuid;
const [step, setStep] = useState<Step>('type');
const [name, setName] = useState('');
const [selectedType, setSelectedType] = useState<string | null>(null);
const [types, setTypes] = useState<SemanticLayerType[]>([]);
const [loading, setLoading] = useState(false);
const [configSchema, setConfigSchema] = useState<JsonSchema | null>(null);
const [uiSchema, setUiSchema] = useState<UISchemaElement | undefined>(
undefined,
);
const [formData, setFormData] = useState<Record<string, unknown>>({});
const [saving, setSaving] = useState(false);
const [hasErrors, setHasErrors] = useState(true);
const [refreshingSchema, setRefreshingSchema] = useState(false);
const [validationMode, setValidationMode] =
useState<ValidationMode>('ValidateAndHide');
const errorsRef = useRef<ErrorObject[]>([]);
const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const lastDepSnapshotRef = useRef<string>('');
const dynamicDepsRef = useRef<Record<string, string[]>>({});
const fetchTypes = useCallback(async () => {
setLoading(true);
try {
const { json } = await SupersetClient.get({
endpoint: '/api/v1/semantic_layer/types',
});
setTypes(json.result ?? []);
} catch {
addDangerToast(
t('An error occurred while fetching semantic layer types'),
);
} finally {
setLoading(false);
}
}, [addDangerToast]);
const applySchema = useCallback((rawSchema: JsonSchema) => {
const schema = sanitizeSchema(rawSchema);
setConfigSchema(schema);
setUiSchema(buildUiSchema(schema));
dynamicDepsRef.current = getDynamicDependencies(rawSchema);
}, []);
const fetchConfigSchema = useCallback(
async (type: string, configuration?: Record<string, unknown>) => {
const isInitialFetch = !configuration;
if (isInitialFetch) setLoading(true);
else setRefreshingSchema(true);
try {
const { json } = await SupersetClient.post({
endpoint: '/api/v1/semantic_layer/schema/configuration',
jsonPayload: { type, configuration },
});
applySchema(json.result);
if (json.warning) {
addDangerToast(String(json.warning));
}
if (isInitialFetch) setStep('config');
} catch (error) {
const clientError = await getClientErrorObject(error);
if (isInitialFetch) {
addDangerToast(
clientError.error ||
t('An error occurred while fetching the configuration schema'),
);
} else {
addDangerToast(
clientError.error ||
t('An error occurred while refreshing the configuration schema'),
);
}
} finally {
if (isInitialFetch) setLoading(false);
else setRefreshingSchema(false);
}
},
[addDangerToast, applySchema],
);
const fetchExistingLayer = useCallback(
async (uuid: string) => {
setLoading(true);
try {
const { json } = await SupersetClient.get({
endpoint: `/api/v1/semantic_layer/${uuid}`,
});
const layer = json.result;
setName(layer.name ?? '');
setSelectedType(layer.type);
setFormData(layer.configuration ?? {});
setHasErrors(false);
// Fetch base schema (no configuration → no Snowflake connection) to
// show the form immediately. The existing maybeRefreshSchema machinery
// will trigger an enriched fetch in the background once deps are
// satisfied, and DynamicFieldControl will show per-field spinners.
const { json: schemaJson } = await SupersetClient.post({
endpoint: '/api/v1/semantic_layer/schema/configuration',
jsonPayload: { type: layer.type },
});
applySchema(schemaJson.result);
setStep('config');
} catch (error) {
const clientError = await getClientErrorObject(error);
addDangerToast(
clientError.error ||
t('An error occurred while fetching the semantic layer'),
);
} finally {
setLoading(false);
}
},
[addDangerToast, applySchema],
);
useEffect(() => {
if (show) {
if (isEditMode && semanticLayerUuid) {
fetchTypes();
fetchExistingLayer(semanticLayerUuid);
} else {
fetchTypes();
}
} else {
setStep('type');
setName('');
setSelectedType(null);
setTypes([]);
setConfigSchema(null);
setUiSchema(undefined);
setFormData({});
setHasErrors(true);
setRefreshingSchema(false);
setValidationMode('ValidateAndHide');
errorsRef.current = [];
lastDepSnapshotRef.current = '';
dynamicDepsRef.current = {};
if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
}
}, [show, fetchTypes, isEditMode, semanticLayerUuid, fetchExistingLayer]);
const handleStepAdvance = () => {
if (selectedType) {
fetchConfigSchema(selectedType);
}
};
const handleBack = () => {
setStep('type');
setConfigSchema(null);
setUiSchema(undefined);
setFormData({});
setValidationMode('ValidateAndHide');
errorsRef.current = [];
lastDepSnapshotRef.current = '';
dynamicDepsRef.current = {};
if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
};
const handleCreate = async () => {
setSaving(true);
try {
if (isEditMode && semanticLayerUuid) {
await SupersetClient.put({
endpoint: `/api/v1/semantic_layer/${semanticLayerUuid}`,
jsonPayload: { name, configuration: formData },
});
addSuccessToast(t('Semantic layer updated'));
} else {
await SupersetClient.post({
endpoint: '/api/v1/semantic_layer/',
jsonPayload: { name, type: selectedType, configuration: formData },
});
addSuccessToast(t('Semantic layer created'));
}
onHide();
} catch (error) {
const clientError = await getClientErrorObject(error);
addDangerToast(
clientError.error ||
(isEditMode
? t('An error occurred while updating the semantic layer')
: t('An error occurred while creating the semantic layer')),
);
} finally {
setSaving(false);
}
};
const handleSave = () => {
if (step === 'type') {
handleStepAdvance();
} else {
setValidationMode('ValidateAndShow');
if (errorsRef.current.length === 0) {
handleCreate();
}
}
};
  // Debounced dynamic-schema refresh: when the form values that dynamic
  // fields depend on change, re-fetch the configuration schema so the server
  // can re-populate those fields.
  const maybeRefreshSchema = useCallback(
    (data: Record<string, unknown>) => {
      if (!selectedType) return;
      const dynamicDeps = dynamicDepsRef.current;
      if (Object.keys(dynamicDeps).length === 0) return;
      // Check if any dynamic field has all dependencies satisfied
      const hasSatisfiedDeps = Object.values(dynamicDeps).some(deps =>
        areDependenciesSatisfied(deps, data),
      );
      if (!hasSatisfiedDeps) return;
      // Only re-fetch if dependency values actually changed
      const snapshot = serializeDependencyValues(dynamicDeps, data);
      if (snapshot === lastDepSnapshotRef.current) return;
      lastDepSnapshotRef.current = snapshot;
      // Restart the debounce window on every qualifying change so rapid
      // typing triggers only one request.
      if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
      debounceTimerRef.current = setTimeout(() => {
        fetchConfigSchema(selectedType, data);
      }, SCHEMA_REFRESH_DEBOUNCE_MS);
    },
    [selectedType, fetchConfigSchema],
  );
const handleFormChange = useCallback(
({
data,
errors,
}: {
data: Record<string, unknown>;
errors?: ErrorObject[];
}) => {
setFormData(data);
errorsRef.current = errors ?? [];
setHasErrors(errorsRef.current.length > 0);
maybeRefreshSchema(data);
},
[maybeRefreshSchema],
);
  // Display name of the currently selected semantic layer type (if any).
  const selectedTypeName =
    types.find(type => type.id === selectedType)?.name ?? '';
  // Modal title varies by mode (edit vs. create) and wizard step.
  const title = isEditMode
    ? t('Edit %s', selectedTypeName || t('Semantic Layer'))
    : step === 'type'
    ? t('New Semantic Layer')
    : t('Configure %s', selectedTypeName);
  return (
    <StandardModal
      show={show}
      onHide={onHide}
      onSave={handleSave}
      title={title}
      icon={isEditMode ? <Icons.EditOutlined /> : <Icons.PlusOutlined />}
      width={step === 'type' ? MODAL_STANDARD_WIDTH : MODAL_MEDIUM_WIDTH}
      saveDisabled={
        step === 'type' ? !selectedType : saving || !name.trim() || hasErrors
      }
      saveText={
        step === 'type' ? undefined : isEditMode ? t('Save') : t('Create')
      }
      saveLoading={saving}
      contentLoading={loading}
    >
      {step === 'type' ? (
        <>
          {/* Step 1: pick which semantic layer plugin to configure. */}
          <ModalFormField label={t('Type')}>
            <Select
              ariaLabel={t('Semantic layer type')}
              placeholder={t('Select a semantic layer type')}
              value={selectedType}
              onChange={value => setSelectedType(value as string)}
              options={types.map(type => ({
                value: type.id,
                label: type.name,
              }))}
              getPopupContainer={() => document.body}
              dropdownAlign={{
                points: ['tl', 'bl'],
                offset: [0, 4],
                overflow: { adjustX: 0, adjustY: 1 },
              }}
            />
          </ModalFormField>
        </>
      ) : (
        <>
          {/* Step 2: name plus the plugin-specific configuration form. */}
          {!isEditMode && (
            <Button
              buttonStyle="link"
              icon={<Icons.CaretLeftOutlined iconSize="s" />}
              onClick={handleBack}
            >
              {t('Back')}
            </Button>
          )}
          <ModalFormField label={t('Name')} required>
            <Input
              value={name}
              onChange={e => setName(e.target.value)}
              placeholder={t('Name of the semantic layer')}
            />
          </ModalFormField>
          {configSchema && (
            <JsonForms
              schema={configSchema}
              uischema={uiSchema}
              data={formData}
              renderers={renderers}
              cells={cellRegistryEntries}
              config={{ refreshingSchema, formData }}
              validationMode={validationMode}
              onChange={handleFormChange}
            />
          )}
        </>
      )}
    </StandardModal>
  );
}

View File

@@ -17,9 +17,15 @@
* under the License.
*/
import { t } from '@apache-superset/core/translation';
import { getExtensionsRegistry, SupersetClient } from '@superset-ui/core';
import { styled } from '@apache-superset/core/theme';
import {
getExtensionsRegistry,
SupersetClient,
isFeatureEnabled,
FeatureFlag,
} from '@superset-ui/core';
import { css, styled, useTheme } from '@apache-superset/core/theme';
import { useState, useMemo, useEffect, useCallback } from 'react';
import type { CellProps } from 'react-table';
import rison from 'rison';
import { useSelector } from 'react-redux';
import { useQueryParams, BooleanParam } from 'use-query-params';
@@ -33,7 +39,9 @@ import {
import withToasts from 'src/components/MessageToasts/withToasts';
import SubMenu, { SubMenuProps } from 'src/features/home/SubMenu';
import {
Button,
DeleteModal,
Dropdown,
Tooltip,
List,
Loading,
@@ -43,6 +51,7 @@ import {
ListView,
ListViewFilterOperator as FilterOperator,
ListViewFilters,
type ListViewFetchDataConfig,
} from 'src/components';
import { Typography } from '@superset-ui/core/components/Typography';
import { getUrlParam } from 'src/utils/urlUtils';
@@ -55,10 +64,12 @@ import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes';
import type { MenuObjectProps } from 'src/types/bootstrapTypes';
import DatabaseModal from 'src/features/databases/DatabaseModal';
import UploadDataModal from 'src/features/databases/UploadDataModel';
import SemanticLayerModal from 'src/features/semanticLayers/SemanticLayerModal';
import { DatabaseObject } from 'src/features/databases/types';
import { QueryObjectColumns } from 'src/views/CRUD/types';
import { WIDER_DROPDOWN_WIDTH } from 'src/components/ListView/utils';
import { ModalTitleWithIcon } from 'src/components/ModalTitleWithIcon';
import type Owner from 'src/types/Owner';
const extensionsRegistry = getExtensionsRegistry();
const DatabaseDeleteRelatedExtension = extensionsRegistry.get(
@@ -70,6 +81,13 @@ const dbConfigExtraExtension = extensionsRegistry.get(
const PAGE_SIZE = 25;
type ConnectionItem = DatabaseObject & {
source_type?: 'database' | 'semantic_layer';
sl_type?: string;
changed_by?: Owner;
changed_on_delta_humanized?: string;
};
interface DatabaseDeleteObject extends DatabaseObject {
charts: any;
dashboards: any;
@@ -108,20 +126,106 @@ function DatabaseList({
addSuccessToast,
user,
}: DatabaseListProps) {
const theme = useTheme();
const showSemanticLayers = isFeatureEnabled(FeatureFlag.SemanticLayers);
// Standard database list view resource (used when SL flag is OFF)
const {
state: {
loading,
resourceCount: databaseCount,
resourceCollection: databases,
loading: dbLoading,
resourceCount: dbCount,
resourceCollection: dbCollection,
},
hasPerm,
fetchData,
refreshData,
fetchData: dbFetchData,
refreshData: dbRefreshData,
} = useListViewResource<DatabaseObject>(
'database',
t('database'),
addDangerToast,
);
// Combined endpoint state (used when SL flag is ON)
const [combinedItems, setCombinedItems] = useState<ConnectionItem[]>([]);
const [combinedCount, setCombinedCount] = useState(0);
const [combinedLoading, setCombinedLoading] = useState(true);
const [lastFetchConfig, setLastFetchConfig] =
useState<ListViewFetchDataConfig | null>(null);
  // Fetch one page of the combined database + semantic layer listing from the
  // unified connections endpoint, translating ListView filter state into the
  // rison query string the API expects.
  const combinedFetchData = useCallback(
    (config: ListViewFetchDataConfig) => {
      // Remember the request so combinedRefreshData can replay it later.
      setLastFetchConfig(config);
      setCombinedLoading(true);
      const { pageIndex, pageSize, sortBy, filters: filterValues } = config;
      // source_type is handled separately below.
      const sourceTypeFilter = filterValues.find(f => f.id === 'source_type');
      const otherFilters = filterValues
        .filter(f => f.id !== 'source_type')
        .filter(
          ({ value }) => value !== '' && value !== null && value !== undefined,
        )
        .map(({ id, operator: opr, value }) => ({
          col: id,
          opr,
          // Select-style filters wrap the chosen option in { value, label }.
          value:
            value && typeof value === 'object' && 'value' in value
              ? value.value
              : value,
        }));
      const sourceTypeValue =
        sourceTypeFilter?.value && typeof sourceTypeFilter.value === 'object'
          ? (sourceTypeFilter.value as { value: string }).value
          : (sourceTypeFilter?.value as string | undefined);
      if (sourceTypeValue) {
        otherFilters.push({
          col: 'source_type',
          opr: 'eq',
          value: sourceTypeValue,
        });
      }
      const queryParams = rison.encode_uri({
        order_column: sortBy[0].id,
        order_direction: sortBy[0].desc ? 'desc' : 'asc',
        page: pageIndex,
        page_size: pageSize,
        ...(otherFilters.length ? { filters: otherFilters } : {}),
      });
      return SupersetClient.get({
        endpoint: `/api/v1/semantic_layer/connections/?q=${queryParams}`,
      })
        .then(({ json = {} }) => {
          setCombinedItems(json.result);
          setCombinedCount(json.count);
        })
        .catch(() => {
          addDangerToast(t('An error occurred while fetching connections'));
        })
        .finally(() => {
          setCombinedLoading(false);
        });
    },
    [addDangerToast],
  );
const combinedRefreshData = useCallback(() => {
if (lastFetchConfig) {
return combinedFetchData(lastFetchConfig);
}
return undefined;
}, [lastFetchConfig, combinedFetchData]);
// Select the right data source based on feature flag
const loading = showSemanticLayers ? combinedLoading : dbLoading;
const databaseCount = showSemanticLayers ? combinedCount : dbCount;
const databases: ConnectionItem[] = showSemanticLayers
? combinedItems
: dbCollection;
const fetchData = showSemanticLayers ? combinedFetchData : dbFetchData;
const refreshData = showSemanticLayers ? combinedRefreshData : dbRefreshData;
const fullUser = useSelector<any, UserWithPermissionsAndRoles>(
state => state.user,
);
@@ -148,6 +252,13 @@ function DatabaseList({
useState<boolean>(false);
const [columnarUploadDataModalOpen, setColumnarUploadDataModalOpen] =
useState<boolean>(false);
const [semanticLayerModalOpen, setSemanticLayerModalOpen] =
useState<boolean>(false);
const [slCurrentlyEditing, setSlCurrentlyEditing] = useState<string | null>(
null,
);
const [slCurrentlyDeleting, setSlCurrentlyDeleting] =
useState<ConnectionItem | null>(null);
const [allowUploads, setAllowUploads] = useState<boolean>(false);
const isAdmin = isUserAdmin(fullUser);
@@ -320,18 +431,63 @@ function DatabaseList({
};
if (canCreate) {
menuData.buttons = [
{
'data-test': 'btn-create-database',
icon: <Icons.PlusOutlined iconSize="m" />,
name: t('Database'),
buttonStyle: 'primary',
onClick: () => {
// Ensure modal will be opened in add mode
handleDatabaseEditModal({ modalOpen: true });
const openDatabaseModal = () =>
handleDatabaseEditModal({ modalOpen: true });
if (isFeatureEnabled(FeatureFlag.SemanticLayers)) {
menuData.buttons = [
{
name: t('New'),
buttonStyle: 'primary',
component: (
<Dropdown
menu={{
items: [
{
key: 'database',
label: t('Database'),
onClick: openDatabaseModal,
},
{
key: 'semantic-layer',
label: t('Semantic Layer'),
onClick: () => {
setSemanticLayerModalOpen(true);
},
},
],
}}
trigger={['click']}
>
<Button
data-test="btn-create-new"
buttonStyle="primary"
icon={<Icons.PlusOutlined iconSize="m" />}
>
{t('New')}
<Icons.DownOutlined
iconSize="s"
css={css`
margin-left: ${theme.sizeUnit * 1.5}px;
margin-right: -${theme.sizeUnit * 2}px;
`}
/>
</Button>
</Dropdown>
),
},
},
];
];
} else {
menuData.buttons = [
{
'data-test': 'btn-create-database',
icon: <Icons.PlusOutlined iconSize="m" />,
name: t('Database'),
buttonStyle: 'primary',
onClick: openDatabaseModal,
},
];
}
}
const handleDatabaseExport = useCallback(
@@ -345,7 +501,7 @@ function DatabaseList({
await handleResourceExport('database', [database.id], () => {
setPreparingExport(false);
});
} catch (error) {
} catch {
setPreparingExport(false);
addDangerToast(t('There was an issue exporting the database'));
}
@@ -401,6 +557,23 @@ function DatabaseList({
const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }];
  // Delete a semantic layer row, then refresh the list and close the delete
  // modal; failures surface as a danger toast. NOTE: `database_name` doubles
  // as the display name for semantic layer rows in the combined listing.
  function handleSemanticLayerDelete(item: ConnectionItem) {
    SupersetClient.delete({
      endpoint: `/api/v1/semantic_layer/${item.uuid}`,
    }).then(
      () => {
        refreshData();
        addSuccessToast(t('Deleted: %s', item.database_name));
        setSlCurrentlyDeleting(null);
      },
      createErrorHandler(errMsg =>
        addDangerToast(
          t('There was an issue deleting %s: %s', item.database_name, errMsg),
        ),
      ),
    );
  }
const columns = useMemo(
() => [
{
@@ -413,7 +586,7 @@ function DatabaseList({
accessor: 'backend',
Header: t('Backend'),
size: 'xl',
disableSortBy: true, // TODO: api support for sorting by 'backend'
disableSortBy: true,
id: 'backend',
},
{
@@ -427,13 +600,12 @@ function DatabaseList({
<span>{t('AQE')}</span>
</Tooltip>
),
Cell: ({
row: {
original: { allow_run_async: allowRunAsync },
},
}: {
row: { original: { allow_run_async: boolean } };
}) => <BooleanDisplay value={allowRunAsync} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_run_async)} />
),
size: 'sm',
id: 'allow_run_async',
},
@@ -448,33 +620,36 @@ function DatabaseList({
<span>{t('DML')}</span>
</Tooltip>
),
Cell: ({
row: {
original: { allow_dml: allowDML },
},
}: any) => <BooleanDisplay value={allowDML} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_dml)} />
),
size: 'sm',
id: 'allow_dml',
},
{
accessor: 'allow_file_upload',
Header: t('File upload'),
Cell: ({
row: {
original: { allow_file_upload: allowFileUpload },
},
}: any) => <BooleanDisplay value={allowFileUpload} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_file_upload)} />
),
size: 'md',
id: 'allow_file_upload',
},
{
accessor: 'expose_in_sqllab',
Header: t('Expose in SQL Lab'),
Cell: ({
row: {
original: { expose_in_sqllab: exposeInSqllab },
},
}: any) => <BooleanDisplay value={exposeInSqllab} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.expose_in_sqllab)} />
),
size: 'md',
id: 'expose_in_sqllab',
},
@@ -486,7 +661,9 @@ function DatabaseList({
changed_on_delta_humanized: changedOn,
},
},
}: any) => <ModifiedInfo date={changedOn} user={changedBy} />,
}: CellProps<ConnectionItem>) => (
<ModifiedInfo date={changedOn || ''} user={changedBy} />
),
Header: t('Last modified'),
accessor: 'changed_on_delta_humanized',
size: 'xl',
@@ -494,6 +671,48 @@ function DatabaseList({
},
{
Cell: ({ row: { original } }: any) => {
const isSemanticLayer = original.source_type === 'semantic_layer';
if (isSemanticLayer) {
if (!canEdit && !canDelete) return null;
return (
<Actions className="actions">
{canDelete && (
<Tooltip
id="delete-action-tooltip"
title={t('Delete')}
placement="bottom"
>
<span
role="button"
tabIndex={0}
className="action-button"
onClick={() => setSlCurrentlyDeleting(original)}
>
<Icons.DeleteOutlined iconSize="l" />
</span>
</Tooltip>
)}
{canEdit && (
<Tooltip
id="edit-action-tooltip"
title={t('Edit')}
placement="bottom"
>
<span
role="button"
tabIndex={0}
className="action-button"
onClick={() => setSlCurrentlyEditing(original.uuid)}
>
<Icons.EditOutlined iconSize="l" />
</span>
</Tooltip>
)}
</Actions>
);
}
const handleEdit = () =>
handleDatabaseEditModal({ database: original, modalOpen: true });
const handleDelete = () => openDatabaseDeleteModal(original);
@@ -579,6 +798,12 @@ function DatabaseList({
hidden: !canEdit && !canDelete,
disableSortBy: true,
},
{
accessor: 'source_type',
hidden: true,
disableSortBy: true,
id: 'source_type',
},
{
accessor: QueryObjectColumns.ChangedBy,
hidden: true,
@@ -596,8 +821,8 @@ function DatabaseList({
],
);
const filters: ListViewFilters = useMemo(
() => [
const filters: ListViewFilters = useMemo(() => {
const baseFilters: ListViewFilters = [
{
Header: t('Name'),
key: 'search',
@@ -605,62 +830,83 @@ function DatabaseList({
input: 'search',
operator: FilterOperator.Contains,
},
{
Header: t('Expose in SQL Lab'),
key: 'expose_in_sql_lab',
id: 'expose_in_sqllab',
];
if (showSemanticLayers) {
baseFilters.push({
Header: t('Source'),
key: 'source_type',
id: 'source_type',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
{ label: t('Database'), value: 'database' },
{ label: t('Semantic Layer'), value: 'semantic_layer' },
],
},
{
Header: (
<Tooltip
id="allow-run-async-filter-header-tooltip"
title={t('Asynchronous query execution')}
placement="top"
>
<span>{t('AQE')}</span>
</Tooltip>
),
key: 'allow_run_async',
id: 'allow_run_async',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: t('Modified by'),
key: 'changed_by',
id: 'changed_by',
input: 'select',
operator: FilterOperator.RelationOneMany,
unfilteredLabel: t('All'),
fetchSelects: createFetchRelated(
'database',
'changed_by',
createErrorHandler(errMsg =>
t(
'An error occurred while fetching dataset datasource values: %s',
errMsg,
),
});
}
if (!showSemanticLayers) {
baseFilters.push(
{
Header: t('Expose in SQL Lab'),
key: 'expose_in_sql_lab',
id: 'expose_in_sqllab',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: (
<Tooltip
id="allow-run-async-filter-header-tooltip"
title={t('Asynchronous query execution')}
placement="top"
>
<span>{t('AQE')}</span>
</Tooltip>
),
user,
),
paginate: true,
dropdownStyle: { minWidth: WIDER_DROPDOWN_WIDTH },
},
],
[user],
);
key: 'allow_run_async',
id: 'allow_run_async',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: t('Modified by'),
key: 'changed_by',
id: 'changed_by',
input: 'select',
operator: FilterOperator.RelationOneMany,
unfilteredLabel: t('All'),
fetchSelects: createFetchRelated(
'database',
'changed_by',
createErrorHandler(errMsg =>
t(
'An error occurred while fetching dataset datasource values: %s',
errMsg,
),
),
user,
),
paginate: true,
dropdownStyle: { minWidth: WIDER_DROPDOWN_WIDTH },
},
);
}
return baseFilters;
}, [showSemanticLayers]);
return (
<>
@@ -703,6 +949,48 @@ function DatabaseList({
allowedExtensions={COLUMNAR_EXTENSIONS}
type="columnar"
/>
<SemanticLayerModal
show={semanticLayerModalOpen}
onHide={() => {
setSemanticLayerModalOpen(false);
refreshData();
}}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
/>
<SemanticLayerModal
show={!!slCurrentlyEditing}
onHide={() => {
setSlCurrentlyEditing(null);
refreshData();
}}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
semanticLayerUuid={slCurrentlyEditing ?? undefined}
/>
{slCurrentlyDeleting && (
<DeleteModal
description={
<p>
{t('Are you sure you want to delete')}{' '}
<b>{slCurrentlyDeleting.database_name}</b>?
</p>
}
onConfirm={() => {
if (slCurrentlyDeleting) {
handleSemanticLayerDelete(slCurrentlyDeleting);
}
}}
onHide={() => setSlCurrentlyDeleting(null)}
open
title={
<ModalTitleWithIcon
icon={<Icons.DeleteOutlined />}
title={t('Delete Semantic Layer?')}
/>
}
/>
)}
{databaseCurrentlyDeleting && (
<DeleteModal
description={

View File

@@ -0,0 +1,69 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from functools import partial
from typing import Any
from flask_appbuilder.models.sqla import Model
from sqlalchemy.exc import SQLAlchemyError
from superset.commands.base import BaseCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerCreateFailedError,
SemanticLayerInvalidError,
)
from superset.daos.semantic_layer import SemanticLayerDAO
from superset.semantic_layers.registry import registry
from superset.utils import json
from superset.utils.decorators import on_error, transaction
logger = logging.getLogger(__name__)
class CreateSemanticLayerCommand(BaseCommand):
    """Create a new semantic layer from a validated user payload."""

    def __init__(self, data: dict[str, Any]):
        # Copy so the normalization in run() does not mutate the caller's dict.
        self._properties = data.copy()

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError, ValueError),
            reraise=SemanticLayerCreateFailedError,
        )
    )
    def run(self) -> Model:
        """Validate and persist the semantic layer, returning the new model."""
        self.validate()

        # The model stores the configuration as a JSON string.
        if isinstance(self._properties.get("configuration"), dict):
            self._properties["configuration"] = json.dumps(
                self._properties["configuration"]
            )

        return SemanticLayerDAO.create(attributes=self._properties)

    def validate(self) -> None:
        """Raise ``SemanticLayerInvalidError`` when the payload is invalid."""
        sl_type = self._properties.get("type")
        if sl_type not in registry:
            raise SemanticLayerInvalidError(f"Unknown type: {sl_type}")

        name: str = self._properties.get("name", "")
        if not SemanticLayerDAO.validate_uniqueness(name):
            raise SemanticLayerInvalidError(f"Name already exists: {name}")

        # Validate configuration against the plugin. A missing configuration
        # is a client error (422), not a KeyError bubbling up as a 500.
        configuration = self._properties.get("configuration")
        if configuration is None:
            raise SemanticLayerInvalidError("Configuration is required")
        cls = registry[sl_type]
        cls.from_configuration(configuration)

View File

@@ -0,0 +1,56 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from functools import partial
from sqlalchemy.exc import SQLAlchemyError
from superset.commands.base import BaseCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerDeleteFailedError,
SemanticLayerNotFoundError,
)
from superset.daos.semantic_layer import SemanticLayerDAO
from superset.semantic_layers.models import SemanticLayer
from superset.utils.decorators import on_error, transaction
logger = logging.getLogger(__name__)
class DeleteSemanticLayerCommand(BaseCommand):
    """Delete a semantic layer identified by its UUID."""

    def __init__(self, uuid: str):
        self._uuid = uuid
        self._model: SemanticLayer | None = None

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError,),
            reraise=SemanticLayerDeleteFailedError,
        )
    )
    def run(self) -> None:
        """Resolve the target semantic layer and delete it."""
        self.validate()
        assert self._model
        SemanticLayerDAO.delete([self._model])

    def validate(self) -> None:
        """Raise ``SemanticLayerNotFoundError`` if the UUID does not resolve."""
        model = SemanticLayerDAO.find_by_uuid(self._uuid)
        if not model:
            raise SemanticLayerNotFoundError()
        self._model = model

View File

@@ -19,6 +19,8 @@ from flask_babel import lazy_gettext as _
from superset.commands.exceptions import (
CommandException,
CommandInvalidError,
CreateFailedError,
DeleteFailedError,
ForbiddenError,
UpdateFailedError,
)
@@ -39,3 +41,28 @@ class SemanticViewInvalidError(CommandInvalidError):
class SemanticViewUpdateFailedError(UpdateFailedError):
message = _("Semantic view could not be updated.")
class SemanticLayerNotFoundError(CommandException):
    """Raised when no semantic layer matches the requested UUID (HTTP 404)."""

    status = 404
    message = _("Semantic layer does not exist")


class SemanticLayerForbiddenError(ForbiddenError):
    """Raised when the current user may not modify this semantic layer."""

    message = _("Changing this semantic layer is forbidden")


class SemanticLayerInvalidError(CommandInvalidError):
    """Raised when a submitted semantic layer payload fails validation."""

    message = _("Semantic layer parameters are invalid.")


class SemanticLayerCreateFailedError(CreateFailedError):
    """Raised when persisting a new semantic layer fails."""

    message = _("Semantic layer could not be created.")


class SemanticLayerUpdateFailedError(UpdateFailedError):
    """Raised when persisting changes to a semantic layer fails."""

    message = _("Semantic layer could not be updated.")


class SemanticLayerDeleteFailedError(DeleteFailedError):
    """Raised when deleting a semantic layer fails."""

    message = _("Semantic layer could not be deleted.")

View File

@@ -26,13 +26,17 @@ from sqlalchemy.exc import SQLAlchemyError
from superset import security_manager
from superset.commands.base import BaseCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerInvalidError,
SemanticLayerNotFoundError,
SemanticLayerUpdateFailedError,
SemanticViewForbiddenError,
SemanticViewNotFoundError,
SemanticViewUpdateFailedError,
)
from superset.daos.semantic_layer import SemanticViewDAO
from superset.daos.semantic_layer import SemanticLayerDAO, SemanticViewDAO
from superset.exceptions import SupersetSecurityException
from superset.semantic_layers.models import SemanticView
from superset.semantic_layers.models import SemanticLayer, SemanticView
from superset.semantic_layers.registry import registry
from superset.utils import json
from superset.utils.decorators import on_error, transaction
@@ -83,3 +87,40 @@ class UpdateSemanticViewCommand(BaseCommand):
f"A semantic view with name '{name}' and the same "
"configuration already exists in this semantic layer."
)
class UpdateSemanticLayerCommand(BaseCommand):
    """Update an existing semantic layer identified by UUID."""

    def __init__(self, uuid: str, data: dict[str, Any]):
        self._uuid = uuid
        # Copy so the normalization in run() does not mutate the caller's dict.
        self._properties = data.copy()
        self._model: SemanticLayer | None = None

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError, ValueError),
            reraise=SemanticLayerUpdateFailedError,
        )
    )
    def run(self) -> Model:
        """Validate and persist the changes, returning the updated model."""
        self.validate()
        assert self._model

        # The model stores the configuration as a JSON string.
        if isinstance(self._properties.get("configuration"), dict):
            self._properties["configuration"] = json.dumps(
                self._properties["configuration"]
            )

        return SemanticLayerDAO.update(self._model, attributes=self._properties)

    def validate(self) -> None:
        """Raise a command exception when the layer or the payload is invalid."""
        self._model = SemanticLayerDAO.find_by_uuid(self._uuid)
        if not self._model:
            raise SemanticLayerNotFoundError()

        name = self._properties.get("name")
        if name and not SemanticLayerDAO.validate_update_uniqueness(self._uuid, name):
            raise SemanticLayerInvalidError(f"Name already exists: {name}")

        if configuration := self._properties.get("configuration"):
            sl_type = self._model.type
            # Guard against a plugin type that has since been unregistered so
            # we surface a validation error (422) instead of an unhandled
            # KeyError — consistent with CreateSemanticLayerCommand.validate.
            if sl_type not in registry:
                raise SemanticLayerInvalidError(f"Unknown type: {sl_type}")
            cls = registry[sl_type]
            cls.from_configuration(configuration)

View File

@@ -21,6 +21,8 @@ from __future__ import annotations
from typing import Any
from sqlalchemy.exc import StatementError
from superset_core.semantic_layers.daos import (
AbstractSemanticLayerDAO,
AbstractSemanticViewDAO,
@@ -38,6 +40,23 @@ class SemanticLayerDAO(AbstractSemanticLayerDAO):
model_cls = SemanticLayer
@staticmethod
def find_by_uuid(uuid_str: str) -> SemanticLayer | None:
try:
return (
db.session.query(SemanticLayer)
.filter(SemanticLayer.uuid == uuid_str)
.one_or_none()
)
except (ValueError, StatementError):
return None
    @classmethod
    def find_all(cls, skip_base_filter: bool = False) -> list[SemanticLayer]:
        """Return all semantic layers, optionally bypassing the base filter."""
        # _apply_base_filter applies the DAO's standard query filtering
        # (presumably access control — confirm against the base class) unless
        # skip_base_filter is True.
        query = db.session.query(SemanticLayer)
        query = cls._apply_base_filter(query, skip_base_filter)
        return query.all()
@classmethod
def validate_uniqueness(cls, name: str) -> bool:
"""

View File

@@ -269,8 +269,12 @@ class SupersetAppInitializer: # pylint: disable=too-many-public-methods
appbuilder.add_api(RLSRestApi)
appbuilder.add_api(SavedQueryRestApi)
if feature_flag_manager.is_feature_enabled("SEMANTIC_LAYERS"):
from superset.semantic_layers.api import SemanticViewRestApi
from superset.semantic_layers.api import (
SemanticLayerRestApi,
SemanticViewRestApi,
)
appbuilder.add_api(SemanticLayerRestApi)
appbuilder.add_api(SemanticViewRestApi)
appbuilder.add_api(TagRestApi)
appbuilder.add_api(SqlLabRestApi)

View File

@@ -17,24 +17,48 @@
from __future__ import annotations
import logging
from typing import Any
from flask import request, Response
from flask_appbuilder.api import expose, protect
from flask import make_response, request, Response
from flask_appbuilder.api import expose, protect, rison, safe
from flask_appbuilder.api.schemas import get_list_schema
from flask_appbuilder.models.sqla.interface import SQLAInterface
from marshmallow import ValidationError
from pydantic import ValidationError as PydanticValidationError
from sqlalchemy.orm import load_only
from superset import event_logger
from superset import db, event_logger, is_feature_enabled
from superset.commands.semantic_layer.create import CreateSemanticLayerCommand
from superset.commands.semantic_layer.delete import DeleteSemanticLayerCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerCreateFailedError,
SemanticLayerDeleteFailedError,
SemanticLayerInvalidError,
SemanticLayerNotFoundError,
SemanticLayerUpdateFailedError,
SemanticViewForbiddenError,
SemanticViewInvalidError,
SemanticViewNotFoundError,
SemanticViewUpdateFailedError,
)
from superset.commands.semantic_layer.update import UpdateSemanticViewCommand
from superset.commands.semantic_layer.update import (
UpdateSemanticLayerCommand,
UpdateSemanticViewCommand,
)
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
from superset.semantic_layers.models import SemanticView
from superset.semantic_layers.schemas import SemanticViewPutSchema
from superset.daos.semantic_layer import SemanticLayerDAO
from superset.models.core import Database
from superset.semantic_layers.models import SemanticLayer, SemanticView
from superset.semantic_layers.registry import registry
from superset.semantic_layers.schemas import (
SemanticLayerPostSchema,
SemanticLayerPutSchema,
SemanticViewPutSchema,
)
from superset.superset_typing import FlaskResponse
from superset.utils import json
from superset.views.base_api import (
BaseSupersetApi,
BaseSupersetModelRestApi,
requires_json,
statsd_metrics,
@@ -43,6 +67,94 @@ from superset.views.base_api import (
logger = logging.getLogger(__name__)
def _serialize_layer(layer: SemanticLayer) -> dict[str, Any]:
config = layer.configuration
if isinstance(config, str):
config = json.loads(config)
return {
"uuid": str(layer.uuid),
"name": layer.name,
"description": layer.description,
"type": layer.type,
"cache_timeout": layer.cache_timeout,
"configuration": config or {},
"changed_on_delta_humanized": layer.changed_on_delta_humanized(),
}
def _infer_discriminators(
schema: dict[str, Any],
data: dict[str, Any],
) -> dict[str, Any]:
"""
Infer discriminator values for union fields when the frontend omits them.
Walks the schema's properties looking for discriminated unions (fields with a
``discriminator.mapping``). For each one, tries to match the submitted data
against one of the variants by checking which variant's required fields are
present, then injects the discriminator value.
"""
defs = schema.get("$defs", {})
for prop_name, prop_schema in schema.get("properties", {}).items():
value = data.get(prop_name)
if not isinstance(value, dict):
continue
# Find discriminated union via discriminator mapping
mapping = (
prop_schema.get("discriminator", {}).get("mapping")
if "discriminator" in prop_schema
else None
)
if not mapping:
continue
discriminator_field = prop_schema["discriminator"].get("propertyName")
if not discriminator_field or discriminator_field in value:
continue
# Try each variant: match by required fields present in the data
for disc_value, ref in mapping.items():
ref_name = ref.rsplit("/", 1)[-1] if "/" in ref else ref
variant_def = defs.get(ref_name, {})
required = set(variant_def.get("required", []))
# Exclude the discriminator itself from the check
required.discard(discriminator_field)
if required and required.issubset(value.keys()):
data = {
**data,
prop_name: {**value, discriminator_field: disc_value},
}
break
return data
def _parse_partial_config(
    cls: Any,
    config: dict[str, Any],
) -> Any:
    """
    Parse a partial configuration, handling discriminator inference and
    falling back to lenient validation when strict parsing fails.

    Returns ``None`` when the configuration cannot be parsed at all.
    """
    config_class = cls.configuration_class

    # Fill in discriminator values the frontend may have omitted before
    # attempting validation.
    json_schema = config_class.model_json_schema()
    enriched = _infer_discriminators(json_schema, config)

    # First a strict parse, then a lenient "partial" parse.
    for kwargs in ({}, {"context": {"partial": True}}):
        try:
            return config_class.model_validate(enriched, **kwargs)
        except (PydanticValidationError, ValueError):
            continue
    return None
class SemanticViewRestApi(BaseSupersetModelRestApi):
datamodel = SQLAInterface(SemanticView)
@@ -126,3 +238,543 @@ class SemanticViewRestApi(BaseSupersetModelRestApi):
)
response = self.response_422(message=str(ex))
return response
class SemanticLayerRestApi(BaseSupersetApi):
    """REST API for semantic layer connections.

    Provides CRUD endpoints plus helpers for type discovery and
    JSON-Schema-driven configuration forms.
    """
    resource_name = "semantic_layer"
    allow_browser_login = True
    class_permission_name = "SemanticLayer"
    method_permission_name = {
        **MODEL_API_RW_METHOD_PERMISSION_MAP,
        # Custom schema/discovery endpoints map onto the generic "read" permission.
        "types": "read",
        "configuration_schema": "read",
        "runtime_schema": "read",
    }
    openapi_spec_tag = "Semantic Layers"
    # Marshmallow schemas used by post()/put() to validate request bodies.
    add_model_schema = SemanticLayerPostSchema()
    edit_model_schema = SemanticLayerPutSchema()
@expose("/types", methods=("GET",))
@protect()
@safe
@statsd_metrics
def types(self) -> FlaskResponse:
"""List available semantic layer types.
---
get:
summary: List available semantic layer types
responses:
200:
description: A list of semantic layer types
401:
$ref: '#/components/responses/401'
"""
result = [
{"id": key, "name": cls.name, "description": cls.description} # type: ignore[attr-defined]
for key, cls in registry.items()
]
return self.response(200, result=result)
@expose("/schema/configuration", methods=("POST",))
@protect()
@safe
@statsd_metrics
@requires_json
def configuration_schema(self) -> FlaskResponse:
"""Get configuration schema for a semantic layer type.
---
post:
summary: Get configuration schema for a semantic layer type
requestBody:
required: true
content:
application/json:
schema:
type: object
properties:
type:
type: string
configuration:
type: object
responses:
200:
description: Configuration JSON Schema
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
"""
body = request.json or {}
sl_type = body.get("type")
cls = registry.get(sl_type) # type: ignore[arg-type]
if not cls:
return self.response_400(message=f"Unknown type: {sl_type}")
parsed_config = None
if config := body.get("configuration"):
parsed_config = _parse_partial_config(cls, config)
warning: str | None = None
try:
schema = cls.get_configuration_schema(parsed_config)
except Exception as ex: # pylint: disable=broad-except
warning = str(ex)
logger.exception(
"Error enriching semantic layer configuration schema for type %s",
sl_type,
)
# Connection or query failures during schema enrichment should not
# prevent the form from rendering — return the base schema instead.
schema = cls.get_configuration_schema(None)
payload: dict[str, Any] = {"result": schema}
if warning:
payload["warning"] = warning
resp = make_response(json.dumps(payload, sort_keys=False), 200)
resp.headers["Content-Type"] = "application/json; charset=utf-8"
return resp
@expose("/<uuid>/schema/runtime", methods=("POST",))
@protect()
@safe
@statsd_metrics
def runtime_schema(self, uuid: str) -> FlaskResponse:
"""Get runtime schema for a stored semantic layer.
---
post:
summary: Get runtime schema for a semantic layer
parameters:
- in: path
schema:
type: string
name: uuid
requestBody:
content:
application/json:
schema:
type: object
properties:
runtime_data:
type: object
responses:
200:
description: Runtime JSON Schema
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
"""
layer = SemanticLayerDAO.find_by_uuid(uuid)
if not layer:
return self.response_404()
body = request.get_json(silent=True) or {}
runtime_data = body.get("runtime_data")
cls = registry.get(layer.type)
if not cls:
return self.response_400(message=f"Unknown type: {layer.type}")
try:
schema = cls.get_runtime_schema(
layer.implementation.configuration, # type: ignore[attr-defined]
runtime_data,
)
except Exception as ex: # pylint: disable=broad-except
return self.response_400(message=str(ex))
return self.response(200, result=schema)
@expose("/", methods=("POST",))
@protect()
@safe
@statsd_metrics
@requires_json
def post(self) -> FlaskResponse:
"""Create a semantic layer.
---
post:
summary: Create a semantic layer
requestBody:
required: true
content:
application/json:
schema:
type: object
properties:
name:
type: string
description:
type: string
type:
type: string
configuration:
type: object
cache_timeout:
type: integer
responses:
201:
description: Semantic layer created
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
422:
$ref: '#/components/responses/422'
"""
try:
item = self.add_model_schema.load(request.json)
except ValidationError as error:
return self.response_400(message=error.messages)
try:
new_model = CreateSemanticLayerCommand(item).run()
return self.response(201, result={"uuid": str(new_model.uuid)})
except SemanticLayerInvalidError as ex:
return self.response_422(message=str(ex))
except SemanticLayerCreateFailedError as ex:
logger.error(
"Error creating semantic layer: %s",
str(ex),
exc_info=True,
)
return self.response_422(message=str(ex))
@expose("/<uuid>", methods=("PUT",))
@protect()
@safe
@statsd_metrics
@requires_json
def put(self, uuid: str) -> FlaskResponse:
"""Update a semantic layer.
---
put:
summary: Update a semantic layer
parameters:
- in: path
schema:
type: string
name: uuid
requestBody:
required: true
content:
application/json:
schema:
type: object
properties:
name:
type: string
description:
type: string
configuration:
type: object
cache_timeout:
type: integer
responses:
200:
description: Semantic layer updated
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
"""
try:
item = self.edit_model_schema.load(request.json)
except ValidationError as error:
return self.response_400(message=error.messages)
try:
changed_model = UpdateSemanticLayerCommand(uuid, item).run()
return self.response(200, result={"uuid": str(changed_model.uuid)})
except SemanticLayerNotFoundError:
return self.response_404()
except SemanticLayerInvalidError as ex:
return self.response_422(message=str(ex))
except SemanticLayerUpdateFailedError as ex:
logger.error(
"Error updating semantic layer: %s",
str(ex),
exc_info=True,
)
return self.response_422(message=str(ex))
@expose("/<uuid>", methods=("DELETE",))
@protect()
@safe
@statsd_metrics
def delete(self, uuid: str) -> FlaskResponse:
"""Delete a semantic layer.
---
delete:
summary: Delete a semantic layer
parameters:
- in: path
schema:
type: string
name: uuid
responses:
200:
description: Semantic layer deleted
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
"""
try:
DeleteSemanticLayerCommand(uuid).run()
return self.response(200, message="OK")
except SemanticLayerNotFoundError:
return self.response_404()
except SemanticLayerDeleteFailedError as ex:
logger.error(
"Error deleting semantic layer: %s",
str(ex),
exc_info=True,
)
return self.response_422(message=str(ex))
@expose("/connections/", methods=("GET",))
@protect()
@safe
@statsd_metrics
@rison(get_list_schema)
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.connections",
log_to_statsd=False,
)
def connections(self, **kwargs: Any) -> FlaskResponse:
"""List databases and semantic layers combined.
---
get:
summary: List databases and semantic layers combined
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_list_schema'
responses:
200:
description: Combined list of databases and semantic layers
401:
$ref: '#/components/responses/401'
500:
$ref: '#/components/responses/500'
"""
args = kwargs.get("rison", {})
page = args.get("page", 0)
page_size = args.get("page_size", 25)
order_column = args.get("order_column", "changed_on")
order_direction = args.get("order_direction", "desc")
filters = args.get("filters", [])
source_type, name_filter = self._parse_connection_filters(filters)
if not is_feature_enabled("SEMANTIC_LAYERS"):
return self.response_404()
all_items = self._fetch_connection_items(source_type, name_filter)
sort_key = self._get_connection_sort_key(order_column)
all_items.sort(key=sort_key, reverse=order_direction == "desc") # type: ignore
total_count = len(all_items)
start = page * page_size
page_items = all_items[start : start + page_size]
result = [
self._serialize_database(obj)
if item_type == "database"
else self._serialize_semantic_layer(obj)
for item_type, obj in page_items
]
return self.response(200, count=total_count, result=result)
@staticmethod
def _parse_connection_filters(
filters: list[dict[str, Any]],
) -> tuple[str, str | None]:
"""Parse filters into source_type and name_filter."""
source_type = "all"
name_filter = None
for f in filters:
if f.get("col") == "source_type":
source_type = f.get("value", "all")
elif f.get("col") == "database_name" and f.get("opr") == "ct":
name_filter = f.get("value")
return source_type, name_filter
    @staticmethod
    def _fetch_connection_items(
        source_type: str,
        name_filter: str | None,
    ) -> list[tuple[str, Any]]:
        """Fetch database and semantic layer items based on filters.

        Returns ``(kind, object)`` tuples where ``kind`` is ``"database"``
        or ``"semantic_layer"``.
        """
        db_items: list[tuple[str, Database]] = []
        if source_type in ("all", "database"):
            # load_only keeps the query light: only the columns consumed by
            # _serialize_database are selected.
            db_q = db.session.query(Database).options(
                load_only(
                    Database.id,
                    Database.uuid,
                    Database.database_name,
                    Database.backend,
                    Database.allow_run_async,
                    Database.allow_dml,
                    Database.allow_file_upload,
                    Database.expose_in_sqllab,
                    Database.changed_on,
                    Database.changed_by_fk,
                )
            )
            if name_filter:
                # NOTE(review): LIKE wildcards (% and _) inside name_filter are
                # not escaped — confirm substring semantics are intended.
                db_q = db_q.filter(Database.database_name.ilike(f"%{name_filter}%"))
            db_items = [("database", obj) for obj in db_q.all()]
        sl_items: list[tuple[str, SemanticLayer]] = []
        if source_type in ("all", "semantic_layer"):
            # Same pattern for semantic layers, selecting only serialized columns.
            sl_q = db.session.query(SemanticLayer).options(
                load_only(
                    SemanticLayer.uuid,
                    SemanticLayer.name,
                    SemanticLayer.type,
                    SemanticLayer.description,
                    SemanticLayer.changed_on,
                    SemanticLayer.changed_by_fk,
                )
            )
            if name_filter:
                sl_q = sl_q.filter(SemanticLayer.name.ilike(f"%{name_filter}%"))
            sl_items = [("semantic_layer", obj) for obj in sl_q.all()]
        # TODO: move sort + pagination to SQL before GA.
        return db_items + sl_items  # type: ignore
@staticmethod
def _get_connection_sort_key(order_column: str) -> Any:
"""Return a sort key function for connection items."""
def _sort_key_changed_on(
item: tuple[str, Database | SemanticLayer],
) -> float:
changed_on = item[1].changed_on
return changed_on.timestamp() if changed_on else 0.0
def _sort_key_name(
item: tuple[str, Database | SemanticLayer],
) -> str:
obj = item[1]
raw = (
obj.database_name # type: ignore[union-attr]
if item[0] == "database"
else obj.name
)
return raw.lower()
sort_key_map = {
"changed_on_delta_humanized": _sort_key_changed_on,
"database_name": _sort_key_name,
}
return sort_key_map.get(order_column, _sort_key_changed_on)
    @staticmethod
    def _serialize_database(obj: Database) -> dict[str, Any]:
        """Serialize a Database row into the combined connections-list shape."""
        changed_by = obj.changed_by
        return {
            "source_type": "database",
            "id": obj.id,
            "uuid": str(obj.uuid),
            "database_name": obj.database_name,
            "backend": obj.backend,
            "allow_run_async": obj.allow_run_async,
            "allow_dml": obj.allow_dml,
            "allow_file_upload": obj.allow_file_upload,
            "expose_in_sqllab": obj.expose_in_sqllab,
            "changed_on_delta_humanized": obj.changed_on_delta_humanized(),
            # changed_by may be None (e.g. system-created rows).
            "changed_by": {
                "first_name": changed_by.first_name,
                "last_name": changed_by.last_name,
            }
            if changed_by
            else None,
        }
    @staticmethod
    def _serialize_semantic_layer(obj: SemanticLayer) -> dict[str, Any]:
        """Serialize a SemanticLayer row into the combined connections-list
        shape, mirroring the keys emitted by _serialize_database."""
        changed_by = obj.changed_by
        sl_type = obj.type
        cls = registry.get(sl_type)
        # Prefer the plugin's display name; fall back to the raw type id when
        # the plugin is no longer registered.
        type_name = cls.name if cls else sl_type  # type: ignore[attr-defined]
        return {
            "source_type": "semantic_layer",
            "uuid": str(obj.uuid),
            "database_name": obj.name,
            "backend": type_name,
            "sl_type": sl_type,
            "description": obj.description,
            # Database-only capability flags are emitted as None so both row
            # types share one shape.
            "allow_run_async": None,
            "allow_dml": None,
            "allow_file_upload": None,
            "expose_in_sqllab": None,
            "changed_on_delta_humanized": obj.changed_on_delta_humanized(),
            "changed_by": {
                "first_name": changed_by.first_name,
                "last_name": changed_by.last_name,
            }
            if changed_by
            else None,
        }
@expose("/", methods=("GET",))
@protect()
@safe
@statsd_metrics
def get_list(self) -> FlaskResponse:
"""List all semantic layers.
---
get:
summary: List all semantic layers
responses:
200:
description: A list of semantic layers
401:
$ref: '#/components/responses/401'
"""
layers = SemanticLayerDAO.find_all()
result = [_serialize_layer(layer) for layer in layers]
return self.response(200, result=result)
@expose("/<uuid>", methods=("GET",))
@protect()
@safe
@statsd_metrics
def get(self, uuid: str) -> FlaskResponse:
"""Get a single semantic layer.
---
get:
summary: Get a semantic layer by UUID
parameters:
- in: path
schema:
type: string
name: uuid
responses:
200:
description: A semantic layer
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
"""
layer = SemanticLayerDAO.find_by_uuid(uuid)
if not layer:
return self.response_404()
return self.response(200, result=_serialize_layer(layer))

View File

@@ -20,3 +20,18 @@ from marshmallow import fields, Schema
class SemanticViewPutSchema(Schema):
    """Payload for partially updating a semantic view; all fields optional."""
    description = fields.String(allow_none=True)
    cache_timeout = fields.Integer(allow_none=True)
class SemanticLayerPostSchema(Schema):
    """Payload for creating a semantic layer."""
    name = fields.String(required=True)
    description = fields.String(allow_none=True)
    # Key into the semantic layer plugin registry (e.g. "snowflake").
    type = fields.String(required=True)
    # Plugin-specific configuration, validated downstream by the plugin class.
    configuration = fields.Dict(required=True)
    cache_timeout = fields.Integer(allow_none=True)
class SemanticLayerPutSchema(Schema):
    """Payload for partially updating a semantic layer; all fields optional.

    Note: ``type`` is deliberately absent — a layer's type is immutable.
    """
    name = fields.String()
    description = fields.String(allow_none=True)
    configuration = fields.Dict()
    cache_timeout = fields.Integer(allow_none=True)

View File

@@ -0,0 +1,149 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest.mock import MagicMock
import pytest
from pytest_mock import MockerFixture
from superset.commands.semantic_layer.create import CreateSemanticLayerCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerCreateFailedError,
SemanticLayerInvalidError,
)
def test_create_semantic_layer_success(mocker: MockerFixture) -> None:
    """Test successful creation of a semantic layer."""
    new_model = MagicMock()
    dao = mocker.patch(
        "superset.commands.semantic_layer.create.SemanticLayerDAO",
    )
    dao.validate_uniqueness.return_value = True
    dao.create.return_value = new_model
    mock_cls = MagicMock()
    mocker.patch.dict(
        "superset.commands.semantic_layer.create.registry",
        {"snowflake": mock_cls},
    )
    data = {
        "name": "My Layer",
        "type": "snowflake",
        "configuration": {"account": "test"},
    }
    result = CreateSemanticLayerCommand(data).run()
    assert result == new_model
    # The command JSON-serializes the configuration dict before persisting.
    expected = {**data, "configuration": '{"account": "test"}'}
    dao.create.assert_called_once_with(attributes=expected)
    # The raw configuration must be validated against the plugin class.
    mock_cls.from_configuration.assert_called_once_with({"account": "test"})
def test_create_semantic_layer_unknown_type(mocker: MockerFixture) -> None:
    """Test that SemanticLayerInvalidError is raised for unknown type."""
    mocker.patch(
        "superset.commands.semantic_layer.create.SemanticLayerDAO",
    )
    # clear=True empties the registry so "nonexistent" cannot resolve.
    mocker.patch.dict(
        "superset.commands.semantic_layer.create.registry",
        {},
        clear=True,
    )
    data = {
        "name": "My Layer",
        "type": "nonexistent",
        "configuration": {},
    }
    with pytest.raises(SemanticLayerInvalidError):
        CreateSemanticLayerCommand(data).run()
def test_create_semantic_layer_duplicate_name(mocker: MockerFixture) -> None:
    """Test that SemanticLayerInvalidError is raised for duplicate names."""
    dao = mocker.patch(
        "superset.commands.semantic_layer.create.SemanticLayerDAO",
    )
    # Uniqueness check failing is what triggers the invalid error.
    dao.validate_uniqueness.return_value = False
    mocker.patch.dict(
        "superset.commands.semantic_layer.create.registry",
        {"snowflake": MagicMock()},
    )
    data = {
        "name": "Duplicate",
        "type": "snowflake",
        "configuration": {},
    }
    with pytest.raises(SemanticLayerInvalidError):
        CreateSemanticLayerCommand(data).run()
def test_create_semantic_layer_invalid_configuration(
    mocker: MockerFixture,
) -> None:
    """Test that invalid configuration is caught by the @transaction decorator."""
    dao = mocker.patch(
        "superset.commands.semantic_layer.create.SemanticLayerDAO",
    )
    dao.validate_uniqueness.return_value = True
    mock_cls = MagicMock()
    # Plugin-side validation failure is wrapped into a create-failed error.
    mock_cls.from_configuration.side_effect = ValueError("bad config")
    mocker.patch.dict(
        "superset.commands.semantic_layer.create.registry",
        {"snowflake": mock_cls},
    )
    data = {
        "name": "My Layer",
        "type": "snowflake",
        "configuration": {"bad": "data"},
    }
    with pytest.raises(SemanticLayerCreateFailedError):
        CreateSemanticLayerCommand(data).run()
def test_create_semantic_layer_copies_data(mocker: MockerFixture) -> None:
    """Test that the command copies input data and does not mutate it."""
    dao = mocker.patch(
        "superset.commands.semantic_layer.create.SemanticLayerDAO",
    )
    dao.validate_uniqueness.return_value = True
    dao.create.return_value = MagicMock()
    mocker.patch.dict(
        "superset.commands.semantic_layer.create.registry",
        {"snowflake": MagicMock()},
    )
    original_data = {
        "name": "Original",
        "type": "snowflake",
        "configuration": {"account": "test"},
    }
    CreateSemanticLayerCommand(original_data).run()
    # The caller's dict must be untouched (configuration not serialized in place).
    assert original_data == {
        "name": "Original",
        "type": "snowflake",
        "configuration": {"account": "test"},
    }

View File

@@ -0,0 +1,50 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest.mock import MagicMock
import pytest
from pytest_mock import MockerFixture
from superset.commands.semantic_layer.delete import DeleteSemanticLayerCommand
from superset.commands.semantic_layer.exceptions import SemanticLayerNotFoundError
def test_delete_semantic_layer_success(mocker: MockerFixture) -> None:
    """Test successful deletion of a semantic layer."""
    mock_model = MagicMock()
    dao = mocker.patch(
        "superset.commands.semantic_layer.delete.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = mock_model
    DeleteSemanticLayerCommand("some-uuid").run()
    dao.find_by_uuid.assert_called_once_with("some-uuid")
    # DAO.delete takes a list of models.
    dao.delete.assert_called_once_with([mock_model])
def test_delete_semantic_layer_not_found(mocker: MockerFixture) -> None:
    """Test that SemanticLayerNotFoundError is raised when model is missing."""
    dao = mocker.patch(
        "superset.commands.semantic_layer.delete.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = None
    with pytest.raises(SemanticLayerNotFoundError):
        DeleteSemanticLayerCommand("missing-uuid").run()

View File

@@ -16,6 +16,12 @@
# under the License.
from superset.commands.semantic_layer.exceptions import (
SemanticLayerCreateFailedError,
SemanticLayerDeleteFailedError,
SemanticLayerForbiddenError,
SemanticLayerInvalidError,
SemanticLayerNotFoundError,
SemanticLayerUpdateFailedError,
SemanticViewForbiddenError,
SemanticViewInvalidError,
SemanticViewNotFoundError,
@@ -46,3 +52,40 @@ def test_semantic_view_update_failed_error() -> None:
"""Test SemanticViewUpdateFailedError has correct message."""
error = SemanticViewUpdateFailedError()
assert str(error.message) == "Semantic view could not be updated."
# The exact messages asserted below are part of the API's user-facing error
# contract; changing them is a breaking change for clients.
def test_semantic_layer_not_found_error() -> None:
    """Test SemanticLayerNotFoundError has correct status and message."""
    error = SemanticLayerNotFoundError()
    assert error.status == 404
    assert str(error.message) == "Semantic layer does not exist"
def test_semantic_layer_forbidden_error() -> None:
    """Test SemanticLayerForbiddenError has correct message."""
    error = SemanticLayerForbiddenError()
    assert str(error.message) == "Changing this semantic layer is forbidden"
def test_semantic_layer_invalid_error() -> None:
    """Test SemanticLayerInvalidError has correct message."""
    error = SemanticLayerInvalidError()
    assert str(error.message) == "Semantic layer parameters are invalid."
def test_semantic_layer_create_failed_error() -> None:
    """Test SemanticLayerCreateFailedError has correct message."""
    error = SemanticLayerCreateFailedError()
    assert str(error.message) == "Semantic layer could not be created."
def test_semantic_layer_update_failed_error() -> None:
    """Test SemanticLayerUpdateFailedError has correct message."""
    error = SemanticLayerUpdateFailedError()
    assert str(error.message) == "Semantic layer could not be updated."
def test_semantic_layer_delete_failed_error() -> None:
    """Test SemanticLayerDeleteFailedError has correct message."""
    error = SemanticLayerDeleteFailedError()
    assert str(error.message) == "Semantic layer could not be deleted."

View File

@@ -21,10 +21,15 @@ import pytest
from pytest_mock import MockerFixture
from superset.commands.semantic_layer.exceptions import (
SemanticLayerInvalidError,
SemanticLayerNotFoundError,
SemanticViewForbiddenError,
SemanticViewNotFoundError,
)
from superset.commands.semantic_layer.update import UpdateSemanticViewCommand
from superset.commands.semantic_layer.update import (
UpdateSemanticLayerCommand,
UpdateSemanticViewCommand,
)
from superset.exceptions import SupersetSecurityException
@@ -106,6 +111,116 @@ def test_update_semantic_view_copies_data(mocker: MockerFixture) -> None:
assert original_data == {"description": "Original"}
# =============================================================================
# UpdateSemanticLayerCommand tests
# =============================================================================
def test_update_semantic_layer_success(mocker: MockerFixture) -> None:
    """Test successful update of a semantic layer."""
    mock_model = MagicMock()
    mock_model.type = "snowflake"
    dao = mocker.patch(
        "superset.commands.semantic_layer.update.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = mock_model
    dao.update.return_value = mock_model
    data = {"name": "Updated", "description": "New desc"}
    result = UpdateSemanticLayerCommand("some-uuid", data).run()
    assert result == mock_model
    dao.find_by_uuid.assert_called_once_with("some-uuid")
    # No configuration key -> attributes passed through unchanged.
    dao.update.assert_called_once_with(mock_model, attributes=data)
def test_update_semantic_layer_not_found(mocker: MockerFixture) -> None:
    """Test that SemanticLayerNotFoundError is raised when model is missing."""
    dao = mocker.patch(
        "superset.commands.semantic_layer.update.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = None
    with pytest.raises(SemanticLayerNotFoundError):
        UpdateSemanticLayerCommand("missing-uuid", {"name": "test"}).run()
def test_update_semantic_layer_duplicate_name(mocker: MockerFixture) -> None:
    """Test that SemanticLayerInvalidError is raised for duplicate names."""
    mock_model = MagicMock()
    mock_model.type = "snowflake"
    dao = mocker.patch(
        "superset.commands.semantic_layer.update.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = mock_model
    dao.validate_update_uniqueness.return_value = False
    with pytest.raises(SemanticLayerInvalidError):
        UpdateSemanticLayerCommand("some-uuid", {"name": "Duplicate"}).run()
def test_update_semantic_layer_validates_configuration(
    mocker: MockerFixture,
) -> None:
    """Test that configuration is validated against the plugin."""
    mock_model = MagicMock()
    # The plugin is resolved from the stored model's type, not the payload.
    mock_model.type = "snowflake"
    dao = mocker.patch(
        "superset.commands.semantic_layer.update.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = mock_model
    dao.update.return_value = mock_model
    mock_cls = MagicMock()
    mocker.patch.dict(
        "superset.commands.semantic_layer.update.registry",
        {"snowflake": mock_cls},
    )
    config = {"account": "test"}
    UpdateSemanticLayerCommand("some-uuid", {"configuration": config}).run()
    mock_cls.from_configuration.assert_called_once_with(config)
def test_update_semantic_layer_skips_name_check_when_no_name(
    mocker: MockerFixture,
) -> None:
    """Test that name uniqueness is not checked when name is not provided."""
    mock_model = MagicMock()
    mock_model.type = "snowflake"
    dao = mocker.patch(
        "superset.commands.semantic_layer.update.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = mock_model
    dao.update.return_value = mock_model
    UpdateSemanticLayerCommand("some-uuid", {"description": "Updated"}).run()
    dao.validate_update_uniqueness.assert_not_called()
def test_update_semantic_layer_copies_data(mocker: MockerFixture) -> None:
    """Test that the command copies input data and does not mutate it."""
    mock_model = MagicMock()
    mock_model.type = "snowflake"
    dao = mocker.patch(
        "superset.commands.semantic_layer.update.SemanticLayerDAO",
    )
    dao.find_by_uuid.return_value = mock_model
    dao.update.return_value = mock_model
    original_data = {"description": "Original"}
    UpdateSemanticLayerCommand("some-uuid", original_data).run()
    # The caller's dict must be untouched after the command runs.
    assert original_data == {"description": "Original"}
def _make_view_model(
uuid: str = "view-uuid-1",
name: str = "my_view",

File diff suppressed because it is too large Load Diff

View File

@@ -18,7 +18,11 @@
import pytest
from marshmallow import ValidationError
from superset.semantic_layers.schemas import SemanticViewPutSchema
from superset.semantic_layers.schemas import (
SemanticLayerPostSchema,
SemanticLayerPutSchema,
SemanticViewPutSchema,
)
def test_semantic_view_put_schema_both_fields() -> None:
@@ -77,3 +81,128 @@ def test_semantic_view_put_schema_unknown_field() -> None:
with pytest.raises(ValidationError) as exc_info:
schema.load({"unknown_field": "value"})
assert "unknown_field" in exc_info.value.messages
# =============================================================================
# SemanticLayerPostSchema tests
# =============================================================================
def test_post_schema_all_fields() -> None:
    """Test loading all fields."""
    schema = SemanticLayerPostSchema()
    result = schema.load(
        {
            "name": "My Layer",
            "description": "A layer",
            "type": "snowflake",
            "configuration": {"account": "test"},
            "cache_timeout": 300,
        }
    )
    assert result["name"] == "My Layer"
    assert result["type"] == "snowflake"
    assert result["configuration"] == {"account": "test"}
    assert result["cache_timeout"] == 300
def test_post_schema_required_fields_only() -> None:
    """Test loading with only required fields."""
    schema = SemanticLayerPostSchema()
    result = schema.load(
        {
            "name": "My Layer",
            "type": "snowflake",
            "configuration": {"account": "test"},
        }
    )
    assert result["name"] == "My Layer"
    # Optional fields are absent (not defaulted) when not supplied.
    assert "description" not in result
    assert "cache_timeout" not in result
def test_post_schema_missing_name() -> None:
    """Test that missing name raises ValidationError."""
    schema = SemanticLayerPostSchema()
    with pytest.raises(ValidationError) as exc_info:
        schema.load({"type": "snowflake", "configuration": {}})
    assert "name" in exc_info.value.messages
def test_post_schema_missing_type() -> None:
    """Test that missing type raises ValidationError."""
    schema = SemanticLayerPostSchema()
    with pytest.raises(ValidationError) as exc_info:
        schema.load({"name": "My Layer", "configuration": {}})
    assert "type" in exc_info.value.messages
def test_post_schema_missing_configuration() -> None:
    """Test that missing configuration raises ValidationError."""
    schema = SemanticLayerPostSchema()
    with pytest.raises(ValidationError) as exc_info:
        schema.load({"name": "My Layer", "type": "snowflake"})
    assert "configuration" in exc_info.value.messages
def test_post_schema_null_description() -> None:
    """Test that description accepts None."""
    schema = SemanticLayerPostSchema()
    result = schema.load(
        {
            "name": "My Layer",
            "type": "snowflake",
            "configuration": {},
            "description": None,
        }
    )
    assert result["description"] is None
# =============================================================================
# SemanticLayerPutSchema tests
# =============================================================================
def test_put_schema_all_fields() -> None:
    """Test loading all fields."""
    schema = SemanticLayerPutSchema()
    result = schema.load(
        {
            "name": "Updated",
            "description": "New desc",
            "configuration": {"account": "new"},
            "cache_timeout": 600,
        }
    )
    assert result["name"] == "Updated"
    assert result["configuration"] == {"account": "new"}
def test_put_schema_empty() -> None:
    """Test loading empty payload."""
    # Every PUT field is optional, so an empty body is valid.
    schema = SemanticLayerPutSchema()
    result = schema.load({})
    assert result == {}
def test_put_schema_name_only() -> None:
    """Test loading with only name."""
    schema = SemanticLayerPutSchema()
    result = schema.load({"name": "New Name"})
    assert result == {"name": "New Name"}
def test_put_schema_configuration_only() -> None:
    """Test loading with only configuration."""
    schema = SemanticLayerPutSchema()
    result = schema.load({"configuration": {"key": "value"}})
    assert result == {"configuration": {"key": "value"}}
def test_put_schema_unknown_field() -> None:
    """Test that unknown fields raise ValidationError."""
    schema = SemanticLayerPutSchema()
    with pytest.raises(ValidationError) as exc_info:
        schema.load({"unknown_field": "value"})
    assert "unknown_field" in exc_info.value.messages