Compare commits

..

2 Commits

Author SHA1 Message Date
Mehmet Salih Yavuz
65aea59df1 fix(DynamicEditableTitle): keep effect deps as [title]
Including isEditing in the dep array re-ran the sync effect when
isEditing flipped from true to false on blur — which clobbered the
just-committed value if the parent's title prop hadn't propagated yet
(e.g. parents that don't update local state in the onSave callback,
which is also the pattern in Header.test.tsx).

Read isEditing via closure instead so the effect only fires when the
title prop actually changes. handleBlur still syncs currentTitle on
commit, so the consistency invariant is preserved.
2026-05-04 20:22:53 +03:00
Mehmet Salih Yavuz
4bb6c8e932 fix(DynamicEditableTitle): preserve in-flight edits when title prop changes
The effect at index.tsx:88 called setCurrentTitle(title) whenever the
title prop changed — including mid-edit, which clobbered unsaved typing.
Gate it on !isEditing so a parent re-render with a new title doesn't
overwrite what the user has typed. handleBlur already syncs currentTitle
on commit, so the consistency invariant is preserved.

Also fix the existing 'prop changes mid-edit do not clobber' regression
test, which rerendered Harness with the same initialTitle="Foo" — the
prop the component received never actually changed, so the test passed
even on broken code. Rerender DynamicEditableTitle directly with a
different title prop so the sync effect actually runs.
2026-05-04 19:42:14 +03:00
195 changed files with 1055 additions and 22752 deletions

View File

@@ -54,7 +54,6 @@ jobs:
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
pytest --durations-min=0.5 --cov=superset/semantic_layers/ ./tests/unit_tests/semantic_layers/ --cache-clear --cov-fail-under=100
- name: Upload code coverage
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:

View File

@@ -46,13 +46,6 @@ The Deck.gl MapBox chart's **Opacity**, **Default longitude**, **Default latitud
**To restore fit-to-data behavior:** Open the chart in Explore, clear the **Default longitude**, **Default latitude**, and **Zoom** fields in the Viewport section, and re-save the chart.
### Combined datasource list endpoint
Added a new combined datasource list endpoint at `GET /api/v1/datasource/` to serve datasets and semantic views in one response.
- The endpoint is available to users with at least one of `can_read` on `Dataset` or `SemanticView`.
- Semantic views are included only when the `SEMANTIC_LAYERS` feature flag is enabled.
- The endpoint enforces strict `order_column` validation and returns `400` for invalid sort columns.
### ClickHouse minimum driver version bump
The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.

View File

@@ -105,13 +105,7 @@ class CeleryConfig:
CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {
"ALERT_REPORTS": True,
"DATASET_FOLDERS": True,
"ENABLE_EXTENSIONS": True,
"SEMANTIC_LAYERS": True,
}
EXTENSIONS_PATH = "/app/docker/extensions"
FEATURE_FLAGS = {"ALERT_REPORTS": True, "DATASET_FOLDERS": True}
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/" # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/ # noqa: E501
# The base URL for the email report hyperlinks.

View File

@@ -224,52 +224,3 @@ async def analysis_guide(ctx: Context) -> str:
```
See [MCP Integration](./mcp) for implementation details.
### Semantic Layers
Extensions can register custom semantic layer implementations that allow Superset to connect to external data modeling frameworks. Each semantic layer defines how to authenticate, discover semantic views (tables/metrics/dimensions), and execute queries against the external system.
```python
from superset_core.semantic_layers.decorators import semantic_layer
from superset_core.semantic_layers.layer import SemanticLayer
from my_extension.config import MyConfig
from my_extension.view import MySemanticView
@semantic_layer(
id="my_platform",
name="My Data Platform",
description="Connect to My Data Platform's semantic layer",
)
class MySemanticLayer(SemanticLayer[MyConfig, MySemanticView]):
configuration_class = MyConfig
@classmethod
def from_configuration(cls, configuration: dict) -> "MySemanticLayer":
config = MyConfig.model_validate(configuration)
return cls(config)
@classmethod
def get_configuration_schema(cls, configuration=None) -> dict:
return MyConfig.model_json_schema()
@classmethod
def get_runtime_schema(cls, configuration=None, runtime_data=None) -> dict:
return {"type": "object", "properties": {}}
def get_semantic_views(self, runtime_configuration: dict) -> set[MySemanticView]:
# Return available views from the external platform
...
def get_semantic_view(self, name: str, additional_configuration: dict) -> MySemanticView:
# Return a specific view by name
...
```
**Note**: The `@semantic_layer` decorator automatically detects context and applies appropriate ID prefixing:
- **Extension context**: ID prefixed as `extensions.{publisher}.{name}.{id}`
- **Host context**: Original ID used as-is
The decorator registers the class in the semantic layers registry, making it available in the UI for users to create connections. The `configuration_class` should be a Pydantic model that defines the fields needed to connect (credentials, project, database, etc.). Superset uses the model's JSON schema to render the configuration form dynamically.

View File

@@ -67,12 +67,12 @@
"@storybook/preview-api": "^8.6.18",
"@storybook/theming": "^8.6.15",
"@superset-ui/core": "^0.20.4",
"@swc/core": "^1.15.33",
"@swc/core": "^1.15.32",
"antd": "^6.3.7",
"baseline-browser-mapping": "^2.10.27",
"baseline-browser-mapping": "^2.10.24",
"caniuse-lite": "^1.0.30001791",
"docusaurus-plugin-openapi-docs": "^5.0.2",
"docusaurus-theme-openapi-docs": "^5.0.2",
"docusaurus-plugin-openapi-docs": "^5.0.1",
"docusaurus-theme-openapi-docs": "^5.0.1",
"js-yaml": "^4.1.1",
"js-yaml-loader": "^1.2.2",
"json-bigint": "^1.0.0",
@@ -103,7 +103,7 @@
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-prettier": "^5.5.5",
"eslint-plugin-react": "^7.37.5",
"globals": "^17.6.0",
"globals": "^17.5.0",
"prettier": "^3.8.3",
"typescript": "~6.0.3",
"typescript-eslint": "^8.59.1",

View File

@@ -81,12 +81,6 @@
"lifecycle": "development",
"description": "Expand nested types in Presto into extra columns/arrays. Experimental, doesn't work with all nested types."
},
{
"name": "SEMANTIC_LAYERS",
"default": false,
"lifecycle": "development",
"description": "Enable semantic layers and show semantic views alongside datasets"
},
{
"name": "TABLE_V2_TIME_COMPARISON_ENABLED",
"default": false,

View File

@@ -4239,86 +4239,86 @@
dependencies:
apg-lite "^1.0.4"
"@swc/core-darwin-arm64@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.33.tgz#d84134fb80417d41128739f0b9014542e3ed9dd3"
integrity sha512-N+L0uXhuO7FIfzqwgxmzv0zIpV0qEp8wPX3QQs2p4atjMoywup2JTeDlXPw+z9pWJGCae3JjM+tZ6myclI+2gA==
"@swc/core-darwin-arm64@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-darwin-arm64/-/core-darwin-arm64-1.15.32.tgz#3592714588fdbb8b7a869f81ff96c7236fcf1c09"
integrity sha512-/YWMvJDPu+AAwuUsM2G+DNQ/7zhodURGzdQyewEqcvgklAdDHs3LwQmLLnyn6SJl8DT8UOxkbzK+D1PmPeelRg==
"@swc/core-darwin-x64@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.15.33.tgz#0badb9834071f1c6005986571d4a96359c1d7cd0"
integrity sha512-/Il4QHSOhV4FekbsDtkrNmKbsX26oSysvgrRswa/RYOHXAkwXDbB4jaeKq6PsJLSPkzJ2KzQ061gtBnk0vNHfA==
"@swc/core-darwin-x64@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-darwin-x64/-/core-darwin-x64-1.15.32.tgz#965044b632933146e319862ea7e4b717eb9f83dd"
integrity sha512-KOTXJXdAhWL+hZ77MYP3z+4pcMFaQhQ74yqyN1uz093q0YnbxpqMtYpPISbYvMHzVRNNx5kN+9RZAXEaadhWVA==
"@swc/core-linux-arm-gnueabihf@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.33.tgz#b7577a825b59d98b6a9a5c991d842046efe1c34a"
integrity sha512-C64hBnBxq4viOPQ8hlx+2lJ23bzZBGnjw7ryALmS+0Q3zHmwO8lw1/DArLENw4Q18/0w5wdEO1k3m1wWNtKGqQ==
"@swc/core-linux-arm-gnueabihf@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.15.32.tgz#70e70ad6ad961055f4a9be9e4947e455c18239e6"
integrity sha512-oOoxLweljlc0A4X8ybsgxV7cVaYTwBOg2iMDJcFR3Sr48C+lsv9VzSmqdK/IVIXF4W4GjLc3VqTAdSMXlfVLuQ==
"@swc/core-linux-arm64-gnu@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.33.tgz#304c48321494a18c67b2913c273b08674ee70d8c"
integrity sha512-TRJfnJbX3jqpxRDRoieMzRiCBS5jOmXNb3iQXmcgjFEHKLnAgK1RZRU8Cq1MsPqO4jAJp/ld1G4O3fXuxv85uw==
"@swc/core-linux-arm64-gnu@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.15.32.tgz#7b82e2cc5995e8f919e29f6ce702285f5f1c3ad1"
integrity sha512-oDzEkdl6D6BAWdMtU5KGO7y3HR5fJcvByNLyEk9+ugj8nP5Ovb7P4kBcStBXc4MPExFGQryehiINMlmY8HlclA==
"@swc/core-linux-arm64-musl@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.33.tgz#d116cbc04ccb4f4ee810da6bca79d4423605dbcd"
integrity sha512-il7tYM+CpUNzieQbwAjFT1P8zqAhmGWNAGhQZBnxurXZ0aNn+5nqYFTEUKNZl7QibtT0uQXzTZrNGHCIj6Y1Og==
"@swc/core-linux-arm64-musl@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.15.32.tgz#16c581b9f859b0175a8bab5cbf694bef7dbf95b8"
integrity sha512-omcqjoZP/b8D8PuczVoRwJieC6ibj7qIxTftNYokz4/aSmKFHvsd7nIFfPk5ZvtzncbH4AY7+Dkr/Lp2gWxYeA==
"@swc/core-linux-ppc64-gnu@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-linux-ppc64-gnu/-/core-linux-ppc64-gnu-1.15.33.tgz#f5354dba36db9414305bab344c817d57b8b457c2"
integrity sha512-ZtNBwN0Z7CFj9Il0FcPaKdjgP7URyKu/3RfH46vq+0paOBqLj4NYldD6Qo//Duif/7IOtAraUfDOmp0PLAufog==
"@swc/core-linux-ppc64-gnu@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-linux-ppc64-gnu/-/core-linux-ppc64-gnu-1.15.32.tgz#420f7744dae327c8e4917c87ced5c1b3e0a38f96"
integrity sha512-KGkTMyz/Tbn3PBNu0AVZ4GTDFKnICrYcTiNPZq8DrvK42pnFsf3GNDrIG9E5AtQlTmC0YigkWKmu0eMcfTrmgA==
"@swc/core-linux-s390x-gnu@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-linux-s390x-gnu/-/core-linux-s390x-gnu-1.15.33.tgz#016df9f4c9d7fd65b85ca9c558c5aec341f06da0"
integrity sha512-De1IyajoOmhOYYjw/lx66bKlyDpHZTueqwpDrWgf5O7T6d1ODeJJO9/OqMBmrBQc5C+dNnlmIufHsp4QVCWufA==
"@swc/core-linux-s390x-gnu@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-linux-s390x-gnu/-/core-linux-s390x-gnu-1.15.32.tgz#9b563a3a73c544f29454e53894bfe533b9a27ffe"
integrity sha512-G3Aa4tVS/3OGZBkoNIwUF9F6RAy+Osb4GOlo62SinLmDiErz/ykmM7KH0wkz6l9kM8jJq1HyAM6atJTUEbBk7g==
"@swc/core-linux-x64-gnu@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.33.tgz#49f36558ede072e71999aa37f123367daed2a662"
integrity sha512-mGTH0YxmUN+x6vRN/I6NOk5X0ogNktkwPnJ94IMvR7QjhRDwL0O8RXEDhyUM0YtwWrryBOqaJQBX4zruxEPRGw==
"@swc/core-linux-x64-gnu@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.15.32.tgz#615c7bcc1890379dffcc74b6780e2277e65f4b61"
integrity sha512-ERsjfGcj6CBmj3vJnGDO8m8rTvw6RqMcWo1dogOtNx3/+/0+NNpJiXDobJrr1GwInI/BHAEkvSFIH6d2LqPcUQ==
"@swc/core-linux-x64-musl@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.33.tgz#b096665f5cfeee2612325f301da5c1590b10d8f3"
integrity sha512-hj628ZkSEJf6zMf5VMbYrG2O6QqyTIp2qwY6VlCjvIa9lAEZ5c2lfPblCLVGYubTeLJDxadLB/CxqQYOQABeEQ==
"@swc/core-linux-x64-musl@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.15.32.tgz#038604d25bdebb1d1ad780d827a44654fa4b5bdd"
integrity sha512-N4Ggahe/8SUbTX50P6EdhbW9YWcgbZVb52R4cq6MK+zsoMjRq7rGvV5ztA05QnbaCYqMYx8rTY7KAIA3Crdo4Q==
"@swc/core-win32-arm64-msvc@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.33.tgz#f3101263a0dbaa173ec47638c9719d0b89838bd2"
integrity sha512-GV2oohtN2/5+KSccl86VULu3aT+LrISC8uzgSq0FRnikpD+Zwc+sBlXmoKQ+Db6jI57ITUOIB8jRkdGMABC29g==
"@swc/core-win32-arm64-msvc@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.15.32.tgz#c82006e6ef92a998e96d2160b1657f5334af4d54"
integrity sha512-01yN0o9jvo8xBTP12aPK2wW8b41jmOlGbDDlAnoynotc4pO6xA0zby9f1z6j++qXDpGBttLySq1omgVrlQKYcw==
"@swc/core-win32-ia32-msvc@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.33.tgz#eb981ef5613d42c9220559bdb0c8bc58cf6c3eb9"
integrity sha512-gtyvzSNR8DHKfFEA2uqb8Ld1myqi6uEg2jyeUq3ikn5ytYs7H8RpZYC8mdy4NXr8hfcdJfCLXPlYaqqfBXpoEQ==
"@swc/core-win32-ia32-msvc@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.15.32.tgz#e2ae1c95bd6599322bc6e9a82685b7537a193f7b"
integrity sha512-fLagI9XZYNpTcmlqAcp3KBtmj7E19WCmYD80Jxj1Kn5tGNa7yxNLd3NNdWxuZGUPl5iC0/KqZru7g08gF6Fsrw==
"@swc/core-win32-x64-msvc@1.15.33":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.33.tgz#a2fed9956933027ceb368857bac4bb4ee203d47c"
integrity sha512-d6fRqQSkJI+kmMEBWaDQ7TMl8+YjLYbwRUPZQ9DY0ORBJeTzOrG0twvfvlZ2xgw6jA0ScQKgfBm4vHLSLl5Hqg==
"@swc/core-win32-x64-msvc@1.15.32":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.15.32.tgz#2535c791821054072a511dee0d13e5de9c5cd29b"
integrity sha512-gbc2bQ/T2CiR+w0OvcVKwLOFAcPZBvmWmolbwpg1E8UrpeC03DGtyMUApOHNXNYWA3SHFrYXCQtosrcMza1YFg==
"@swc/core@^1.15.33", "@swc/core@^1.7.39":
version "1.15.33"
resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.15.33.tgz#2a6571c8aca961925f14beae52b3f43c18370fc6"
integrity sha512-jOlwnFV2xhuuZeAUILGFULeR6vDPfijEJ57evfocwznQldLU3w2cZ9bSDryY9ip+AsM3r1NJKzf47V2NXebkeQ==
"@swc/core@^1.15.32", "@swc/core@^1.7.39":
version "1.15.32"
resolved "https://registry.yarnpkg.com/@swc/core/-/core-1.15.32.tgz#2333d66f4b8e7c4fded087ead13c135ff84ab9d6"
integrity sha512-/eWL0n43D64QWEUHLtTE+jDqjkJhyidjkDhv6f0uJohOUAhywxQ9wXYp845DNNds0JpCdI4Uo0a9bl+vbXf+ew==
dependencies:
"@swc/counter" "^0.1.3"
"@swc/types" "^0.1.26"
optionalDependencies:
"@swc/core-darwin-arm64" "1.15.33"
"@swc/core-darwin-x64" "1.15.33"
"@swc/core-linux-arm-gnueabihf" "1.15.33"
"@swc/core-linux-arm64-gnu" "1.15.33"
"@swc/core-linux-arm64-musl" "1.15.33"
"@swc/core-linux-ppc64-gnu" "1.15.33"
"@swc/core-linux-s390x-gnu" "1.15.33"
"@swc/core-linux-x64-gnu" "1.15.33"
"@swc/core-linux-x64-musl" "1.15.33"
"@swc/core-win32-arm64-msvc" "1.15.33"
"@swc/core-win32-ia32-msvc" "1.15.33"
"@swc/core-win32-x64-msvc" "1.15.33"
"@swc/core-darwin-arm64" "1.15.32"
"@swc/core-darwin-x64" "1.15.32"
"@swc/core-linux-arm-gnueabihf" "1.15.32"
"@swc/core-linux-arm64-gnu" "1.15.32"
"@swc/core-linux-arm64-musl" "1.15.32"
"@swc/core-linux-ppc64-gnu" "1.15.32"
"@swc/core-linux-s390x-gnu" "1.15.32"
"@swc/core-linux-x64-gnu" "1.15.32"
"@swc/core-linux-x64-musl" "1.15.32"
"@swc/core-win32-arm64-msvc" "1.15.32"
"@swc/core-win32-ia32-msvc" "1.15.32"
"@swc/core-win32-x64-msvc" "1.15.32"
"@swc/counter@^0.1.3":
version "0.1.3"
@@ -5794,10 +5794,10 @@ base64-js@^1.3.1, base64-js@^1.5.1:
resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz"
integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
baseline-browser-mapping@^2.10.27, baseline-browser-mapping@^2.9.0, baseline-browser-mapping@^2.9.19:
version "2.10.27"
resolved "https://registry.yarnpkg.com/baseline-browser-mapping/-/baseline-browser-mapping-2.10.27.tgz#fee941c2a0b42cdf83c6427e4c830b1d0bdab2c3"
integrity sha512-zEs/ufmZoUd7WftKpKyXaT6RFxpQ5Qm9xytKRHvJfxFV9DFJkZph9RvJ1LcOUi0Z1ZVijMte65JbILeV+8QQEA==
baseline-browser-mapping@^2.10.24, baseline-browser-mapping@^2.9.0, baseline-browser-mapping@^2.9.19:
version "2.10.24"
resolved "https://registry.yarnpkg.com/baseline-browser-mapping/-/baseline-browser-mapping-2.10.24.tgz#6dc320c7bf53859ec2bf55d54db6d2e5c078df16"
integrity sha512-I2NkZOOrj2XuguvWCK6OVh9GavsNjZjK908Rq3mIBK25+GD8vPX5w2WdxVqnQ7xx3SrZJiCiZFu+/Oz50oSYSA==
batch@0.6.1:
version "0.6.1"
@@ -6062,7 +6062,12 @@ chalk@^4.0.0, chalk@^4.1.2:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
chalk@^5.0.1, chalk@^5.2.0, chalk@^5.6.2:
chalk@^5.0.1, chalk@^5.2.0:
version "5.6.0"
resolved "https://registry.npmjs.org/chalk/-/chalk-5.6.0.tgz"
integrity sha512-46QrSQFyVSEyYAgQ22hQ+zDa60YHA4fBstHmtSApj1Y5vKtG27fWowW03jCk5KcbXEWPZUIR894aARCA/G1kfQ==
chalk@^5.6.2:
version "5.6.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.6.2.tgz#b1238b6e23ea337af71c7f8a295db5af0c158aea"
integrity sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==
@@ -7300,10 +7305,10 @@ doctrine@^2.1.0:
dependencies:
esutils "^2.0.2"
docusaurus-plugin-openapi-docs@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/docusaurus-plugin-openapi-docs/-/docusaurus-plugin-openapi-docs-5.0.2.tgz#f00028621deb9179065fe7d6a541256692ef941b"
integrity sha512-WCC2m6PpylXZfNga+ScelTG0a7jUGtbB9+AmbR9lUj93FPryTs8VHTMJ3fKtO0senJTWgOU3MDvZw0v+mE3ztA==
docusaurus-plugin-openapi-docs@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/docusaurus-plugin-openapi-docs/-/docusaurus-plugin-openapi-docs-5.0.1.tgz#2fe62b58fc1af11e3d947edc2f0d60e04f1aa149"
integrity sha512-OVfoDovRdiS78DQYWmr2BjuOF2A6kVmJ43mgkQaAEZxASyHbUft4zUIhvfa7gqema6KNL9pVKejDievZdZ3wGQ==
dependencies:
"@apidevtools/json-schema-ref-parser" "^15.3.3"
"@redocly/openapi-core" "^2.25.2"
@@ -7321,10 +7326,10 @@ docusaurus-plugin-openapi-docs@^5.0.2:
swagger2openapi "^7.0.8"
xml-formatter "^3.6.6"
docusaurus-theme-openapi-docs@^5.0.2:
version "5.0.2"
resolved "https://registry.yarnpkg.com/docusaurus-theme-openapi-docs/-/docusaurus-theme-openapi-docs-5.0.2.tgz#2ab6f6b04fc2e494e24971d31432a9187c84a2fe"
integrity sha512-BD6WhbunR6kXqtoUUDlhxO4HlCNM2nYENGr/TbiTEknkgXYKQz+FEIhY4Hyz5GSLpuhPih0CDuNl7Xkfpcz0Yw==
docusaurus-theme-openapi-docs@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/docusaurus-theme-openapi-docs/-/docusaurus-theme-openapi-docs-5.0.1.tgz#a2c2c91346b6238f6d7862752cdb02611fb5396f"
integrity sha512-bVeb7hOqog9LKVrJzYXdNJ7/0N22lk0VE22QK+naAn5GuAvYo41JmpXW9hqLIPkEp2UbexTHoPW9SYVdUsyvvw==
dependencies:
"@hookform/error-message" "^2.0.1"
"@reduxjs/toolkit" "^2.8.2"
@@ -8469,10 +8474,10 @@ globals@^15.14.0:
resolved "https://registry.npmjs.org/globals/-/globals-15.15.0.tgz"
integrity sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==
globals@^17.6.0:
version "17.6.0"
resolved "https://registry.yarnpkg.com/globals/-/globals-17.6.0.tgz#0f0be018d5cca8690e6375ead1f65c4bb96191fc"
integrity sha512-sepffkT8stwnIYbsMBpoCHJuJM5l98FUF2AnE07hfvE0m/qp3R586hw4jF4uadbhvg1ooIdzuu7CsfD2jzCaNA==
globals@^17.5.0:
version "17.5.0"
resolved "https://registry.yarnpkg.com/globals/-/globals-17.5.0.tgz#a82c641d898f8dfbe0e81f66fdff7d0de43f88c6"
integrity sha512-qoV+HK2yFl/366t2/Cb3+xxPUo5BuMynomoDmiaZBIdbs+0pYbjfZU+twLhGKp4uCZ/+NbtpVepH5bGCxRyy2g==
globalthis@^1.0.4:
version "1.0.4"

View File

@@ -29,7 +29,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
version: 0.15.5 # See [README](https://github.com/apache/superset/blob/master/helm/superset/README.md#versioning) for version details.
version: 0.15.4 # See [README](https://github.com/apache/superset/blob/master/helm/superset/README.md#versioning) for version details.
dependencies:
- name: postgresql
version: 16.7.27

View File

@@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs
# superset
![Version: 0.15.5](https://img.shields.io/badge/Version-0.15.5-informational?style=flat-square)
![Version: 0.15.4](https://img.shields.io/badge/Version-0.15.4-informational?style=flat-square)
Apache Superset is a modern, enterprise-ready business intelligence web application

View File

@@ -844,8 +844,6 @@ postgresql:
database: superset
image:
registry: docker.io
repository: bitnamilegacy/postgresql
tag: "14.17.0-debian-12-r3"
## PostgreSQL Primary parameters
@@ -920,11 +918,6 @@ redis:
accessModes:
- ReadWriteOnce
image:
registry: docker.io
repository: bitnamilegacy/redis
tag: 7.0.10-debian-11-r4
nodeSelector: {}
tolerations: []

View File

@@ -95,7 +95,7 @@ dependencies = [
"redis>=5.0.0, <6.0",
"rison>=2.0.0, <3.0",
"selenium>=4.14.0, <5.0",
"shillelagh[gsheetsapi]>=1.4.4, <2.0",
"shillelagh[gsheetsapi]>=1.4.3, <2.0",
"sshtunnel>=0.4.0, <0.5",
"simplejson>=3.15.0",
"slack_sdk>=3.19.0, <4",
@@ -114,7 +114,7 @@ dependencies = [
[project.optional-dependencies]
athena = ["pyathena[pandas]>=2, <4"]
athena = ["pyathena[pandas]>=2, <3"]
aurora-data-api = ["preset-sqlalchemy-aurora-data-api>=0.2.8,<0.3"]
bigquery = [
"pandas-gbq>=0.19.1",
@@ -135,7 +135,7 @@ databricks = [
"databricks-sqlalchemy==1.0.5",
]
db2 = ["ibm-db-sa>0.3.8, <=0.4.0"]
denodo = ["denodo-sqlalchemy>=1.0.6,<2.1.0"]
denodo = ["denodo-sqlalchemy~=1.0.6"]
dremio = ["sqlalchemy-dremio>=1.2.1, <4"]
drill = ["sqlalchemy-drill>=1.1.4, <2"]
druid = ["pydruid>=0.6.5,<0.7"]
@@ -149,7 +149,7 @@ fastmcp = ["fastmcp>=3.2.4,<4.0"]
firebird = ["sqlalchemy-firebird>=0.7.0, <0.8"]
firebolt = ["firebolt-sqlalchemy>=1.0.0, <2"]
gevent = ["gevent>=23.9.1"]
gsheets = ["shillelagh[gsheetsapi]>=1.4.4, <2"]
gsheets = ["shillelagh[gsheetsapi]>=1.4.3, <2"]
hana = ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"]
hive = [
"pyhive[hive]>=0.6.5;python_version<'3.11'",
@@ -158,7 +158,7 @@ hive = [
"thrift>=0.14.1, <1.0.0",
"thrift_sasl>=0.4.3, < 1.0.0",
]
impala = ["impyla>0.16.2, <0.23"]
impala = ["impyla>0.16.2, <0.17"]
kusto = ["sqlalchemy-kusto>=3.0.0, <4"]
kylin = ["kylinpy>=2.8.1, <2.9"]
mssql = ["pymssql>=2.2.8, <3"]
@@ -171,7 +171,7 @@ ocient = [
"shapely",
"geojson",
]
oracle = ["cx-Oracle>8.0.0, <8.4"]
oracle = ["cx-Oracle>8.0.0, <8.1"]
parseable = ["sqlalchemy-parseable>=0.1.3,<0.2.0"]
pinot = ["pinotdb>=5.0.0, <6.0.0"]
playwright = ["playwright>=1.37.0, <2"]
@@ -181,7 +181,7 @@ trino = ["trino>=0.328.0"]
prophet = ["prophet>=1.1.6, <2"]
redshift = ["sqlalchemy-redshift>=0.8.1, <0.9"]
risingwave = ["sqlalchemy-risingwave"]
shillelagh = ["shillelagh[all]>=1.4.4, <2"]
shillelagh = ["shillelagh[all]>=1.4.3, <2"]
singlestore = ["sqlalchemy-singlestoredb>=1.1.1, <2"]
snowflake = ["snowflake-sqlalchemy>=1.2.4, <2"]
sqlite = ["syntaqlite>=0.1.0"]
@@ -197,7 +197,7 @@ tdengine = [
]
teradata = ["teradatasql>=16.20.0.23"]
thumbnails = [] # deprecated, will be removed in 7.0
vertica = ["sqlalchemy-vertica-python>= 0.5.9, < 0.7"]
vertica = ["sqlalchemy-vertica-python>=0.5.9, < 0.6"]
netezza = ["nzalchemy>=11.0.2"]
starrocks = ["starrocks>=1.0.0"]
doris = ["pydoris>=1.0.0, <2.0.0"]
@@ -288,7 +288,6 @@ module = [
"superset.tags.filters",
"superset.commands.security.update",
"superset.commands.security.create",
"superset.semantic_layers.api",
]
warn_unused_ignores = false

View File

@@ -381,7 +381,7 @@ selenium==4.32.0
# via apache-superset (pyproject.toml)
setuptools==80.9.0
# via -r requirements/base.in
shillelagh==1.4.4
shillelagh==1.4.3
# via apache-superset (pyproject.toml)
simplejson==3.20.1
# via apache-superset (pyproject.toml)

View File

@@ -931,7 +931,7 @@ setuptools==80.9.0
# pydata-google-auth
# zope-event
# zope-interface
shillelagh==1.4.4
shillelagh==1.4.3
# via
# -c requirements/base-constraint.txt
# apache-superset

View File

@@ -43,8 +43,6 @@ classifiers = [
]
dependencies = [
"flask-appbuilder>=5.0.2,<6",
"isodate>=0.7.0",
"pyarrow>=16.0.0",
"pydantic>=2.8.0",
"sqlalchemy>=1.4.0,<2.0",
"sqlalchemy-utils>=0.38.0, <0.43", # expanding lowerbound to work with pydoris

View File

@@ -1,73 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from pydantic import BaseModel
def build_configuration_schema(
    config_class: type[BaseModel],
    configuration: BaseModel | None = None,
) -> dict[str, Any]:
    """
    Build a JSON schema from a Pydantic configuration class.

    Handles generic boilerplate that any semantic layer with dynamic fields
    needs:

    - Reorders properties to match model field order (Pydantic sorts
      alphabetically)
    - When ``configuration`` is None, sets ``enum: []`` on all ``x-dynamic``
      properties so the frontend renders them as empty dropdowns

    Semantic layer implementations call this instead of
    ``model_json_schema()`` directly, then only need to add their own dynamic
    population logic.

    :param config_class: Pydantic model class describing the configuration
    :param configuration: Optional populated configuration; when ``None`` the
        dynamic dropdowns are emptied instead of populated
    :return: JSON schema dict ready to be served to the frontend
    """
    schema = config_class.model_json_schema()

    # A model with no declared fields yields a schema without "properties";
    # fall back to an empty dict instead of raising KeyError.
    properties = schema.get("properties", {})

    # Pydantic sorts properties alphabetically; restore model field order.
    # Fields serialized under an alias appear in the schema under that alias.
    field_order = [
        field.alias or name for name, field in config_class.model_fields.items()
    ]
    schema["properties"] = {
        key: properties[key] for key in field_order if key in properties
    }

    if configuration is None:
        # No configuration yet: render every dynamic field as an empty
        # dropdown rather than attempting to populate it.
        for prop_schema in schema["properties"].values():
            if prop_schema.get("x-dynamic"):
                prop_schema["enum"] = []

    return schema
def check_dependencies(
    prop_schema: dict[str, Any],
    configuration: BaseModel,
) -> bool:
    """
    Check whether a dynamic property's dependencies are satisfied.

    Reads the ``x-dependsOn`` list from the property schema and returns
    ``True`` only when every referenced attribute on ``configuration`` is
    truthy. A property with no declared dependencies is trivially satisfied.
    """
    required_attrs = prop_schema.get("x-dependsOn", [])
    for attr_name in required_attrs:
        # Missing attributes count as unsatisfied, same as falsy values.
        if not getattr(configuration, attr_name, None):
            return False
    return True

View File

@@ -1,169 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Semantic layer DAO interfaces for superset-core.
Provides abstract DAO classes for semantic layers and views that define the
interface contract. Host implementations replace these with concrete classes
backed by SQLAlchemy during initialization.
Usage:
from superset_core.semantic_layers.daos import (
AbstractSemanticLayerDAO,
AbstractSemanticViewDAO,
)
"""
from __future__ import annotations
from abc import abstractmethod
from typing import Any, ClassVar
from superset_core.common.daos import BaseDAO
from superset_core.semantic_layers.models import SemanticLayerModel, SemanticViewModel
class AbstractSemanticLayerDAO(BaseDAO[SemanticLayerModel]):
    """
    Abstract DAO interface for SemanticLayer.

    Host implementations will replace this class during initialization
    with a concrete DAO providing actual database access.
    """

    # Concrete model class is supplied by the host implementation;
    # None until the host swaps this class in at initialization.
    model_cls: ClassVar[type[Any] | None] = None
    # No base query filter declared at the interface level.
    base_filter = None
    # Semantic layers are addressed externally by UUID, so both the id and
    # uuid columns resolve to "uuid" here.
    id_column_name = "uuid"
    uuid_column_name = "uuid"

    @classmethod
    @abstractmethod
    def validate_uniqueness(cls, name: str) -> bool:
        """
        Validate that a semantic layer name is unique.

        :param name: Semantic layer name to validate
        :return: True if the name is unique, False otherwise
        """
        ...

    @classmethod
    @abstractmethod
    def validate_update_uniqueness(cls, layer_uuid: str, name: str) -> bool:
        """
        Validate that a semantic layer name is unique for an update operation,
        excluding the layer being updated.

        :param layer_uuid: UUID of the semantic layer being updated
        :param name: New name to validate
        :return: True if the name is unique, False otherwise
        """
        ...

    @classmethod
    @abstractmethod
    def find_by_name(cls, name: str) -> SemanticLayerModel | None:
        """
        Find a semantic layer by name.

        :param name: Semantic layer name
        :return: SemanticLayerModel instance or None
        """
        ...

    @classmethod
    @abstractmethod
    def get_semantic_views(cls, layer_uuid: str) -> list[SemanticViewModel]:
        """
        Get all semantic views associated with a semantic layer.

        :param layer_uuid: UUID of the semantic layer
        :return: List of SemanticViewModel instances
        """
        ...
class AbstractSemanticViewDAO(BaseDAO[SemanticViewModel]):
    """
    Abstract DAO interface for SemanticView.

    Host implementations will replace this class during initialization
    with a concrete DAO providing actual database access.
    """

    # Concrete model class is supplied by the host implementation;
    # None until the host swaps this class in at initialization.
    model_cls: ClassVar[type[Any] | None] = None
    # No base query filter declared at the interface level.
    base_filter = None
    # Unlike semantic layers, views keep a distinct integer "id" column
    # alongside the "uuid" column.
    id_column_name = "id"
    uuid_column_name = "uuid"

    @classmethod
    @abstractmethod
    def validate_uniqueness(
        cls,
        name: str,
        layer_uuid: str,
        configuration: dict[str, Any],
    ) -> bool:
        """
        Validate that a semantic view is unique within a semantic layer.

        Uniqueness is determined by the combination of name, layer UUID, and
        configuration.

        :param name: View name
        :param layer_uuid: UUID of the parent semantic layer
        :param configuration: Configuration dict to compare
        :return: True if unique, False otherwise
        """
        ...

    @classmethod
    @abstractmethod
    def validate_update_uniqueness(
        cls,
        view_uuid: str,
        name: str,
        layer_uuid: str,
        configuration: dict[str, Any],
    ) -> bool:
        """
        Validate that a semantic view is unique within a semantic layer for an
        update operation, excluding the view being updated.

        :param view_uuid: UUID of the view being updated
        :param name: New name to validate
        :param layer_uuid: UUID of the parent semantic layer
        :param configuration: Configuration dict to compare
        :return: True if unique, False otherwise
        """
        ...

    @classmethod
    @abstractmethod
    def find_by_name(cls, name: str, layer_uuid: str) -> SemanticViewModel | None:
        """
        Find a semantic view by name within a semantic layer.

        :param name: View name
        :param layer_uuid: UUID of the parent semantic layer
        :return: SemanticViewModel instance or None
        """
        ...


# Public API of this module.
__all__ = ["AbstractSemanticLayerDAO", "AbstractSemanticViewDAO"]

View File

@@ -1,102 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Semantic layer registration decorator for Superset.
This module provides a decorator interface to register semantic layer
implementations with the host application, enabling automatic discovery
by the extensions framework.
Usage:
from superset_core.semantic_layers.decorators import semantic_layer
@semantic_layer(
id="snowflake",
name="Snowflake Cortex",
description="Snowflake semantic layer via Cortex Analyst",
)
class SnowflakeSemanticLayer(SemanticLayer[SnowflakeConfig, SnowflakeView]):
...
# Or with minimal arguments:
@semantic_layer(id="dbt", name="dbt Semantic Layer")
class DbtSemanticLayer(SemanticLayer[DbtConfig, DbtView]):
...
"""
from __future__ import annotations
from typing import Callable, TypeVar
# Type variable for decorated semantic layer classes
T = TypeVar("T")
def semantic_layer(
    id: str,
    name: str,
    description: str | None = None,
) -> Callable[[T], T]:
    """
    Decorator used to register a semantic layer implementation.

    This is a placeholder: the host application replaces it with a concrete
    implementation during startup, which applies extension-aware namespacing
    so host and extension semantic layer IDs cannot collide.

    :param id: Unique semantic layer type identifier (e.g. ``"snowflake"``,
        ``"dbt"``), used as the registry key and stored in the ``type``
        column of the ``SemanticLayer`` model.
    :param name: Human-readable display name shown in the UI when listing
        available semantic layer types.
    :param description: Optional description for documentation and UI
        tooltips.
    :return: The decorated semantic layer class, registered with the host.
    :raises NotImplementedError: always, until the host implementation
        replaces this function during Superset startup.
    """
    message = (
        "Semantic layer decorator not initialized. "
        "This decorator should be replaced during Superset startup."
    )
    raise NotImplementedError(message)
__all__ = ["semantic_layer"]

View File

@@ -1,129 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Any, Generic, TypeVar
from pydantic import BaseModel
from superset_core.semantic_layers.view import SemanticView
ConfigT = TypeVar("ConfigT", bound=BaseModel)
SemanticViewT = TypeVar("SemanticViewT", bound="SemanticView")
class SemanticLayer(ABC, Generic[ConfigT, SemanticViewT]):
    """
    Abstract base class for semantic layers.

    Implementations are parametrized by a Pydantic configuration model
    (``ConfigT``) and by the concrete semantic view type they produce
    (``SemanticViewT``).
    """

    # Pydantic model class used to validate this layer's configuration.
    configuration_class: type[BaseModel]

    @classmethod
    @abstractmethod
    def from_configuration(
        cls,
        configuration: dict[str, Any],
    ) -> SemanticLayer[ConfigT, SemanticViewT]:
        """
        Create a semantic layer from its configuration.

        :param configuration: Raw configuration dict for this layer type.
        :return: A fully initialized semantic layer instance.
        """
        raise NotImplementedError(
            "Semantic layers must implement the from_configuration method"
        )

    @classmethod
    @abstractmethod
    def get_configuration_schema(
        cls,
        configuration: ConfigT | None = None,
    ) -> dict[str, Any]:
        """
        Get the JSON schema for the configuration needed to add the semantic layer.

        A partial configuration `configuration` can be sent to improve the schema,
        allowing for progressive validation and better UX. For example, a semantic
        layer might require:

        - auth information
        - a database

        If the user provides the auth information, a client can send the partial
        configuration to this method, and the resulting JSON schema would include
        the list of databases the user has access to, allowing a dropdown to be
        populated.

        The Snowflake semantic layer has an example implementation of this method,
        where database and schema names are populated based on the provided
        connection info.
        """
        raise NotImplementedError(
            "Semantic layers must implement the get_configuration_schema method"
        )

    @classmethod
    @abstractmethod
    def get_runtime_schema(
        cls,
        configuration: ConfigT,
        runtime_data: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """
        Get the JSON schema for the runtime parameters needed to load semantic views.

        This returns the schema needed to connect to a semantic view given the
        configuration for the semantic layer. For example, a semantic layer might
        be configured by:

        - auth information
        - an optional database

        If the user does not provide a database when creating the semantic layer,
        the runtime schema would require the database name to be provided before
        loading any semantic views. This allows users to create semantic layers
        that connect to a specific database (or project, account, etc.), or that
        allow users to select it at query time.

        The Snowflake semantic layer has an example implementation of this method,
        where database and schema names are required if they were not provided in
        the initial configuration.
        """
        raise NotImplementedError(
            "Semantic layers must implement the get_runtime_schema method"
        )

    @abstractmethod
    def get_semantic_views(
        self,
        runtime_configuration: dict[str, Any],
    ) -> set[SemanticViewT]:
        """
        Get the semantic views available in the semantic layer.

        The runtime configuration can provide information like a given project or
        schema, used to restrict the semantic views returned.
        """

    @abstractmethod
    def get_semantic_view(
        self,
        name: str,
        additional_configuration: dict[str, Any],
    ) -> SemanticViewT:
        """
        Get a specific semantic view by its name and additional configuration.
        """

View File

@@ -1,85 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Semantic layer model interfaces for superset-core.
Provides abstract model classes for semantic layers and views that will be
replaced by the host implementation's concrete SQLAlchemy models during
initialization.
Usage:
from superset_core.semantic_layers.models import (
SemanticLayerModel,
SemanticViewModel,
)
"""
from __future__ import annotations
from datetime import datetime
from uuid import UUID
from superset_core.common.models import CoreModel
class SemanticLayerModel(CoreModel):
    """
    Abstract interface for the SemanticLayer database model.

    Host implementations will replace this class during initialization
    with a concrete SQLAlchemy model providing actual persistence.
    """

    __abstract__ = True

    # Type hints for expected column attributes
    uuid: UUID  # stable external identifier
    name: str  # display name of the layer
    description: str | None  # optional free-text description
    type: str  # semantic layer type key (registry id)
    configuration: str  # serialized configuration payload
    configuration_version: int  # version of the configuration schema
    cache_timeout: int | None  # cache TTL; None presumably means "use default" — confirm
    created_on: datetime | None  # audit timestamp
    changed_on: datetime | None  # audit timestamp
class SemanticViewModel(CoreModel):
    """
    Abstract interface for the SemanticView database model.

    Host implementations will replace this class during initialization
    with a concrete SQLAlchemy model providing actual persistence.
    """

    __abstract__ = True

    # Type hints for expected column attributes
    id: int  # surrogate primary key
    uuid: UUID  # stable external identifier
    name: str  # display name of the view
    description: str | None  # optional free-text description
    configuration: str  # serialized configuration payload
    configuration_version: int  # version of the configuration schema
    cache_timeout: int | None  # cache TTL; None presumably means "use default" — confirm
    semantic_layer_uuid: UUID  # parent SemanticLayer reference
    created_on: datetime | None  # audit timestamp
    changed_on: datetime | None  # audit timestamp
__all__ = ["SemanticLayerModel", "SemanticViewModel"]

View File

@@ -1,209 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import enum
from dataclasses import dataclass
from datetime import date, datetime, time, timedelta
import isodate
import pyarrow as pa
@dataclass(frozen=True)
class Grain:
    """
    Represents a time grain (e.g., day, month, year).

    Attributes:
        name: Human-readable name of the grain (e.g., "Second")
        representation: ISO 8601 duration (e.g., "PT1S", "P1D", "P1M")
    """

    name: str
    representation: str

    def __post_init__(self) -> None:
        # Eagerly validate at construction time: raises if representation
        # is not a valid ISO 8601 duration. The parsed value is discarded.
        isodate.parse_duration(self.representation)

    def __eq__(self, other: object) -> bool:
        # Grains compare equal when their ISO representations match;
        # the display name is deliberately ignored.
        if isinstance(other, Grain):
            return self.representation == other.representation
        return NotImplemented

    def __hash__(self) -> int:
        # Must mirror __eq__: hash only the representation.
        return hash(self.representation)
class Grains:
    """Well-known time grains, plus a factory for ad-hoc ones."""

    SECOND = Grain("Second", "PT1S")
    MINUTE = Grain("Minute", "PT1M")
    HOUR = Grain("Hour", "PT1H")
    DAY = Grain("Day", "P1D")
    WEEK = Grain("Week", "P1W")
    MONTH = Grain("Month", "P1M")
    QUARTER = Grain("Quarter", "P3M")
    YEAR = Grain("Year", "P1Y")

    # Canonical lookup, keyed by ISO 8601 duration representation.
    _REGISTRY: dict[str, Grain] = {
        "PT1S": SECOND,
        "PT1M": MINUTE,
        "PT1H": HOUR,
        "P1D": DAY,
        "P1W": WEEK,
        "P1M": MONTH,
        "P3M": QUARTER,
        "P1Y": YEAR,
    }

    @classmethod
    def get(cls, representation: str, name: str | None = None) -> Grain:
        """
        Return the canonical grain for *representation* when one exists;
        otherwise build a custom :class:`Grain`, falling back to the
        representation itself as the display name.
        """
        try:
            return cls._REGISTRY[representation]
        except KeyError:
            return Grain(name or representation, representation)
@dataclass(frozen=True)
class Dimension:
    """A dimension exposed by a semantic view."""

    id: str  # stable identifier used to reference the dimension
    name: str  # human-readable display name
    type: pa.DataType  # Arrow data type of the dimension's values
    definition: str | None = None  # underlying expression, if exposed by the layer
    description: str | None = None  # optional free-text description
    grain: Grain | None = None  # time grain, for temporal dimensions
@dataclass(frozen=True)
class Metric:
    """A metric (aggregated measure) exposed by a semantic view."""

    id: str  # stable identifier used to reference the metric
    name: str  # human-readable display name
    type: pa.DataType  # Arrow data type of the metric's values
    definition: str  # underlying expression; required, unlike Dimension.definition
    description: str | None = None  # optional free-text description
@dataclass(frozen=True)
class AdhocExpression:
    """A free-form expression supplied at query time (e.g. for ORDER BY)."""

    id: str  # identifier for referencing the expression
    definition: str  # the raw expression text
class Operator(str, enum.Enum):
    """
    Comparison operators usable in filter predicates.

    Values are the SQL-style spellings; ``ADHOC`` marks a free-form
    predicate rather than a column comparison.
    """

    EQUALS = "="
    NOT_EQUALS = "!="
    GREATER_THAN = ">"
    LESS_THAN = "<"
    GREATER_THAN_OR_EQUAL = ">="
    LESS_THAN_OR_EQUAL = "<="
    IN = "IN"
    NOT_IN = "NOT IN"
    LIKE = "LIKE"
    NOT_LIKE = "NOT LIKE"
    IS_NULL = "IS NULL"
    IS_NOT_NULL = "IS NOT NULL"
    ADHOC = "ADHOC"
# Scalar value types accepted in filter predicates; None models SQL NULL.
FilterValues = str | int | float | bool | datetime | date | time | timedelta | None
class PredicateType(enum.Enum):
    """SQL clause a filter belongs to (WHERE vs HAVING)."""

    WHERE = "WHERE"
    HAVING = "HAVING"
@dataclass(frozen=True, order=True)
class Filter:
    """
    A single filter predicate.

    NOTE(review): ``column`` is Optional — presumably None for ADHOC
    predicates that carry their whole expression in ``value``; confirm
    against implementations. ``value`` holds a scalar, or a frozenset of
    scalars for set operators such as IN / NOT IN.
    """

    type: PredicateType  # WHERE or HAVING
    column: Dimension | Metric | None  # target column, when applicable
    operator: Operator  # comparison operator
    value: FilterValues | frozenset[FilterValues]  # scalar or set of scalars
class OrderDirection(enum.Enum):
    """Sort direction for ordering and group limits."""

    ASC = "ASC"
    DESC = "DESC"
# (column-or-expression, direction) pair describing one ORDER BY term.
OrderTuple = tuple[Metric | Dimension | AdhocExpression, OrderDirection]
@dataclass(frozen=True)
class GroupLimit:
    """
    Limit query to top/bottom N combinations of specified dimensions.

    The `filters` parameter allows specifying separate filter constraints for the
    group limit subquery. This is useful when you want to determine the top N groups
    using different criteria (e.g., a different time range) than the main query.

    For example, you might want to find the top 10 products by sales over the last
    30 days, but then show daily sales for those products over the last 7 days.
    """

    dimensions: list[Dimension]  # dimensions whose combinations are limited
    top: int  # number of groups to keep
    metric: Metric | None  # ranking metric; None when not ranking by a metric
    direction: OrderDirection = OrderDirection.DESC  # DESC = top N, ASC = bottom N
    # presumably collapses non-top groups into an "Others" bucket — confirm
    group_others: bool = False
    filters: set[Filter] | None = None  # separate constraints for the subquery
@dataclass(frozen=True)
class SemanticRequest:
    """
    Represents a request made to obtain semantic results.

    This could be a SQL query, an HTTP request, etc.
    """

    type: str  # kind of request (e.g. the protocol or query language)
    definition: str  # the request payload itself (SQL text, URL, ...)
@dataclass(frozen=True)
class SemanticResult:
    """
    Represents the results of a semantic query.

    This includes any requests (SQL queries, HTTP requests) that were performed
    in order to obtain the results, in order to help troubleshooting.
    """

    requests: list[SemanticRequest]  # requests issued to produce the results
    results: pa.Table  # result set as an Arrow table
@dataclass(frozen=True)
class SemanticQuery:
    """
    Represents a semantic query.
    """

    metrics: list[Metric]  # metrics to aggregate
    dimensions: list[Dimension]  # dimensions to group by
    filters: set[Filter] | None = None  # optional predicates
    order: list[OrderTuple] | None = None  # optional ORDER BY terms
    limit: int | None = None  # optional row limit
    offset: int | None = None  # optional row offset
    group_limit: GroupLimit | None = None  # optional top/bottom-N constraint

View File

@@ -1,113 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import enum
from abc import ABC, abstractmethod
from superset_core.semantic_layers.types import (
Dimension,
Filter,
Metric,
SemanticQuery,
SemanticResult,
)
# TODO (betodealmeida): move to the extension JSON
class SemanticViewFeature(enum.Enum):
    """
    Custom features supported by semantic layers.

    Implementations advertise these via ``SemanticView.features`` so
    callers can gate optional query capabilities.
    """

    ADHOC_EXPRESSIONS_IN_ORDERBY = "ADHOC_EXPRESSIONS_IN_ORDERBY"
    GROUP_LIMIT = "GROUP_LIMIT"
    GROUP_OTHERS = "GROUP_OTHERS"
class SemanticView(ABC):
    """
    Abstract base class for semantic views.
    """

    # Optional capabilities this view supports (see SemanticViewFeature).
    features: frozenset[SemanticViewFeature]

    # Implementations must expose a display name for the view.
    # Declared here as a type annotation (not abstract) so that existing
    # implementations are not required to add a formal @abstractmethod.
    name: str

    @abstractmethod
    def uid(self) -> str:
        """
        Returns a unique identifier for the semantic view.
        """

    @abstractmethod
    def get_dimensions(self) -> set[Dimension]:
        """
        Get the dimensions defined in the semantic view.
        """

    @abstractmethod
    def get_metrics(self) -> set[Metric]:
        """
        Get the metrics defined in the semantic view.
        """

    @abstractmethod
    def get_values(
        self,
        dimension: Dimension,
        filters: set[Filter] | None = None,
    ) -> SemanticResult:
        """
        Return distinct values for a dimension, optionally constrained
        by the given filters.
        """

    @abstractmethod
    def get_table(self, query: SemanticQuery) -> SemanticResult:
        """
        Execute a semantic query and return the results.
        """

    @abstractmethod
    def get_row_count(self, query: SemanticQuery) -> SemanticResult:
        """
        Execute a query and return the number of rows the result would have.
        """

    @abstractmethod
    def get_compatible_metrics(
        self,
        selected_metrics: set[Metric],
        selected_dimensions: set[Dimension],
    ) -> set[Metric]:
        """
        Return metrics compatible with the selected dimensions.
        """

    @abstractmethod
    def get_compatible_dimensions(
        self,
        selected_metrics: set[Metric],
        selected_dimensions: set[Dimension],
    ) -> set[Dimension]:
        """
        Return dimensions compatible with the selected metrics.
        """

View File

@@ -28,14 +28,8 @@
"@emotion/cache": "^11.4.0",
"@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.1",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/ibm-plex-mono": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@googleapis/sheets": "^13.0.1",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@luma.gl/constants": "~9.2.5",
"@luma.gl/core": "~9.2.5",
"@luma.gl/engine": "~9.2.5",
@@ -43,7 +37,6 @@
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",
@@ -115,7 +108,7 @@
"memoize-one": "^5.2.1",
"mousetrap": "^1.6.5",
"mustache": "^4.2.0",
"nanoid": "^5.1.11",
"nanoid": "^5.1.9",
"ol": "^10.9.0",
"pretty-ms": "^9.3.0",
"query-string": "9.3.1",
@@ -249,7 +242,7 @@
"eslint-plugin-no-only-tests": "^3.4.0",
"eslint-plugin-prettier": "^5.5.5",
"eslint-plugin-react-prefer-function-component": "^5.0.0",
"eslint-plugin-react-you-might-not-need-an-effect": "^0.10.0",
"eslint-plugin-react-you-might-not-need-an-effect": "^0.9.3",
"eslint-plugin-storybook": "^0.8.0",
"eslint-plugin-testing-library": "^7.16.2",
"eslint-plugin-theme-colors": "file:eslint-rules/eslint-plugin-theme-colors",
@@ -264,7 +257,7 @@
"jest-html-reporter": "^4.4.0",
"jest-websocket-mock": "^2.5.0",
"js-yaml-loader": "^1.2.2",
"jsdom": "^29.1.1",
"jsdom": "^29.1.0",
"lerna": "^9.0.4",
"lightningcss": "^1.32.0",
"mini-css-extract-plugin": "^2.10.2",
@@ -3920,15 +3913,6 @@
}
}
},
"node_modules/@fontsource/fira-code": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/fira-code/-/fira-code-5.2.7.tgz",
"integrity": "sha512-tnB9NNund9TwIym8/7DMJe573nlPEQb+fKUV5GL8TBYXjIhDvL0D7mgmNVNQUPhXp+R7RylQeiBdkA4EbOHPGQ==",
"license": "OFL-1.1",
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
},
"node_modules/@fontsource/ibm-plex-mono": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/ibm-plex-mono/-/ibm-plex-mono-5.2.7.tgz",
@@ -3943,6 +3927,7 @@
"resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.8.tgz",
"integrity": "sha512-P6r5WnJoKiNVV+zvW2xM13gNdFhAEpQ9dQJHt3naLvfg+LkF2ldgSLiF4T41lf1SQCM9QmkqPTn4TH568IRagg==",
"license": "OFL-1.1",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
@@ -3969,26 +3954,6 @@
"node": ">=12.0.0"
}
},
"node_modules/@great-expectations/jsonforms-antd-renderers": {
"version": "2.3.5",
"resolved": "https://registry.npmjs.org/@great-expectations/jsonforms-antd-renderers/-/jsonforms-antd-renderers-2.3.5.tgz",
"integrity": "sha512-nWJQCX6zg2mQNk+QT5SFZUkaq2SNDRO5H7zoJmNvlndd0Byoq6AaB+UTdGt/SpO1knJFe80mmiWwh99fY/go3A==",
"license": "MIT",
"dependencies": {
"lodash.isempty": "^4.4.0",
"lodash.merge": "^4.6.2",
"lodash.range": "^3.2.0",
"lodash.startcase": "^4.4.0"
},
"peerDependencies": {
"@ant-design/icons": "^5.3.0",
"@jsonforms/core": "^3.3.0",
"@jsonforms/react": "^3.3.0",
"antd": "^5.14.0",
"dayjs": "^1",
"react": "^17 || ^18"
}
},
"node_modules/@hapi/address": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/@hapi/address/-/address-5.1.1.tgz",
@@ -6359,45 +6324,6 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@jsonforms/core": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/core/-/core-3.7.0.tgz",
"integrity": "sha512-CE9viWtwi9QWLqlWLeOul1/R1GRAyOA9y6OoUpsCc0FhyR+g5p29F3k0fUExHWxL0Sf4KHcXYkfhtqfRBPS8ww==",
"license": "MIT",
"dependencies": {
"@types/json-schema": "^7.0.3",
"ajv": "^8.6.1",
"ajv-formats": "^2.1.0",
"lodash": "^4.17.21"
}
},
"node_modules/@jsonforms/react": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/react/-/react-3.7.0.tgz",
"integrity": "sha512-HkY7qAx8vW97wPEgZ7GxCB3iiXG1c95GuObxtcDHGPBJWMwnxWBnVYJmv5h7nthrInKsQKHZL5OusnC/sj/1GQ==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonforms/vanilla-renderers": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/vanilla-renderers/-/vanilla-renderers-3.7.0.tgz",
"integrity": "sha512-RdXQGsheARUJVbaTe6SqGw9W4/yrm0BgUok6OKUj8krp1NF4fqXc5UbYGHFksMR/p7LCuoYHCtQzKLXEfxJbDw==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"@jsonforms/react": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonjoy.com/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz",
@@ -9568,89 +9494,6 @@
"integrity": "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ==",
"license": "MIT"
},
"node_modules/@rjsf/antd": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/antd/-/antd-5.24.13.tgz",
"integrity": "sha512-UiWE8xoBxxCoe/SEkdQEmL5E6z3I1pw0+y0dTyGt8SHfAxxFc4/OWn7tKOAiNsKCXgf83t0JKn6CHWLD01sAdQ==",
"license": "Apache-2.0",
"dependencies": {
"classnames": "^2.5.1",
"lodash": "^4.17.21",
"lodash-es": "^4.17.21",
"rc-picker": "2.7.6"
},
"engines": {
"node": ">=14"
},
"peerDependencies": {
"@ant-design/icons": "^4.0.0 || ^5.0.0",
"@rjsf/core": "^5.24.x",
"@rjsf/utils": "^5.24.x",
"antd": "^4.24.0 || ^5.8.5",
"dayjs": "^1.8.0",
"react": "^16.14.0 || >=17"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker": {
"version": "2.7.6",
"resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-2.7.6.tgz",
"integrity": "sha512-H9if/BUJUZBOhPfWcPeT15JUI3/ntrG9muzERrXDkSoWmDj4yzmBvumozpxYrHwjcKnjyDGAke68d+whWwvhHA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "^2.2.1",
"date-fns": "2.x",
"dayjs": "1.x",
"moment": "^2.24.0",
"rc-trigger": "^5.0.4",
"rc-util": "^5.37.0",
"shallowequal": "^1.1.0"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger": {
"version": "5.3.4",
"resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.4.tgz",
"integrity": "sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.18.3",
"classnames": "^2.2.6",
"rc-align": "^4.0.0",
"rc-motion": "^2.0.0",
"rc-util": "^5.19.2"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger/node_modules/rc-align": {
"version": "4.0.15",
"resolved": "https://registry.npmjs.org/rc-align/-/rc-align-4.0.15.tgz",
"integrity": "sha512-wqJtVH60pka/nOX7/IspElA8gjPNQKIx/ZqJ6heATCkXpe1Zg4cPVrMD2vC96wjsFFL8WsmhPbx9tdMo1qqlIA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "2.x",
"dom-align": "^1.7.0",
"rc-util": "^5.26.0",
"resize-observer-polyfill": "^1.5.1"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/core": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/core/-/core-5.24.13.tgz",
@@ -20952,22 +20795,6 @@
"topojson": "^1.6.19"
}
},
"node_modules/date-fns": {
"version": "2.30.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz",
"integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.21.0"
},
"engines": {
"node": ">=0.11"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/date-fns"
}
},
"node_modules/dateformat": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.2.tgz",
@@ -21575,12 +21402,6 @@
"integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==",
"license": "MIT"
},
"node_modules/dom-align": {
"version": "1.12.4",
"resolved": "https://registry.npmjs.org/dom-align/-/dom-align-1.12.4.tgz",
"integrity": "sha512-R8LUSEay/68zE5c8/3BDxiTEvgb4xZTF0RKmAHfiEVN3klfIpXfi2/QCoiWPccVQ0J/ZGdz9OjzL4uJEP/MRAw==",
"license": "MIT"
},
"node_modules/dom-converter": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz",
@@ -22872,9 +22693,9 @@
"license": "MIT"
},
"node_modules/eslint-plugin-react-you-might-not-need-an-effect": {
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-react-you-might-not-need-an-effect/-/eslint-plugin-react-you-might-not-need-an-effect-0.10.0.tgz",
"integrity": "sha512-a4pugbQc2zLiE2NZGuXdTjtMNvlP2984QFPDv71eskUYDzigLFYfBL4QjK+RnRtcboHoXRKOcQqEZKxiK6KegA==",
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/eslint-plugin-react-you-might-not-need-an-effect/-/eslint-plugin-react-you-might-not-need-an-effect-0.9.3.tgz",
"integrity": "sha512-44cce7LndBnpDRWBTQ8p7ircIdl2rJBP5+V9Ik64E935UB47uA9ZMU1Uv160lAMhtvoPYqXBjQ+tojr5JF3mFQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -31681,9 +31502,9 @@
}
},
"node_modules/jsdom": {
"version": "29.1.1",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-29.1.1.tgz",
"integrity": "sha512-ECi4Fi2f7BdJtUKTflYRTiaMxIB0O6zfR1fX0GXpUrf6flp8QIYn1UT20YQqdSOfk2dfkCwS8LAFoJDEppNK5Q==",
"version": "29.1.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-29.1.0.tgz",
"integrity": "sha512-YNUc7fB9QuvSSQWfrH0xF+TyABkxUwx8sswgIDaCrw4Hol8BghdZDkITtZheRJeMtzWlnTfsM3bBBusRvpO1wg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -33293,12 +33114,6 @@
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==",
"license": "MIT"
},
"node_modules/lodash.isempty": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz",
"integrity": "sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==",
"license": "MIT"
},
"node_modules/lodash.isequal": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
@@ -33330,18 +33145,7 @@
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"license": "MIT"
},
"node_modules/lodash.range": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/lodash.range/-/lodash.range-3.2.0.tgz",
"integrity": "sha512-Fgkb7SinmuzqgIhNhAElo0BL/R1rHCnhwSZf78omqSwvWqD0kD2ssOAutQonDKH/ldS8BxA72ORYI09qAY9CYg==",
"license": "MIT"
},
"node_modules/lodash.startcase": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
"integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.uniq": {
@@ -36552,15 +36356,6 @@
"node": ">=0.10.0"
}
},
"node_modules/moment": {
"version": "2.30.1",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz",
"integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==",
"license": "MIT",
"engines": {
"node": "*"
}
},
"node_modules/monaco-editor": {
"version": "0.52.2",
"resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz",
@@ -36703,9 +36498,9 @@
}
},
"node_modules/nanoid": {
"version": "5.1.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.11.tgz",
"integrity": "sha512-v+KEsUv2ps74PaSKv0gHTxTCgMXOIfBEbaqa6w6ISIGC7ZsvHN4N9oJ8d4cmf0n5oTzQz2SLmThbQWhjd/8eKg==",
"version": "5.1.9",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-5.1.9.tgz",
"integrity": "sha512-ZUvP7KeBLe3OZ1ypw6dI/TzYJuvHP77IM4Ry73waSQTLn8/g8rpdjfyVAh7t1/+FjBtG4lCP42MEbDxOsRpBMw==",
"funding": [
{
"type": "github",
@@ -43570,12 +43365,6 @@
"integrity": "sha512-b6i4ZpVuUxB9h5gfCxPiusKYkqTMOjEbBs4wMaFbkfia4yFv92UKZ6Df8WXcKbn08JNL/abvg3FnMAOfakDvUw==",
"license": "MIT"
},
"node_modules/shallowequal": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz",
"integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==",
"license": "MIT"
},
"node_modules/shapefile": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/shapefile/-/shapefile-0.3.1.tgz",
@@ -49599,7 +49388,7 @@
"dependencies": {
"chalk": "^5.6.2",
"lodash-es": "^4.18.1",
"yeoman-generator": "^8.1.2",
"yeoman-generator": "^8.2.2",
"yosay": "^3.0.0"
},
"devDependencies": {
@@ -50575,7 +50364,7 @@
"classnames": "^2.5.1",
"d3-array": "^3.2.4",
"lodash": "^4.18.1",
"memoize-one": "^6.0.0",
"memoize-one": "^5.2.1",
"react-table": "^7.8.0",
"regenerator-runtime": "^0.14.1",
"xss": "^1.0.15"
@@ -50606,12 +50395,6 @@
"node": ">=12"
}
},
"plugins/plugin-chart-ag-grid-table/node_modules/memoize-one": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-6.0.0.tgz",
"integrity": "sha512-rkpe71W0N0c0Xz6QD0eJETuWAJGnJ9afsl1srmwPrI+yBCkge5EycXXbYRyvL29zZVUWQCY7InPRCv3GDXuZNw==",
"license": "MIT"
},
"plugins/plugin-chart-cartodiagram": {
"name": "@superset-ui/plugin-chart-cartodiagram",
"version": "0.0.1",
@@ -50649,7 +50432,7 @@
"acorn": "^8.16.0",
"d3-array": "^3.2.4",
"lodash": "^4.18.1",
"zod": "^4.4.3"
"zod": "^4.4.1"
},
"peerDependencies": {
"@apache-superset/core": "*",
@@ -50893,7 +50676,7 @@
"@deck.gl/extensions": "~9.2.9",
"@deck.gl/geo-layers": "~9.2.5",
"@deck.gl/layers": "~9.2.5",
"@deck.gl/mapbox": "^9.3.2",
"@deck.gl/mapbox": "~9.3.1",
"@deck.gl/mesh-layers": "~9.2.5",
"@luma.gl/constants": "~9.2.5",
"@luma.gl/core": "~9.2.5",
@@ -50941,16 +50724,16 @@
}
},
"plugins/preset-chart-deckgl/node_modules/@deck.gl/mapbox": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@deck.gl/mapbox/-/mapbox-9.3.2.tgz",
"integrity": "sha512-+T9pJwsOXwjUxyGN6oiBMfIs28VtDIG1V1Rqz4qqn4TjjNEFFw+xO0olJIg8FO5IAqw2OtePdsrMj0tX8tHdGQ==",
"version": "9.3.1",
"resolved": "https://registry.npmjs.org/@deck.gl/mapbox/-/mapbox-9.3.1.tgz",
"integrity": "sha512-4SgpWMeZiqiZEiz9yPdr89cVRL8HFcvXLxXUA0ExhMreUdNuK/j2OIQHPhw6vp1xCFbJEEqRelQ0pJYkhGDkYw==",
"license": "MIT",
"dependencies": {
"@math.gl/web-mercator": "^4.1.0"
},
"peerDependencies": {
"@deck.gl/core": "~9.3.0",
"@luma.gl/core": "~9.3.3",
"@luma.gl/core": "~9.3.2",
"@math.gl/web-mercator": "^4.1.0"
}
},

View File

@@ -117,14 +117,7 @@
"@luma.gl/gltf": "~9.2.5",
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",
@@ -196,7 +189,7 @@
"memoize-one": "^5.2.1",
"mousetrap": "^1.6.5",
"mustache": "^4.2.0",
"nanoid": "^5.1.11",
"nanoid": "^5.1.9",
"ol": "^10.9.0",
"pretty-ms": "^9.3.0",
"query-string": "9.3.1",
@@ -330,7 +323,7 @@
"eslint-plugin-no-only-tests": "^3.4.0",
"eslint-plugin-prettier": "^5.5.5",
"eslint-plugin-react-prefer-function-component": "^5.0.0",
"eslint-plugin-react-you-might-not-need-an-effect": "^0.10.0",
"eslint-plugin-react-you-might-not-need-an-effect": "^0.9.3",
"eslint-plugin-storybook": "^0.8.0",
"eslint-plugin-testing-library": "^7.16.2",
"eslint-plugin-theme-colors": "file:eslint-rules/eslint-plugin-theme-colors",
@@ -345,7 +338,7 @@
"jest-html-reporter": "^4.4.0",
"jest-websocket-mock": "^2.5.0",
"js-yaml-loader": "^1.2.2",
"jsdom": "^29.1.1",
"jsdom": "^29.1.0",
"lerna": "^9.0.4",
"lightningcss": "^1.32.0",
"mini-css-extract-plugin": "^2.10.2",

View File

@@ -30,7 +30,7 @@
"dependencies": {
"chalk": "^5.6.2",
"lodash-es": "^4.18.1",
"yeoman-generator": "^8.1.2",
"yeoman-generator": "^8.2.2",
"yosay": "^3.0.0"
},
"devDependencies": {

View File

@@ -18,7 +18,6 @@
*/
import { isMatrixifyVisible } from './matrixifyControls';
import type { ControlStateMapping } from '../types';
/**
* Helper to build a controls object matching the shape used by
@@ -26,7 +25,7 @@ import type { ControlStateMapping } from '../types';
*/
function makeControls(
overrides: Record<string, unknown> = {},
): ControlStateMapping {
): Record<string, { value: unknown }> {
const defaults: Record<string, unknown> = {
matrixify_enable: false,
matrixify_mode_rows: 'disabled',
@@ -37,7 +36,7 @@ function makeControls(
const merged = { ...defaults, ...overrides };
return Object.fromEntries(
Object.entries(merged).map(([k, v]) => [k, { value: v }]),
) as ControlStateMapping;
);
}
// ── matrixify_enable guard ──────────────────────────────────────────

View File

@@ -20,7 +20,7 @@
import { t } from '@apache-superset/core/translation';
import { validateNonEmpty } from '@superset-ui/core';
import { ControlStateMapping, SharedControlConfig } from '../types';
import { SharedControlConfig } from '../types';
import { dndAdhocMetricControl } from './dndControls';
import { defineSavedMetrics } from '../utils';
@@ -29,12 +29,9 @@ import { defineSavedMetrics } from '../utils';
* Controls for transforming charts into matrix/grid layouts
*/
// Utility function to check if matrixify controls should be visible.
// Controls both visibility callbacks and validator injection via mapStateToProps.
// The matrixify_enable guard prevents hidden validators from firing on
// pre-revamp charts with stale matrixify_mode defaults (fix for #38519).
// Utility function to check if matrixify controls should be visible
const isMatrixifyVisible = (
controls: ControlStateMapping | undefined,
controls: any,
axis: 'rows' | 'columns',
mode?: 'metrics' | 'dimensions',
selectionMode?: 'members' | 'topn' | 'all',

View File

@@ -1,238 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Tests for the matrixify_enable guard in isMatrixifyVisible() and
* validator injection via mapStateToProps on real matrixify control definitions.
*
* These are TDD tests for the fix to apache/superset#38519 regression:
* isMatrixifyVisible() must check matrixify_enable before evaluating mode,
* otherwise pre-revamp charts with stale matrixify_mode defaults trigger
* hidden validators that block save.
*/
import {
matrixifyControls,
isMatrixifyVisible,
} from '../../src/shared-controls/matrixifyControls';
import type { ControlPanelState, ControlStateMapping } from '../../src/types';
// Helper: build a minimal controls object for ControlPanelState
const buildControls = (
overrides: Record<string, any> = {},
): ControlStateMapping => {
const controls: Record<string, { value: any }> = {};
Object.entries(overrides).forEach(([key, value]) => {
controls[key] = { value };
});
return controls as ControlStateMapping;
};
// Helper: build a minimal ControlPanelState for mapStateToProps.
// Only provides fields that isMatrixifyVisible and mapStateToProps actually read.
const buildState = (
controlValues: Record<string, any> = {},
formData: Record<string, any> = {},
) =>
({
controls: buildControls(controlValues),
datasource: { columns: [], type: 'table' },
form_data: formData,
common: {},
metadata: {},
slice: { slice_id: 0 },
}) as unknown as ControlPanelState;
// ============================================================
// Validator injection tests via real mapStateToProps (rows)
// ============================================================
// --- matrixify_dimension_rows ---
test('matrixify_dimension_rows: validators empty when matrixify_enable is falsy', () => {
const control = matrixifyControls.matrixify_dimension_rows;
const state = buildState(
{
matrixify_enable: undefined,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'members',
},
{ matrixify_mode_rows: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators).toEqual([]);
});
test('matrixify_dimension_rows: validators present when matrixify_enable is true', () => {
const control = matrixifyControls.matrixify_dimension_rows;
const state = buildState(
{
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'members',
},
{ matrixify_mode_rows: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators.length).toBeGreaterThan(0);
});
// --- matrixify_topn_value_rows ---
test('matrixify_topn_value_rows: validators empty when matrixify_enable is falsy', () => {
const control = matrixifyControls.matrixify_topn_value_rows;
const state = buildState(
{
matrixify_enable: undefined,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'topn',
},
{ matrixify_mode_rows: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators).toEqual([]);
});
test('matrixify_topn_value_rows: validators present when matrixify_enable is true', () => {
const control = matrixifyControls.matrixify_topn_value_rows;
const state = buildState(
{
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'topn',
},
{ matrixify_mode_rows: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators.length).toBeGreaterThan(0);
});
// --- matrixify_topn_metric_rows ---
test('matrixify_topn_metric_rows: validators empty when matrixify_enable is falsy', () => {
const control = matrixifyControls.matrixify_topn_metric_rows;
const state = buildState(
{
matrixify_enable: undefined,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'topn',
},
{ matrixify_mode_rows: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators).toEqual([]);
});
test('matrixify_topn_metric_rows: validators present when matrixify_enable is true', () => {
const control = matrixifyControls.matrixify_topn_metric_rows;
const state = buildState(
{
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
matrixify_dimension_selection_mode_rows: 'topn',
},
{ matrixify_mode_rows: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators.length).toBeGreaterThan(0);
});
// ============================================================
// Validator injection tests via real mapStateToProps (columns)
// ============================================================
test('matrixify_dimension_columns: validators empty when matrixify_enable is falsy', () => {
const control = matrixifyControls.matrixify_dimension_columns;
const state = buildState(
{
matrixify_enable: undefined,
matrixify_mode_columns: 'dimensions',
matrixify_dimension_selection_mode_columns: 'members',
},
{ matrixify_mode_columns: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators).toEqual([]);
});
test('matrixify_dimension_columns: validators present when matrixify_enable is true', () => {
const control = matrixifyControls.matrixify_dimension_columns;
const state = buildState(
{
matrixify_enable: true,
matrixify_mode_columns: 'dimensions',
matrixify_dimension_selection_mode_columns: 'members',
},
{ matrixify_mode_columns: 'dimensions' },
);
const result = control.mapStateToProps!(state, {} as any);
expect(result.validators.length).toBeGreaterThan(0);
});
// ============================================================
// Direct isMatrixifyVisible guard tests
// ============================================================
test.each([
['undefined', undefined],
['null', null],
['false', false],
['0', 0],
])(
'isMatrixifyVisible returns false when matrixify_enable is %s',
(_, value) => {
const controls = buildControls({
matrixify_enable: value,
matrixify_mode_rows: 'dimensions',
});
expect(isMatrixifyVisible(controls, 'rows')).toBe(false);
},
);
test('isMatrixifyVisible returns true when matrixify_enable is true and mode matches', () => {
const controls = buildControls({
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
});
expect(isMatrixifyVisible(controls, 'rows', 'dimensions')).toBe(true);
});
test('isMatrixifyVisible returns false when matrixify_enable is true but mode is disabled', () => {
const controls = buildControls({
matrixify_enable: true,
matrixify_mode_rows: 'disabled',
});
expect(isMatrixifyVisible(controls, 'rows')).toBe(false);
});
test('isMatrixifyVisible returns true when matrixify_enable is true and any non-disabled mode (no mode filter)', () => {
const controls = buildControls({
matrixify_enable: true,
matrixify_mode_columns: 'metrics',
});
expect(isMatrixifyVisible(controls, 'columns')).toBe(true);
});

View File

@@ -94,20 +94,11 @@ class CategoricalColorScale extends ExtensibleFunction {
/**
* Increment the color range with analogous colors
*
* @param forceMinimumExpansion When true, expand at least once even if the
* ordinal domain is still shorter than the palette. Shared dashboard labels
* can resolve from the global map without entering the scale domain, so
* domain-based sizing alone would skip expansion while collision resolution
* still needs analogous colors.
*/
incrementColorRange(forceMinimumExpansion = false) {
const domainBasedMultiple = Math.floor(
incrementColorRange() {
const multiple = Math.floor(
this.domain().length / this.originColors.length,
);
const multiple = forceMinimumExpansion
? Math.max(domainBasedMultiple, 1)
: domainBasedMultiple;
// the domain has grown larger than the original range
// increments the range with analogous colors
if (multiple > this.multiple) {
@@ -153,7 +144,6 @@ class CategoricalColorScale extends ExtensibleFunction {
if (isFeatureEnabled(FeatureFlag.UseAnalogousColors)) {
this.incrementColorRange();
}
if (
// feature flag to be deprecated (will become standard behaviour)
isFeatureEnabled(FeatureFlag.AvoidColorsCollision) &&
@@ -164,39 +154,6 @@ class CategoricalColorScale extends ExtensibleFunction {
}
}
if (
isFeatureEnabled(FeatureFlag.AvoidColorsCollision) &&
source === LabelsColorMapSource.Dashboard &&
(forcedColor || isExistingLabel)
) {
const colliding = [...this.chartLabelsColorMap.entries()].filter(
([labelKey, c]) => c === color && labelKey !== cleanedValue,
);
if (
colliding.length > 0 &&
isFeatureEnabled(FeatureFlag.UseAnalogousColors)
) {
this.incrementColorRange(true);
}
for (const [otherLabel] of colliding) {
if (
Object.prototype.hasOwnProperty.call(this.forcedColors, otherLabel)
) {
continue;
}
const newColor = this.getNextAvailableColor(otherLabel, color);
this.chartLabelsColorMap.set(otherLabel, newColor);
if (sliceId) {
this.labelsColorMapInstance.addSlice(
otherLabel,
newColor,
sliceId,
appliedColorScheme,
);
}
}
}
// keep track of values in this slice
this.chartLabelsColorMap.set(cleanedValue, color);

View File

@@ -70,11 +70,21 @@ test('a change event that arrives before isEditing flips is not dropped', () =>
});
test('prop changes mid-edit do not clobber unsaved typing', async () => {
const { rerender } = render(<Harness initialTitle="Foo" />);
// Rerender DynamicEditableTitle directly with a changed title prop so the
// sync effect actually runs. Going through Harness would not exercise the
// bug because Harness owns its own state and only reads initialTitle once.
const onSave = jest.fn();
const props = {
placeholder: 'placeholder',
canEdit: true,
label: 'Title',
onSave,
};
const { rerender } = render(<DynamicEditableTitle {...props} title="Foo" />);
const input = screen.getByRole('textbox') as HTMLInputElement;
userEvent.click(input);
await userEvent.type(input, 'X', { delay: 1 });
expect(input.value).toBe('FooX');
rerender(<Harness initialTitle="Foo" />);
rerender(<DynamicEditableTitle {...props} title="Bar" />);
expect(input.value).toBe('FooX');
});

View File

@@ -86,7 +86,15 @@ export const DynamicEditableTitle = memo(
});
useEffect(() => {
setCurrentTitle(title);
// Don't overwrite in-flight user input when the parent re-renders with a
// new title prop mid-edit. handleBlur already syncs currentTitle on commit;
// re-running this effect when isEditing flips would resync to a stale
// title prop, so isEditing is intentionally read via closure rather than
// listed as a dep.
if (!isEditing) {
setCurrentTitle(title);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [title]);
useEffect(() => {
if (isEditing) {

View File

@@ -23,7 +23,7 @@ import { Label } from '..';
// Define the prop types for DatasetTypeLabel
interface DatasetTypeLabelProps {
datasetType: 'physical' | 'virtual' | 'semantic_view';
datasetType: 'physical' | 'virtual'; // Accepts only 'physical' or 'virtual'
}
const SIZE = 's'; // Define the size as a constant
@@ -32,22 +32,6 @@ export const DatasetTypeLabel: React.FC<DatasetTypeLabelProps> = ({
datasetType,
}) => {
const theme = useTheme();
if (datasetType === 'semantic_view') {
return (
<Label
icon={
<Icons.ApartmentOutlined
iconSize={SIZE}
iconColor={theme.colorInfo}
/>
}
type="info"
style={{ color: theme.colorInfo }}
>
{t('Semantic')}
</Label>
);
}
const isPhysical = datasetType === 'physical';
const label: string = isPhysical ? t('Physical') : t('Virtual');
const labelType = isPhysical ? 'primary' : 'default';

View File

@@ -19,15 +19,6 @@
import { DatasourceType } from './types/Datasource';
const DATASOURCE_TYPE_MAP: Record<string, DatasourceType> = {
table: DatasourceType.Table,
query: DatasourceType.Query,
dataset: DatasourceType.Dataset,
sl_table: DatasourceType.SlTable,
saved_query: DatasourceType.SavedQuery,
semantic_view: DatasourceType.SemanticView,
};
export default class DatasourceKey {
readonly id: number;
@@ -36,7 +27,8 @@ export default class DatasourceKey {
constructor(key: string) {
const [idStr, typeStr] = key.split('__');
this.id = parseInt(idStr, 10);
this.type = DATASOURCE_TYPE_MAP[typeStr] ?? DatasourceType.Table;
this.type = DatasourceType.Table; // default to SqlaTable model
this.type = typeStr === 'query' ? DatasourceType.Query : this.type;
}
public toString() {

View File

@@ -26,7 +26,6 @@ export enum DatasourceType {
Dataset = 'dataset',
SlTable = 'sl_table',
SavedQuery = 'saved_query',
SemanticView = 'semantic_view',
}
export interface Currency {
@@ -41,13 +40,6 @@ export interface Datasource {
id: number;
name: string;
type: DatasourceType;
/**
* The parent resource that owns this datasource.
* For SQL-based datasets this is the database; for semantic views it is the
* semantic layer. Use this field instead of the legacy `database` field when
* you only need the display name.
*/
parent?: { name: string };
columns: Column[];
metrics: Metric[];
description?: string;

View File

@@ -61,7 +61,6 @@ export enum FeatureFlag {
ListviewsDefaultCardView = 'LISTVIEWS_DEFAULT_CARD_VIEW',
Matrixify = 'MATRIXIFY',
ScheduledQueries = 'SCHEDULED_QUERIES',
SemanticLayers = 'SEMANTIC_LAYERS',
SqllabBackendPersistence = 'SQLLAB_BACKEND_PERSISTENCE',
SqlValidatorsByEngine = 'SQL_VALIDATORS_BY_ENGINE',
SshTunneling = 'SSH_TUNNELING',

View File

@@ -21,7 +21,6 @@ import { ScaleOrdinal } from 'd3-scale';
import {
CategoricalColorScale,
FeatureFlag,
getLabelsColorMap,
LabelsColorMapSource,
} from '@superset-ui/core';
@@ -200,42 +199,10 @@ describe('CategoricalColorScale', () => {
const returnedColor = scale.getColor(value, sliceId);
expect(returnedColor).toBe(expectedColor);
});
test('reassigns colliding colors when no sliceId is provided', () => {
window.featureFlags = {
[FeatureFlag.AvoidColorsCollision]: true,
};
const PALETTE = ['red', 'blue', 'green'];
const chartAScale = new CategoricalColorScale(PALETTE);
const labelsColorMap = chartAScale.labelsColorMapInstance;
labelsColorMap.reset();
labelsColorMap.source = LabelsColorMapSource.Dashboard;
try {
chartAScale.getColor('Trains', 101, 'testScheme');
const chartBScale = new CategoricalColorScale(PALETTE);
// Call getColor without sliceId (or with undefined)
chartBScale.getColor('Classic Cars', undefined, 'testScheme');
chartBScale.getColor('Trains', undefined, 'testScheme');
const classicCarsColor =
chartBScale.chartLabelsColorMap.get('Classic Cars');
const trainsColor = chartBScale.chartLabelsColorMap.get('Trains');
expect(trainsColor).toBe('red');
expect(classicCarsColor).toBeDefined();
expect(classicCarsColor).not.toBe('red');
} finally {
labelsColorMap.reset();
labelsColorMap.source = LabelsColorMapSource.Dashboard;
}
});
test('conditionally calls getNextAvailableColor', () => {
window.featureFlags = {
[FeatureFlag.AvoidColorsCollision]: true,
};
scale.labelsColorMapInstance.source = LabelsColorMapSource.Explore;
scale.getColor('testValue1');
scale.getColor('testValue2');
@@ -258,27 +225,6 @@ describe('CategoricalColorScale', () => {
expect(getNextAvailableColorSpy).not.toHaveBeenCalled();
});
test('reassigns non-forced labels when a dashboard-synced label would duplicate their color', () => {
window.featureFlags = {
[FeatureFlag.AvoidColorsCollision]: true,
};
const dashScale = new CategoricalColorScale(['red', 'blue', 'green']);
const sliceId = 501;
const colorScheme = 'preset';
dashScale.labelsColorMapInstance.source = LabelsColorMapSource.Dashboard;
jest
.spyOn(dashScale.labelsColorMapInstance, 'getColorMap')
.mockReturnValue(new Map([['Trains', 'red']]));
dashScale.getColor('Classic Cars', sliceId, colorScheme);
dashScale.getColor('Trains', sliceId, colorScheme);
expect(dashScale.chartLabelsColorMap.get('Trains')).toBe('red');
expect(dashScale.chartLabelsColorMap.get('Classic Cars')).not.toBe('red');
expect(dashScale.chartLabelsColorMap.get('Classic Cars')).toBeDefined();
});
});
describe('.setColor(value, forcedColor)', () => {
@@ -533,131 +479,6 @@ describe('CategoricalColorScale', () => {
});
});
describe('dashboard shared-dimension color collision', () => {
let labelsColorMap: ReturnType<typeof getLabelsColorMap>;
beforeEach(() => {
window.featureFlags = {
[FeatureFlag.AvoidColorsCollision]: true,
};
const sentinel = new CategoricalColorScale(['red', 'blue', 'green']);
labelsColorMap = sentinel.labelsColorMapInstance;
labelsColorMap.reset();
labelsColorMap.source = LabelsColorMapSource.Dashboard;
});
afterEach(() => {
jest.restoreAllMocks();
labelsColorMap.reset();
});
test('reproduces the bug without the fix: Classic Cars and Trains would both be red', () => {
window.featureFlags = {
[FeatureFlag.AvoidColorsCollision]: false,
};
const PALETTE = ['red', 'blue', 'green'];
const chartAScale = new CategoricalColorScale(PALETTE);
chartAScale.getColor('Trains', 101, 'testScheme');
expect(labelsColorMap.getColorMap().get('Trains')).toBe('red');
const chartBScale = new CategoricalColorScale(PALETTE);
chartBScale.getColor('Classic Cars', 102, 'testScheme');
chartBScale.getColor('Trains', 102, 'testScheme');
const classicCarsColor =
chartBScale.chartLabelsColorMap.get('Classic Cars');
const trainsColor = chartBScale.chartLabelsColorMap.get('Trains');
expect(trainsColor).toBe('red');
expect(classicCarsColor).toBe('red');
});
test('fix: Classic Cars is reassigned when Trains locks red from the dashboard', () => {
const PALETTE = ['red', 'blue', 'green'];
const chartAScale = new CategoricalColorScale(PALETTE);
chartAScale.getColor('Trains', 101, 'testScheme');
expect(labelsColorMap.getColorMap().get('Trains')).toBe('red');
const chartBScale = new CategoricalColorScale(PALETTE);
chartBScale.getColor('Classic Cars', 102, 'testScheme');
chartBScale.getColor('Trains', 102, 'testScheme');
const classicCarsColor =
chartBScale.chartLabelsColorMap.get('Classic Cars');
const trainsColor = chartBScale.chartLabelsColorMap.get('Trains');
expect(trainsColor).toBe('red');
expect(classicCarsColor).toBeDefined();
expect(classicCarsColor).not.toBe('red');
});
test('fix: no series in Chart B share a color when palette has enough colors', () => {
const PALETTE = ['red', 'blue', 'green'];
const chartAScale = new CategoricalColorScale(PALETTE);
chartAScale.getColor('Trains', 101, 'testScheme');
const chartBScale = new CategoricalColorScale(PALETTE);
chartBScale.getColor('Classic Cars', 102, 'testScheme');
chartBScale.getColor('Trains', 102, 'testScheme');
const colors = Array.from(chartBScale.chartLabelsColorMap.values());
const uniqueColors = new Set(colors);
expect(uniqueColors.size).toBe(colors.length);
});
test('fix: increments analogous color range for dashboard collisions when UseAnalogousColors is enabled', () => {
window.featureFlags = {
[FeatureFlag.AvoidColorsCollision]: true,
[FeatureFlag.UseAnalogousColors]: true,
};
const PALETTE = ['red', 'blue', 'green'];
const chartAScale = new CategoricalColorScale(PALETTE);
chartAScale.getColor('Trains', 101, 'testScheme');
const chartBScale = new CategoricalColorScale(PALETTE);
const addSliceSpy = jest.spyOn(
chartBScale.labelsColorMapInstance,
'addSlice',
);
chartBScale.getColor('Classic Cars', 102, 'testScheme');
chartBScale.getColor('Model T', 102, 'testScheme');
chartBScale.getColor('Trains', 102, 'testScheme');
expect(chartBScale.chartLabelsColorMap.get('Trains')).toBe('red');
expect(chartBScale.chartLabelsColorMap.get('Classic Cars')).toBeDefined();
expect(chartBScale.chartLabelsColorMap.get('Classic Cars')).not.toBe(
'red',
);
expect(chartBScale.range()).toHaveLength(6);
expect(
addSliceSpy.mock.calls.some(
([label, color]) => label === 'Classic Cars' && color !== 'red',
),
).toBe(true);
});
test('fix: forced colors (user-set in dashboard JSON) are never reassigned', () => {
const PALETTE = ['red', 'blue', 'green'];
const forcedColors = { 'Classic Cars': 'red' };
const chartAScale = new CategoricalColorScale(PALETTE);
chartAScale.getColor('Trains', 101, 'testScheme');
const chartBScale = new CategoricalColorScale(PALETTE, forcedColors);
chartBScale.getColor('Classic Cars', 102, 'testScheme');
chartBScale.getColor('Trains', 102, 'testScheme');
expect(chartBScale.chartLabelsColorMap.get('Classic Cars')).toBe('red');
});
});
describe("is compatible with D3's ScaleOrdinal", () => {
test('passes type check', () => {
const scale: ScaleOrdinal<{ toString(): string }, string> =

View File

@@ -19,7 +19,6 @@
import fetchMock from 'fetch-mock';
import { SupersetClient, SupersetClientClass } from '@superset-ui/core';
import type { SupersetClientInterface } from '@superset-ui/core';
import { LOGIN_GLOB } from './fixtures/constants';
beforeAll(() => fetchMock.mockGlobal());
@@ -32,10 +31,6 @@ describe('SupersetClient', () => {
afterEach(() => SupersetClient.reset());
const clientWithGetUrl = SupersetClient as SupersetClientInterface & {
getUrl: (...args: unknown[]) => string;
};
test('exposes configure, init, get, post, postForm, delete, put, request, reset, getGuestToken, getCSRFToken, getUrl, isAuthenticated, and reAuthenticate methods', () => {
expect(typeof SupersetClient.configure).toBe('function');
expect(typeof SupersetClient.init).toBe('function');
@@ -48,7 +43,7 @@ describe('SupersetClient', () => {
expect(typeof SupersetClient.reset).toBe('function');
expect(typeof SupersetClient.getGuestToken).toBe('function');
expect(typeof SupersetClient.getCSRFToken).toBe('function');
expect(typeof clientWithGetUrl.getUrl).toBe('function');
expect(typeof SupersetClient.getUrl).toBe('function');
expect(typeof SupersetClient.isAuthenticated).toBe('function');
expect(typeof SupersetClient.reAuthenticate).toBe('function');
});
@@ -63,7 +58,7 @@ describe('SupersetClient', () => {
expect(SupersetClient.request).toThrow();
expect(SupersetClient.getGuestToken).toThrow();
expect(SupersetClient.getCSRFToken).toThrow();
expect(clientWithGetUrl.getUrl).toThrow();
expect(SupersetClient.getUrl).toThrow();
expect(SupersetClient.isAuthenticated).toThrow();
expect(SupersetClient.reAuthenticate).toThrow();
expect(SupersetClient.configure).not.toThrow();
@@ -105,7 +100,7 @@ describe('SupersetClient', () => {
const getUrlSpy = jest.spyOn(SupersetClientClass.prototype, 'getUrl');
SupersetClient.configure({ appRoot: '/app' });
expect(clientWithGetUrl.getUrl({ endpoint: '/some/path' })).toContain(
expect(SupersetClient.getUrl({ endpoint: '/some/path' })).toContain(
'/app/some/path',
);
expect(getUrlSpy).toHaveBeenCalledTimes(1);

View File

@@ -28,11 +28,10 @@ test('DEFAULT_METRICS', () => {
});
test('DatasourceType', () => {
expect(Object.keys(DatasourceType).length).toBe(6);
expect(Object.keys(DatasourceType).length).toBe(5);
expect(DatasourceType.Table).toBe('table');
expect(DatasourceType.Query).toBe('query');
expect(DatasourceType.Dataset).toBe('dataset');
expect(DatasourceType.SlTable).toBe('sl_table');
expect(DatasourceType.SavedQuery).toBe('saved_query');
expect(DatasourceType.SemanticView).toBe('semantic_view');
});

View File

@@ -71,16 +71,10 @@ describe('TimeFormatter', () => {
// PivotData.processRecord coerces values with String(), turning numeric
// timestamps into strings.
const timestamp = PREVIEW_TIME.getTime().toString();
expect(formatter.format(timestamp as unknown as number | Date)).toEqual(
'2017',
);
expect(formatter.format(timestamp)).toEqual('2017');
});
test('handles ISO-8601 string without misinterpreting it as a number', () => {
expect(
formatter.format(
'2017-02-14T11:22:33.000Z' as unknown as number | Date,
),
).toEqual('2017');
expect(formatter.format('2017-02-14T11:22:33.000Z')).toEqual('2017');
});
test('otherwise returns formatted value', () => {
expect(formatter.format(PREVIEW_TIME)).toEqual('2017');

View File

@@ -29,7 +29,7 @@
"classnames": "^2.5.1",
"d3-array": "^3.2.4",
"lodash": "^4.18.1",
"memoize-one": "^6.0.0",
"memoize-one": "^5.2.1",
"react-table": "^7.8.0",
"regenerator-runtime": "^0.14.1",
"xss": "^1.0.15"

View File

@@ -1,99 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Regression coverage for memoize-one v6 adoption.
*
* memoize-one v6 changed the signature of the (optional) custom `isEqual`
* callback from per-argument `(a, b) => bool` to arg-array
* `(newArgs, lastArgs) => bool`. Of the four memoizeOne callsites in
* `src/transformProps.ts` (`processComparisonDataRecords`,
* `processDataRecords`, `processColumns`, `getBasicColorFormatter`), only
* `processColumns` passes a custom comparator (`isEqualColumns`); its
* signature already takes arg-arrays and is compatible with v6. The other
* three rely on memoize-one's default referential-equality comparator, which
* is unchanged between v5 and v6.
*
* These tests lock those assumptions in by observing the memoization
* behavior through the public `transformProps` API: identical chart-props
* input references should produce referentially-equal `data` and `columns`
* arrays (cache hit), while inputs that differ on the sub-fields each
* memoizer actually compares should produce fresh arrays (cache miss).
*/
import transformProps from '../src/transformProps';
import testData from '../../plugin-chart-table/test/testData';
test('transformProps returns referentially-equal data/columns on identical input (cache hit)', () => {
// processColumns and processDataRecords are both wrapped by memoizeOne at
// module scope. Two consecutive calls with the same chartProps reference
// should hit both caches and yield the same output references.
const first = transformProps(testData.basic);
const second = transformProps(testData.basic);
expect(second.columns).toBe(first.columns);
expect(second.data).toBe(first.data);
});
test('transformProps busts its memoization caches when sub-field inputs change (cache miss)', () => {
const first = transformProps(testData.basic);
// `processColumns` is wrapped with a custom equality (`isEqualColumns`) that
// compares specific chartProps sub-fields by identity — mutating only the
// top-level props reference is NOT enough to bust it. Here we supply a fresh
// `datasource.columnFormats` reference, which `isEqualColumns` compares with
// `===`, forcing `processColumns` to recompute and return a new `columns`
// array.
//
// `processDataRecords` uses memoize-one's default referential equality on
// `(data, columns)`. We also hand it a fresh `queriesData[0].data` array, so
// together with the recomputed `columns` reference it too cache-misses.
const freshProps = {
...testData.basic,
datasource: {
...testData.basic.datasource,
columnFormats: {},
},
queriesData: [
{
...testData.basic.queriesData[0],
data: [...(testData.basic.queriesData[0].data || [])],
},
],
};
const second = transformProps(freshProps);
expect(second.columns).not.toBe(first.columns);
expect(second.data).not.toBe(first.data);
});
test('transformProps memoizes the comparison-mode data pipeline on identical input', () => {
// Exercises `processComparisonDataRecords` (the third of four memoizeOne
// callsites in transformProps.ts) via the `comparison` fixture, which has
// `time_compare` set and therefore flows through the comparison branch
// where `passedData = comparisonData`.
//
// Note: we don't assert reference equality on `columns` here because the
// comparison branch runs `comparisonColumns` through the non-memoized
// `processComparisonColumns` helper, which returns a fresh array on each
// call by design.
const first = transformProps(testData.comparison);
const second = transformProps(testData.comparison);
expect(second.data).toBe(first.data);
});

View File

@@ -29,7 +29,7 @@
"acorn": "^8.16.0",
"d3-array": "^3.2.4",
"lodash": "^4.18.1",
"zod": "^4.4.3"
"zod": "^4.4.1"
},
"peerDependencies": {
"@apache-superset/core": "*",

View File

@@ -20,14 +20,12 @@ import {
AnnotationStyle,
AnnotationType,
AnnotationSourceType,
AxisType,
DataRecord,
FormulaAnnotationLayer,
IntervalAnnotationLayer,
VizType,
ChartDataResponseResult,
} from '@superset-ui/core';
import { GenericDataType } from '@apache-superset/core/common';
import {
LegendOrientation,
LegendType,
@@ -498,133 +496,3 @@ test('should add a formula annotation when X-axis column has dataset-level label
expect(Array.isArray(formulaSeries?.data)).toBe(true);
expect((formulaSeries!.data as unknown[]).length).toBeGreaterThan(0);
});
test('numeric x coltype never gets silently coerced to the Time axis', () => {
// Regression guard for echarts-timeseries-epoch-x-axis-labels investigation.
// Mixed Timeseries must follow the reported coltype: Numeric values stay
// off the Time axis and are not silently reinterpreted as Date instances.
// A future change that coerces Numeric → Time would bring back the "NaN"
// label symptom we were investigating. We also assert that whichever
// formatter is picked, it produces a string and does not emit "NaN".
const ts1 = 1745784000000;
const ts2 = 1745870400000;
const epochRows = [
{ __timestamp: ts1, metric: 10 },
{ __timestamp: ts2, metric: 20 },
];
const epochQueryData = createTestQueryData(epochRows, {
colnames: ['__timestamp', 'metric'],
coltypes: [GenericDataType.Numeric, GenericDataType.Numeric],
label_map: { __timestamp: ['__timestamp'], metric: ['metric'] },
});
const chartProps = createEchartsTimeseriesTestChartProps<
EchartsMixedTimeseriesFormData,
EchartsMixedTimeseriesProps
>({
...MIXED_TIMESERIES_CHART_PROPS_DEFAULTS,
defaultQueriesData: [epochQueryData, epochQueryData],
formData: {
...formData,
x_axis: '__timestamp',
metrics: ['metric'],
metricsB: ['metric'],
groupby: [],
groupbyB: [],
},
queriesData: [epochQueryData, epochQueryData],
});
const { echartOptions } = transformProps(chartProps);
const xAxis = echartOptions.xAxis as {
type: string;
axisLabel: { formatter: (v: number) => string };
};
expect(xAxis.type).not.toBe(AxisType.Time);
const label = xAxis.axisLabel.formatter(ts1);
expect(typeof label).toBe('string');
expect(label).not.toMatch(/NaN/);
});
test('xAxisForceCategorical forces Category axis regardless of Numeric coltype', () => {
const ts1 = 1745784000000;
const ts2 = 1745870400000;
const epochRows = [
{ __timestamp: ts1, metric: 10 },
{ __timestamp: ts2, metric: 20 },
];
const epochQueryData = createTestQueryData(epochRows, {
colnames: ['__timestamp', 'metric'],
coltypes: [GenericDataType.Numeric, GenericDataType.Numeric],
label_map: { __timestamp: ['__timestamp'], metric: ['metric'] },
});
const chartProps = createEchartsTimeseriesTestChartProps<
EchartsMixedTimeseriesFormData,
EchartsMixedTimeseriesProps
>({
...MIXED_TIMESERIES_CHART_PROPS_DEFAULTS,
defaultQueriesData: [epochQueryData, epochQueryData],
formData: {
...formData,
x_axis: '__timestamp',
metrics: ['metric'],
metricsB: ['metric'],
groupby: [],
groupbyB: [],
xAxisForceCategorical: true,
},
queriesData: [epochQueryData, epochQueryData],
});
const { echartOptions } = transformProps(chartProps);
const xAxis = echartOptions.xAxis as { type: string };
expect(xAxis.type).toBe(AxisType.Category);
});
test('temporal x coltype wires the time formatter and Time axis', () => {
// Regression guard: the happy path for mixed-timeseries charts. Ensures
// Temporal coltype still routes through the TimeFormatter so the time axis
// rendering path is exercised by the test suite.
const ts1 = 1745784000000;
const ts2 = 1745870400000;
const temporalRows = [
{ __timestamp: ts1, metric: 10 },
{ __timestamp: ts2, metric: 20 },
];
const temporalQueryData = createTestQueryData(temporalRows, {
colnames: ['__timestamp', 'metric'],
coltypes: [GenericDataType.Temporal, GenericDataType.Numeric],
label_map: { __timestamp: ['__timestamp'], metric: ['metric'] },
});
const chartProps = createEchartsTimeseriesTestChartProps<
EchartsMixedTimeseriesFormData,
EchartsMixedTimeseriesProps
>({
...MIXED_TIMESERIES_CHART_PROPS_DEFAULTS,
defaultQueriesData: [temporalQueryData, temporalQueryData],
formData: {
...formData,
x_axis: '__timestamp',
metrics: ['metric'],
metricsB: ['metric'],
groupby: [],
groupbyB: [],
},
queriesData: [temporalQueryData, temporalQueryData],
});
const { echartOptions } = transformProps(chartProps);
const xAxis = echartOptions.xAxis as {
type: string;
axisLabel: { formatter: (v: Date) => string };
};
expect(xAxis.type).toBe(AxisType.Time);
const label = xAxis.axisLabel.formatter(new Date(ts1));
expect(typeof label).toBe('string');
expect(label).not.toMatch(/NaN/);
});

View File

@@ -20,7 +20,6 @@ import {
AnnotationSourceType,
AnnotationStyle,
AnnotationType,
AxisType,
ComparisonType,
DataRecord,
EventAnnotationLayer,
@@ -1473,118 +1472,6 @@ test('x-axis formatter deduplicates consecutive identical labels for coarse time
expect(label4).toBe('');
});
test('numeric x coltype routes through the number formatter (not the time formatter)', () => {
// Regression guard for echarts-timeseries-epoch-x-axis-labels investigation.
// When the query reports a Numeric x-axis coltype (including epoch-ms-like
// values), Timeseries transformProps must pick the Value axis and run the
// label through getNumberFormatter, not the time formatter. If this ever
// changes, epoch-ms values that arrive as Numeric would suddenly be treated
// as Date instances and could render "NaN" — the symptom that prompted this
// investigation.
const ts1 = 1745784000000;
const ts2 = 1745870400000;
const chartProps = createTestChartProps({
formData: {
metrics: ['metric'],
granularity_sqla: 'ds',
x_axis: '__timestamp',
},
queriesData: [
createTestQueryData(
[
{ __timestamp: ts1, metric: 10 },
{ __timestamp: ts2, metric: 20 },
],
{
colnames: ['__timestamp', 'metric'],
coltypes: [GenericDataType.Numeric, GenericDataType.Numeric],
},
),
],
});
const { echartOptions } = transformProps(chartProps);
const xAxis = echartOptions.xAxis as {
type: string;
axisLabel: { formatter: (v: number) => string };
};
expect(xAxis.type).toBe(AxisType.Value);
const label = xAxis.axisLabel.formatter(ts1);
expect(typeof label).toBe('string');
expect(label).not.toMatch(/NaN/);
});
test('xAxisForceCategorical forces Category axis regardless of Numeric coltype', () => {
const ts1 = 1745784000000;
const ts2 = 1745870400000;
const chartProps = createTestChartProps({
formData: {
metrics: ['metric'],
granularity_sqla: 'ds',
x_axis: '__timestamp',
xAxisForceCategorical: true,
},
queriesData: [
createTestQueryData(
[
{ __timestamp: ts1, metric: 10 },
{ __timestamp: ts2, metric: 20 },
],
{
colnames: ['__timestamp', 'metric'],
coltypes: [GenericDataType.Numeric, GenericDataType.Numeric],
},
),
],
});
const { echartOptions } = transformProps(chartProps);
const xAxis = echartOptions.xAxis as { type: string };
expect(xAxis.type).toBe(AxisType.Category);
});
test('temporal x coltype wires the time formatter and Time axis', () => {
// Regression guard: the happy path for time-series charts. Ensures that
// Temporal coltype keeps routing through the TimeFormatter so a refactor
// does not accidentally drop Date handling (the feared regression that
// sparked this investigation).
const ts1 = 1745784000000;
const ts2 = 1745870400000;
const chartProps = createTestChartProps({
formData: {
metrics: ['metric'],
granularity_sqla: 'ds',
x_axis: '__timestamp',
},
queriesData: [
createTestQueryData(
[
{ __timestamp: ts1, metric: 10 },
{ __timestamp: ts2, metric: 20 },
],
{
colnames: ['__timestamp', 'metric'],
coltypes: [GenericDataType.Temporal, GenericDataType.Numeric],
},
),
],
});
const { echartOptions } = transformProps(chartProps);
const xAxis = echartOptions.xAxis as {
type: string;
axisLabel: { formatter: (v: Date) => string };
};
expect(xAxis.type).toBe(AxisType.Time);
const label = xAxis.axisLabel.formatter(new Date(ts1));
expect(typeof label).toBe('string');
expect(label).not.toMatch(/NaN/);
expect(label).not.toBe(String(ts1));
});
test('should assign distinct dash patterns for multiple time offsets consistently', () => {
const queriesDataWithMultipleOffsets = [
createTestQueryData([

View File

@@ -19,13 +19,11 @@
import {
NumberFormats,
SMART_DATE_ID,
SMART_DATE_VERBOSE_ID,
TimeFormatter,
TimeGranularity,
} from '@superset-ui/core';
import {
getPercentFormatter,
getTooltipTimeFormatter,
getXAxisFormatter,
} from '../../src/utils/formatters';
@@ -181,53 +179,3 @@ test('getXAxisFormatter without time grain should use standard smart date behavi
expect(standardResult).toBe(timeGrainResult);
});
// Regression tests for echarts-timeseries-epoch-x-axis-labels investigation.
// The bug report was that temporal x-axis labels could render as "NaN"
// in some edge cases that we could not reproduce locally. The tests below
// lock in the current behavior of the formatters so that a future refactor
// surfaces any change in contract.
test('getTooltipTimeFormatter returns a TimeFormatter with SMART_DATE_VERBOSE id for SMART_DATE_ID', () => {
const formatter = getTooltipTimeFormatter(SMART_DATE_ID);
expect(formatter).toBeInstanceOf(TimeFormatter);
expect((formatter as TimeFormatter).id).toBe(SMART_DATE_VERBOSE_ID);
});
test('getTooltipTimeFormatter returns a TimeFormatter for a custom format string', () => {
const customFormat = '%Y-%m-%d %H:%M';
const formatter = getTooltipTimeFormatter(customFormat);
expect(formatter).toBeInstanceOf(TimeFormatter);
expect((formatter as TimeFormatter).id).toBe(customFormat);
});
test('getTooltipTimeFormatter falls back to the String constructor when no format is supplied', () => {
expect(getTooltipTimeFormatter()).toBe(String);
expect(getTooltipTimeFormatter(undefined)).toBe(String);
});
test('getXAxisFormatter produces stable SMART_DATE output for a valid Date', () => {
// Documents the current happy-path output format so unexpected changes are
// caught during review.
const formatter = getXAxisFormatter(SMART_DATE_ID) as TimeFormatter;
const result = formatter.format(new Date('2025-01-15T00:00:00.000Z'));
expect(typeof result).toBe('string');
expect(result).not.toMatch(/NaN/);
expect(result.length).toBeGreaterThan(0);
});
test('getXAxisFormatter returns a string for an Invalid Date without throwing', () => {
// If a caller ever passes an Invalid Date (the originally-suspected cause
// of epoch-ms axis labels showing NaN in echarts), the formatter must
// still return a string instead of throwing, so echarts does not blow up
// the chart render. The *content* of that string is format-dependent and
// intentionally not asserted here — only that it is a string.
const formatter = getXAxisFormatter(SMART_DATE_ID) as TimeFormatter;
const invalid = new Date(Number.NaN);
expect(() => formatter.format(invalid)).not.toThrow();
expect(typeof formatter.format(invalid)).toBe('string');
const customFormatter = getXAxisFormatter('%Y-%m-%d') as TimeFormatter;
expect(() => customFormatter.format(invalid)).not.toThrow();
expect(typeof customFormatter.format(invalid)).toBe('string');
});

View File

@@ -1402,7 +1402,7 @@ test('getAxisType with forced categorical', () => {
test('getAxisType treats numeric as category for bar charts', () => {
expect(
(getAxisType as (...args: unknown[]) => AxisType)(
getAxisType(
false,
false,
GenericDataType.Numeric,
@@ -1410,7 +1410,7 @@ test('getAxisType treats numeric as category for bar charts', () => {
),
).toEqual(AxisType.Category);
expect(
(getAxisType as (...args: unknown[]) => AxisType)(
getAxisType(
false,
false,
GenericDataType.Numeric,
@@ -1419,22 +1419,6 @@ test('getAxisType treats numeric as category for bar charts', () => {
).toEqual(AxisType.Value);
});
test('getAxisType does not coerce Numeric x-axis to Time regardless of values', () => {
// Regression guard for echarts-timeseries-epoch-x-axis-labels investigation:
// getAxisType only considers the coltype reported by the query, never the
// actual values. Numeric coltype must stay on a Value axis so a future
// change that introduces implicit temporal coercion is surfaced here.
expect(getAxisType(false, false, GenericDataType.Numeric)).toEqual(
AxisType.Value,
);
expect(getAxisType(false, false, GenericDataType.Temporal)).toEqual(
AxisType.Time,
);
expect(getAxisType(false, false, GenericDataType.String)).toEqual(
AxisType.Category,
);
});
test('getMinAndMaxFromBounds returns empty object when not truncating', () => {
expect(
getMinAndMaxFromBounds(

View File

@@ -29,7 +29,7 @@
"@deck.gl/extensions": "~9.2.9",
"@deck.gl/geo-layers": "~9.2.5",
"@deck.gl/layers": "~9.2.5",
"@deck.gl/mapbox": "~9.3.2",
"@deck.gl/mapbox": "~9.3.1",
"@deck.gl/mesh-layers": "~9.2.5",
"@luma.gl/constants": "~9.2.5",
"@luma.gl/core": "~9.2.5",

View File

@@ -359,9 +359,7 @@ class Chart extends PureComponent<ChartProps, {}> {
width,
} = this.props;
const databaseName =
datasource?.parent?.name ??
(datasource?.database?.name as string | undefined);
const databaseName = datasource?.database?.name as string | undefined;
const isLoading = chartStatus === 'loading';
// Suppress spinner during auto-refresh to avoid visual flicker

View File

@@ -58,7 +58,6 @@ import { Dataset } from '../types';
import TableControls from './DrillDetailTableControls';
import { getDrillPayload } from './utils';
import { ResultsPage } from './types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const PAGE_SIZE = 50;
@@ -374,7 +373,7 @@ export default function DrillDetailPane({
tableContent = <Loading />;
} else if (resultsPage?.total === 0) {
// Render empty state if no results are returned for page
const title = t('No rows were returned for this %s', datasetLabelLower());
const title = t('No rows were returned for this dataset');
tableContent = <EmptyState image="document.svg" title={title} />;
} else {
// Render table if at least one page has successfully loaded

View File

@@ -52,10 +52,6 @@ import type {
DatabaseObject,
} from './types';
import { StyledFormLabel } from './styles';
import {
databaseLabel,
databasesLabelLower,
} from 'src/features/semanticLayers/label';
const DatabaseSelectorWrapper = styled.div<{ horizontal?: boolean }>`
${({ theme, horizontal }) =>
@@ -437,11 +433,7 @@ export function DatabaseSelector({
function renderDatabaseSelect() {
if (sqlLabMode) {
return renderSelectRow(
t(
'Select %s or type to search %s',
databaseLabel().toLowerCase(),
databasesLabelLower(),
),
t('Select database or type to search databases'),
null,
null,
{
@@ -458,24 +450,16 @@ export function DatabaseSelector({
return (
<div>
{renderSelectRow(
databaseLabel(),
t('Database'),
<AsyncSelect
ariaLabel={t(
'Select %s or type to search %s',
databaseLabel().toLowerCase(),
databasesLabelLower(),
)}
ariaLabel={t('Select database or type to search databases')}
optionFilterProps={['database_name', 'value']}
data-test="select-database"
lazyLoading={false}
notFoundContent={emptyState}
onChange={changeDatabase}
value={currentDb}
placeholder={t(
'Select %s or type to search %s',
databaseLabel().toLowerCase(),
databasesLabelLower(),
)}
placeholder={t('Select database or type to search databases')}
disabled={!isDatabaseSelectEnabled || readOnly}
options={loadDatabases}
sortComparator={sortComparator}

View File

@@ -27,7 +27,6 @@ const mockStore = configureStore([thunk]);
const store = mockStore({});
const mockedProps = {
addSuccessToast: jest.fn(),
addDangerToast: () => {},
onDatasourceSave: jest.fn(),
onChange: () => {},
@@ -92,36 +91,3 @@ test('changes the datasource', async () => {
expect(fetchMock.callHistory.calls(/api\/v1\/dataset\/7/)).toHaveLength(1),
);
});
test('does not show success toast or close modal when datasource request fails', async () => {
const props = {
...mockedProps,
addDangerToast: jest.fn(),
addSuccessToast: jest.fn(),
onHide: jest.fn(),
};
(fetchMock.removeRoutes as any)(DATASOURCE_ENDPOINT);
(fetchMock.removeRoutes as any)(DATASOURCES_ENDPOINT);
(fetchMock.removeRoutes as any)(INFO_ENDPOINT);
fetchMock.get(DATASOURCES_ENDPOINT, { result: [mockDatasource['7__table']] });
fetchMock.get(INFO_ENDPOINT, {});
fetchMock.get(DATASOURCE_ENDPOINT, 500);
const { findByTestId, getByRole } = setup(props);
const confirmLink = await findByTestId('datasource-link');
fireEvent.click(confirmLink);
fireEvent.click(getByRole('button', { name: 'Proceed' }));
await waitFor(() => {
expect(fetchMock.callHistory.calls(/api\/v1\/dataset\/7/)).toHaveLength(1);
});
expect(props.addSuccessToast).not.toHaveBeenCalled();
expect(props.onHide).not.toHaveBeenCalled();
(fetchMock.removeRoutes as any)(DATASOURCE_ENDPOINT);
(fetchMock.removeRoutes as any)(DATASOURCES_ENDPOINT);
(fetchMock.removeRoutes as any)(INFO_ENDPOINT);
fetchMock.get(DATASOURCES_ENDPOINT, { result: [mockDatasource['7__table']] });
fetchMock.get(INFO_ENDPOINT, {});
fetchMock.get(DATASOURCE_ENDPOINT, DATASOURCE_PAYLOAD);
});

View File

@@ -53,7 +53,6 @@ import {
import withToasts from 'src/components/MessageToasts/withToasts';
import { InputRef } from 'antd';
import type { Datasource, ChangeDatasourceModalProps } from '../types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const CONFIRM_WARNING_MESSAGE = t(
'Warning! Changing the dataset may break the chart if the metadata does not exist.',
@@ -110,11 +109,7 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
const {
state: { loading, resourceCollection, resourceCount },
fetchData,
} = useListViewResource<Dataset>(
'dataset',
datasetLabelLower(),
addDangerToast,
);
} = useListViewResource<Dataset>('dataset', t('dataset'), addDangerToast);
const selectDatasource = useCallback((datasource: Datasource) => {
setConfirmChange(true);
@@ -171,27 +166,28 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
setPageIndex(0);
};
const handleChangeConfirm = async () => {
try {
const { json } = await SupersetClient.get({
endpoint: `/api/v1/dataset/${confirmedDataset?.id}`,
const handleChangeConfirm = () => {
SupersetClient.get({
endpoint: `/api/v1/dataset/${confirmedDataset?.id}`,
})
.then(({ json }) => {
// eslint-disable-next-line no-param-reassign
json.result.type = 'table';
onDatasourceSave(json.result);
onChange(`${confirmedDataset?.id}__table`);
})
.catch(response => {
getClientErrorObject(response).then(
({ error, message }: { error: any; message: string }) => {
const errorMessage = error
? error.error || error.statusText || error
: message;
addDangerToast(errorMessage);
},
);
});
// eslint-disable-next-line no-param-reassign
json.result.type = 'table';
onDatasourceSave(json.result);
onChange(`${confirmedDataset?.id}__table`);
onHide();
addSuccessToast(t('Successfully changed %s!', datasetLabelLower()));
} catch (response) {
getClientErrorObject(response).then(
({ error, message }: { error: any; message: string }) => {
const errorMessage = error
? error.error || error.statusText || error
: message;
addDangerToast(errorMessage);
},
);
}
onHide();
addSuccessToast(t('Successfully changed dataset!'));
};
const handlerCancelConfirm = () => {
@@ -257,7 +253,7 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
onHide={onHide}
responsive
name="Swap dataset"
title={t('Swap %s', datasetLabelLower())}
title={t('Swap dataset')}
width={confirmChange ? '432px' : ''}
height={confirmChange ? 'auto' : '540px'}
hideFooter={!confirmChange}

View File

@@ -20,7 +20,6 @@ import { t } from '@apache-superset/core/translation';
import type { ErrorMessageComponentProps } from './types';
import { ErrorAlert } from './ErrorAlert';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export function DatasetNotFoundErrorMessage({
error,
@@ -30,7 +29,7 @@ export function DatasetNotFoundErrorMessage({
const { level, message } = error;
return (
<ErrorAlert
errorType={t('Missing %s', datasetLabelLower())}
errorType={t('Missing dataset')}
message={subtitle}
description={message}
type={level}

View File

@@ -60,12 +60,6 @@ function UIFilters(
filter.current?.clearFilter?.();
});
},
clearFilterById: (id: string) => {
const index = filters.findIndex(f => f.id === id);
if (index >= 0) {
filterRefs[index]?.current?.clearFilter?.();
}
},
}));
return (

View File

@@ -19,14 +19,7 @@
import { t } from '@apache-superset/core/translation';
import { Alert } from '@apache-superset/core/components';
import { styled } from '@apache-superset/core/theme';
import {
useCallback,
useEffect,
useLayoutEffect,
useRef,
useState,
ReactNode,
} from 'react';
import { useCallback, useEffect, useRef, useState, ReactNode } from 'react';
import cx from 'classnames';
import TableCollection from '@superset-ui/core/components/TableCollection';
import BulkTagModal from 'src/features/tags/BulkTagModal';
@@ -272,11 +265,6 @@ export interface ListViewProps<T extends object = any> {
columnsForWrapText?: string[];
enableBulkTag?: boolean;
bulkTagResourceName?: string;
/** Optional ref exposed to callers for programmatic filter control. */
filtersRef?: React.RefObject<{
clearFilters: () => void;
clearFilterById: (id: string) => void;
}>;
}
export function ListView<T extends object = any>({
@@ -303,7 +291,6 @@ export function ListView<T extends object = any>({
columnsForWrapText,
enableBulkTag = false,
bulkTagResourceName,
filtersRef,
addSuccessToast,
addDangerToast,
}: ListViewProps<T>) {
@@ -351,21 +338,7 @@ export function ListView<T extends object = any>({
});
}
const filterControlsRef = useRef<{
clearFilters: () => void;
clearFilterById: (id: string) => void;
}>(null);
// Wire the optional external filtersRef to our internal filterControlsRef.
// useLayoutEffect fires synchronously after DOM mutations, guaranteeing the
// ref is populated before the first paint and after every update.
useLayoutEffect(() => {
if (filtersRef) {
(
filtersRef as React.MutableRefObject<typeof filterControlsRef.current>
).current = filterControlsRef.current;
}
});
const filterControlsRef = useRef<{ clearFilters: () => void }>(null);
const handleClearFilterControls = useCallback(() => {
if (query.filters) {

View File

@@ -36,7 +36,6 @@ import { Tooltip, ImageLoader } from '@superset-ui/core/components';
import { GenericLink, usePluginContext } from 'src/components';
import { assetUrl } from 'src/utils/assetUrl';
import { Theme } from '@emotion/react';
import { datasetLabel } from 'src/features/semanticLayers/label';
const FALLBACK_THUMBNAIL_URL = assetUrl(
'/static/assets/images/chart-card-fallback.svg',
@@ -284,7 +283,7 @@ const AddSliceCard: FC<{
>
<MetadataItem label={t('Viz type')} value={vizName} />
<MetadataItem
label={datasetLabel()}
label={t('Dataset')}
value={
datasourceUrl ? (
<GenericLink to={datasourceUrl}>

View File

@@ -55,7 +55,6 @@ import type { ConnectDragSource } from 'react-dnd';
import AddSliceCard from './AddSliceCard';
import AddSliceDragPreview from './dnd/AddSliceDragPreview';
import { DragDroppable } from './dnd/DragDroppable';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export type SliceAdderProps = {
theme: Theme;
@@ -89,7 +88,7 @@ const KEYS_TO_FILTERS = ['slice_name', 'viz_type', 'datasource_name'];
const KEYS_TO_SORT = {
slice_name: t('name'),
viz_type: t('viz type'),
datasource_name: datasetLabelLower(),
datasource_name: t('dataset'),
changed_on: t('recent'),
};

View File

@@ -51,10 +51,6 @@ import { addDangerToast } from 'src/components/MessageToasts/actions';
import { cachedSupersetGet } from 'src/utils/cachedSupersetGet';
import { dispatchChartCustomizationHoverAction } from './utils';
import { mergeExtraFormData } from '../../utils';
import {
datasetLabel as getDatasetLabel,
datasetLabelLower,
} from 'src/features/semanticLayers/label';
interface ColumnApiResponse {
column_name?: string;
@@ -266,9 +262,9 @@ const GroupByFilterCardContent: FC<{
</Row>
<Row>
<RowLabel>{getDatasetLabel()}</RowLabel>
<RowLabel>{t('Dataset')}</RowLabel>
<RowValue>
{typeof datasetLabel === 'string' ? datasetLabel : t('Dataset')}
{typeof datasetLabel === 'string' ? datasetLabel : 'Dataset'}
</RowValue>
</Row>
@@ -479,13 +475,7 @@ const GroupByFilterCard: FC<GroupByFilterCardProps> = ({
} catch (error) {
setColumnOptions([]);
dispatch(
addDangerToast(
t(
'Failed to load columns for %s %s',
datasetLabelLower(),
datasetId,
),
),
addDangerToast(t('Failed to load columns for dataset %s', datasetId)),
);
} finally {
setLoading(false);

View File

@@ -30,11 +30,6 @@ import {
Dataset,
DatasetSelectLabel,
} from 'src/features/datasets/DatasetSelectLabel';
import {
datasetLabel,
datasetLabelLower,
datasetsLabelLower,
} from 'src/features/semanticLayers/label';
interface DatasetSelectProps {
onChange: (value: { label: string | ReactNode; value: number }) => void;
@@ -106,13 +101,13 @@ const DatasetSelect = ({
return (
<AsyncSelect
ariaLabel={datasetLabel()}
ariaLabel={t('Dataset')}
value={value}
options={loadDatasetOptionsCallback}
onChange={onChange}
optionFilterProps={['table_name']}
notFoundContent={t('No compatible %s found', datasetsLabelLower())}
placeholder={t('Select a %s', datasetLabelLower())}
notFoundContent={t('No compatible datasets found')}
placeholder={t('Select a dataset')}
/>
);
};

View File

@@ -120,7 +120,6 @@ import {
INPUT_WIDTH,
} from './constants';
import DependencyList from './DependencyList';
import { datasetLabel } from 'src/features/semanticLayers/label';
const FORM_ITEM_WIDTH = 260;
@@ -326,12 +325,6 @@ const FiltersConfigForm = (
const filters = form.getFieldValue('filters');
const formValues = filters?.[filterId];
const formFilter = formValues || undoFormValues || defaultFormFilter;
const formFilterWithTimeGrains = formFilter as typeof formFilter & {
time_grains?: string[];
};
const filterToEditWithTimeGrains = filterToEdit as
| (Filter & { time_grains?: string[] })
| undefined;
const handleModifyFilter = useCallback(() => {
if (onModifyFilter) {
@@ -594,8 +587,7 @@ const FiltersConfigForm = (
!!filterToEdit?.time_range;
const hasTimeGrainPreFilter = !!(
formFilterWithTimeGrains?.time_grains?.length ||
filterToEditWithTimeGrains?.time_grains?.length
formFilter?.time_grains?.length || filterToEdit?.time_grains?.length
);
const hasEnableSingleValue =
@@ -1060,7 +1052,7 @@ const FiltersConfigForm = (
<StyledFormItem
expanded={expanded}
name={['filters', filterId, 'dataset']}
label={<StyledLabel>{datasetLabel()}</StyledLabel>}
label={<StyledLabel>{t('Dataset')}</StyledLabel>}
initialValue={
datasetDetails
? {
@@ -1080,10 +1072,7 @@ const FiltersConfigForm = (
rules={[
{
required: !isRemoved,
message:
datasetLabel() === t('Datasource')
? t('Datasource is required')
: t('Dataset is required'),
message: t('Dataset is required'),
},
]}
{...getFiltersConfigModalTestId('datasource-input')}
@@ -1109,7 +1098,7 @@ const FiltersConfigForm = (
) : (
<StyledFormItem
expanded={expanded}
label={<StyledLabel>{datasetLabel()}</StyledLabel>}
label={<StyledLabel>{t('Dataset')}</StyledLabel>}
>
<Loading position="inline-centered" />
</StyledFormItem>
@@ -1333,7 +1322,7 @@ const FiltersConfigForm = (
'time_grains',
]}
initialValue={
filterToEditWithTimeGrains?.time_grains
filterToEdit?.time_grains
}
{...getFiltersConfigModalTestId(
'time-grain-allowlist',

View File

@@ -113,7 +113,7 @@ function transformFormInput(
excluded: [],
};
const result: Filter & { time_grains?: string[] } = {
return {
id,
type: NativeFilterType.NativeFilter,
name: formInputs.name,
@@ -127,17 +127,14 @@ function transformFormInput(
adhoc_filters: formInputs.adhoc_filters,
time_range: formInputs.time_range,
granularity_sqla: formInputs.granularity_sqla,
time_grains: formInputs.time_grains?.length
? formInputs.time_grains
: undefined,
sortMetric: formInputs.sortMetric ?? null,
requiredFirst: formInputs.requiredFirst
? Object.values(formInputs.requiredFirst).find(rf => rf)
: undefined,
};
if (formInputs.time_grains?.length) {
result.time_grains = formInputs.time_grains;
}
return result;
}
function transformSavedFilter(id: string, filter: Filter): Filter {

View File

@@ -19,7 +19,7 @@
import 'src/public-path';
import { lazy, Suspense } from 'react';
import { createRoot, type Root } from 'react-dom/client';
import { createRoot } from 'react-dom/client';
import { BrowserRouter as Router, Route } from 'react-router-dom';
import { Global } from '@emotion/react';
import { t } from '@apache-superset/core/translation';
@@ -150,8 +150,6 @@ if (!window.parent || window.parent === window) {
// }
let displayedUnauthorizedToast = false;
let root: Root | null = null;
let started = false;
/**
* If there is a problem with the guest token, we will start getting
@@ -177,8 +175,6 @@ function guestUnauthorizedHandler() {
}
function start() {
if (started) return undefined;
started = true;
const getMeWithRole = makeApi<void, { result: UserWithPermissionsAndRoles }>({
method: 'GET',
endpoint: '/api/v1/me/roles/',
@@ -193,21 +189,16 @@ function start() {
type: USER_LOADED,
user: result,
});
if (!root) {
root = createRoot(appMountPoint);
}
root.render(<EmbeddedApp />);
createRoot(appMountPoint).render(<EmbeddedApp />);
},
err => {
// something is most likely wrong with the guest token; reset the guard
// so a rehandshake with a valid token can retry.
// something is most likely wrong with the guest token
logging.error(err);
showFailureMessage(
t(
'Something went wrong with embedded authentication. Check the dev console for details.',
),
);
started = false;
},
);
}
@@ -252,11 +243,16 @@ window.addEventListener('message', function embeddedPageInitializer(event) {
debug: debugMode,
});
let started = false;
Switchboard.defineMethod(
'guestToken',
({ guestToken }: { guestToken: string }) => {
setupGuestClient(guestToken);
start();
if (!started) {
start();
started = true;
}
},
);
@@ -326,7 +322,7 @@ window.addEventListener('message', function embeddedPageInitializer(event) {
}
});
// Clean up theme controller and unmount React root on page unload
// Clean up theme controller on page unload
window.addEventListener('beforeunload', () => {
try {
const controller = getThemeController();
@@ -337,10 +333,6 @@ window.addEventListener('beforeunload', () => {
} catch (error) {
logging.warn('Failed to destroy theme controller:', error);
}
if (root) {
root.unmount();
root = null;
}
});
log('embed page is ready to receive messages');

View File

@@ -17,7 +17,6 @@
* under the License.
*/
import type { AnyAction } from 'redux';
import { SupersetClient } from '@superset-ui/core';
import { defaultState } from 'src/explore/store';
import exploreReducer, {
ExploreState,
@@ -241,107 +240,3 @@ describe('reducers', () => {
);
});
});
test('fetchCompatibility ignores stale async responses', async () => {
const dispatch = jest.fn();
let resolveFirst: (value: {
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}) => void;
let resolveSecond: (value: {
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}) => void;
const firstPromise = new Promise<{
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}>(resolve => {
resolveFirst = resolve;
});
const secondPromise = new Promise<{
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}>(resolve => {
resolveSecond = resolve;
});
const postSpy = jest.spyOn(SupersetClient, 'post');
postSpy
.mockImplementationOnce(() => firstPromise as never)
.mockImplementationOnce(() => secondPromise as never);
const firstThunk = actions.fetchCompatibility(
'semantic_view',
7,
['m1'],
['d1'],
)(dispatch as any);
const secondThunk = actions.fetchCompatibility(
'semantic_view',
7,
['m2'],
['d2'],
)(dispatch as any);
resolveSecond!({
json: {
result: {
compatible_metrics: ['m2'],
compatible_dimensions: ['d2'],
},
},
});
await secondThunk;
resolveFirst!({
json: {
result: {
compatible_metrics: ['m1'],
compatible_dimensions: ['d1'],
},
},
});
await firstThunk;
const compatibilityActions = dispatch.mock.calls
.map(call => call[0])
.filter((action: AnyAction) => action.type === actions.SET_COMPATIBILITY);
const successfulActions = compatibilityActions.filter(
(action: AnyAction) => action.compatibilityLoading === false,
);
expect(successfulActions).toContainEqual(
expect.objectContaining({
compatibleMetrics: ['m2'],
compatibleDimensions: ['d2'],
compatibilityLoading: false,
}),
);
expect(successfulActions).not.toContainEqual(
expect.objectContaining({
compatibleMetrics: ['m1'],
compatibleDimensions: ['d1'],
compatibilityLoading: false,
}),
);
postSpy.mockRestore();
});

View File

@@ -166,90 +166,6 @@ export function updateExploreChartState(
};
}
export const SET_COMPATIBILITY = 'SET_COMPATIBILITY';
export function setCompatibility(payload: {
compatibleMetrics: string[] | null;
compatibleDimensions: string[] | null;
compatibilityLoading: boolean;
}) {
return { type: SET_COMPATIBILITY, ...payload };
}
let compatibilityRequestSeq = 0;
/**
* Fetch compatible metrics and dimensions for the current selection.
*
* Only fires for semantic views — SQL datasets always have full compatibility
* so we short-circuit to `null` (no filtering) for everything else.
*
* Covers both real-time selection changes (M3) and saved-chart loading (M4):
* call this thunk on mount as well as whenever the metric / dimension
* selection changes in Explore.
*/
export function fetchCompatibility(
datasourceType: string,
datasourceId: number,
selectedMetrics: string[],
selectedDimensions: string[],
) {
return async (dispatch: Dispatch) => {
compatibilityRequestSeq += 1;
const requestSeq = compatibilityRequestSeq;
if (datasourceType !== 'semantic_view') {
dispatch(
setCompatibility({
compatibleMetrics: null,
compatibleDimensions: null,
compatibilityLoading: false,
}),
);
return;
}
dispatch(
setCompatibility({
compatibleMetrics: null,
compatibleDimensions: null,
compatibilityLoading: true,
}),
);
try {
const { json } = await SupersetClient.post({
endpoint: `/api/v1/datasource/${datasourceType}/${datasourceId}/compatible`,
jsonPayload: {
selected_metrics: selectedMetrics,
selected_dimensions: selectedDimensions,
},
});
if (requestSeq !== compatibilityRequestSeq) {
return;
}
dispatch(
setCompatibility({
compatibleMetrics: json.result.compatible_metrics,
compatibleDimensions: json.result.compatible_dimensions,
compatibilityLoading: false,
}),
);
} catch {
// On error fall back to no filtering so the user is never blocked.
if (requestSeq !== compatibilityRequestSeq) {
return;
}
dispatch(
setCompatibility({
compatibleMetrics: null,
compatibleDimensions: null,
compatibilityLoading: false,
}),
);
}
};
}
export const SET_STASH_FORM_DATA = 'SET_STASH_FORM_DATA';
export function setStashFormData(
isHidden: boolean,
@@ -292,7 +208,6 @@ export const exploreActions = {
sliceUpdated,
setForceQuery,
syncDatasourceMetadata,
fetchCompatibility,
};
export type ExploreActions = typeof exploreActions;

View File

@@ -24,7 +24,7 @@ import {
ExplorePageState,
} from 'src/explore/types';
import { getChartKey } from 'src/explore/exploreUtils';
import { getControlsState, handleDeprecatedControls } from 'src/explore/store';
import { getControlsState } from 'src/explore/store';
import { Dispatch } from 'redux';
import {
Currency,
@@ -116,12 +116,6 @@ export const hydrateExplore =
]),
);
// Normalize deprecated controls (e.g., migrate old per-axis matrixify
// flags to matrixify_enable) before form_data is stored in Redux state.
// getControlsState also calls this on its own copy, but state.form_data
// must reflect the same migration so the two stay consistent.
handleDeprecatedControls(initialFormData);
const initialExploreState = {
form_data: initialFormData,
slice: initialSlice,

View File

@@ -151,8 +151,11 @@ export const getSlicePayload = async (
const [id, typeString] = formData.datasource.split('__');
datasourceId = parseInt(id, 10);
if (Object.values(DatasourceType).includes(typeString as DatasourceType)) {
datasourceType = typeString as DatasourceType;
const formattedTypeString =
typeString.charAt(0).toUpperCase() + typeString.slice(1);
if (formattedTypeString in DatasourceType) {
datasourceType =
DatasourceType[formattedTypeString as keyof typeof DatasourceType];
}
}

View File

@@ -19,7 +19,6 @@
import { useState, useEffect, useMemo, useCallback } from 'react';
import { t } from '@apache-superset/core/translation';
import { ensureIsArray } from '@superset-ui/core';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
import { styled } from '@apache-superset/core/theme';
import { EmptyState, Loading } from '@superset-ui/core/components';
import { GenericDataType } from '@apache-superset/core/common';
@@ -161,10 +160,7 @@ export const SamplesPane = ({
}
if (data.length === 0) {
const title = t(
'No samples were returned for this %s',
datasetLabelLower(),
);
const title = t('No samples were returned for this dataset');
return <EmptyState image="document.svg" title={title} />;
}

View File

@@ -26,7 +26,7 @@ test('should render', async () => {
value={{ metric_name: 'test', uuid: '1' }}
type={DndItemType.Metric}
/>,
{ useDnd: true, useRedux: true, initialState: { explore: {} } },
{ useDnd: true },
);
expect(
@@ -41,7 +41,7 @@ test('should have attribute draggable:true', async () => {
value={{ metric_name: 'test', uuid: '1' }}
type={DndItemType.Metric}
/>,
{ useDnd: true, useRedux: true, initialState: { explore: {} } },
{ useDnd: true },
);
expect(

View File

@@ -16,9 +16,8 @@
* specific language governing permissions and limitations
* under the License.
*/
import { RefObject, useMemo } from 'react';
import { RefObject } from 'react';
import { useDrag } from 'react-dnd';
import { useSelector } from 'react-redux';
import { Metric } from '@superset-ui/core';
import { css, styled, useTheme } from '@apache-superset/core/theme';
import { ColumnMeta } from '@superset-ui/chart-controls';
@@ -28,7 +27,6 @@ import {
StyledMetricOption,
} from 'src/explore/components/optionRenderers';
import { Icons } from '@superset-ui/core/components/Icons';
import { ExplorePageState } from 'src/explore/types';
import { DatasourcePanelDndItem } from '../types';
@@ -72,38 +70,11 @@ export default function DatasourcePanelDragOption(
) {
const { labelRef, showTooltip, type, value } = props;
const theme = useTheme();
// Read compatibility lists from Redux.
// `null` means no filtering is active (SQL datasets, or no selection yet).
const compatibleMetrics = useSelector<
ExplorePageState,
string[] | null | undefined
>(state => state.explore.compatibleMetrics);
const compatibleDimensions = useSelector<
ExplorePageState,
string[] | null | undefined
>(state => state.explore.compatibleDimensions);
// An item is compatible when the list is null (no filter) or when its
// name explicitly appears in the list returned by the backend.
const isCompatible = useMemo(() => {
if (type === DndItemType.Metric) {
if (!compatibleMetrics) return true;
return compatibleMetrics.includes((value as Metric).metric_name);
}
if (type === DndItemType.Column) {
if (!compatibleDimensions) return true;
return compatibleDimensions.includes((value as ColumnMeta).column_name);
}
return true;
}, [type, value, compatibleMetrics, compatibleDimensions]);
const [{ isDragging }, drag] = useDrag({
item: {
value: props.value,
type: props.type,
},
canDrag: isCompatible,
collect: monitor => ({
isDragging: monitor.isDragging(),
}),
@@ -116,14 +87,7 @@ export default function DatasourcePanelDragOption(
};
return (
<DatasourceItemContainer
data-test="DatasourcePanelDragOption"
ref={drag}
style={{
opacity: isCompatible ? 1 : 0.35,
cursor: isCompatible ? 'grab' : 'not-allowed',
}}
>
<DatasourceItemContainer data-test="DatasourcePanelDragOption" ref={drag}>
{type === DndItemType.Column ? (
<StyledColumnOption column={value as ColumnMeta} {...optionProps} />
) : (

View File

@@ -89,7 +89,7 @@ const setup = (data: DatasourcePanelItemProps['data'] = mockData) =>
<DatasourcePanelItem index={index} data={data} style={{}} />
))}
</>,
{ useDnd: true, useRedux: true, initialState: { explore: {} } },
{ useDnd: true },
);
test('renders each item accordingly', () => {

View File

@@ -122,7 +122,7 @@ const sortColumns = (slice: DatasourcePanelColumn[]) =>
if (col2?.is_dttm && !col1?.is_dttm) {
return 1;
}
return (col1?.column_name ?? '').localeCompare(col2?.column_name ?? '');
return 0;
})
.sort((a, b) => (b?.is_certified ?? 0) - (a?.is_certified ?? 0));
@@ -191,9 +191,7 @@ export default function DataSourcePanel({
const filteredMetrics = useMemo(() => {
if (!searchKeyword) {
return [...(allowedMetrics ?? [])].sort((a, b) =>
(a?.metric_name ?? '').localeCompare(b?.metric_name ?? ''),
);
return allowedMetrics ?? [];
}
return matchSorter(allowedMetrics, searchKeyword, {
keys: [

View File

@@ -36,7 +36,6 @@ import {
JsonObject,
MatrixifyFormData,
DatasourceType,
ensureIsArray,
} from '@superset-ui/core';
import {
ControlStateMapping,
@@ -413,48 +412,6 @@ function ExploreViewContainer(props: ExploreViewContainerProps) {
[originalTitle, theme?.brandAppName, theme?.brandLogoAlt],
);
// M3 + M4: fire compatibility check on mount and whenever the metric /
// dimension selection changes. Only semantic views use the endpoint;
// SQL datasets short-circuit to null inside fetchCompatibility.
const selectedMetrics = useMemo(
() =>
ensureIsArray(props.form_data.metrics).filter(
(m): m is string => typeof m === 'string',
),
// eslint-disable-next-line react-hooks/exhaustive-deps
[JSON.stringify(props.form_data.metrics)],
);
const selectedDimensions = useMemo(
() =>
[
...ensureIsArray(props.form_data.groupby),
...ensureIsArray(props.form_data.columns),
...(typeof props.form_data.x_axis === 'string'
? [props.form_data.x_axis]
: []),
].filter((d): d is string => typeof d === 'string'),
// eslint-disable-next-line react-hooks/exhaustive-deps
[
JSON.stringify(props.form_data.groupby),
JSON.stringify(props.form_data.columns),
props.form_data.x_axis,
],
);
useEffect(() => {
props.actions.fetchCompatibility(
props.datasource.type,
props.datasource.id as number,
selectedMetrics,
selectedDimensions,
);
// props.datasource.id covers the saved-chart-loading case (M4)
}, [
props.datasource.id,
props.datasource.type,
selectedMetrics,
selectedDimensions,
]);
const addHistory = useCallback(
async ({
isReplace = false,

View File

@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import React, { useCallback, useMemo, useRef } from 'react';
import React, { useCallback, useMemo } from 'react';
import { IconTooltip, List } from '@superset-ui/core/components';
import { nanoid } from 'nanoid';
import { t } from '@apache-superset/core/translation';
@@ -185,22 +185,8 @@ function CollectionControl({
}),
);
// Two items can collide when keyAccessor returns falsy and the index
// fallback is used — breaking dnd-kit reordering and React reconciliation.
// Assign a stable nanoid per item ref when no key is available.
const generatedIdsRef = useRef<WeakMap<CollectionItem, string>>(new WeakMap());
const itemIds = useMemo(
() =>
value.map(item => {
const accessed = keyAccessor(item);
if (accessed) return accessed;
let id = generatedIdsRef.current.get(item);
if (!id) {
id = nanoid(11);
generatedIdsRef.current.set(item, id);
}
return id;
}),
() => value.map((item, i) => keyAccessor(item) || String(i)),
[value, keyAccessor],
);
@@ -211,16 +197,8 @@ function CollectionControl({
const onChangeItem = useCallback(
(i: number, itemValue: CollectionItem) => {
const oldItem = value[i];
const newItem = { ...oldItem, ...itemValue };
// Replacing the object would orphan the WeakMap-stored id and remount
// the row. Carry the generated id over to the new ref.
const generatedId = generatedIdsRef.current.get(oldItem);
if (generatedId) {
generatedIdsRef.current.set(newItem, generatedId);
}
const newValue = [...value];
newValue[i] = newItem;
newValue[i] = { ...value[i], ...itemValue };
onChange?.(newValue);
},
[value, onChange],

View File

@@ -19,19 +19,15 @@
import { SHARED_COLUMN_CONFIG_PROPS } from './constants';
const tokenSeparators =
SHARED_COLUMN_CONFIG_PROPS.d3NumberFormat.tokenSeparators;
test('should allow commas in D3 format inputs', () => {
const { options } = SHARED_COLUMN_CONFIG_PROPS.d3NumberFormat;
const labels = (options ?? []).map((option: { label: unknown }) =>
String(option.label),
);
expect(labels.some((label: string) => label.includes(','))).toBe(true);
expect(tokenSeparators).toBeDefined();
expect(tokenSeparators).not.toContain(',');
});
test('should use defaults from Select token separators', () => {
expect(
Object.prototype.hasOwnProperty.call(
SHARED_COLUMN_CONFIG_PROPS.d3NumberFormat,
'tokenSeparators',
),
).toBe(false);
test('should have correct default token separators', () => {
const expectedSeparators = ['\r\n', '\n', '\t', ';'];
expect(tokenSeparators).toEqual(expectedSeparators);
});

View File

@@ -58,6 +58,8 @@ const d3NumberFormat: ControlFormItemSpec<'Select'> = {
creatable: true,
minWidth: '14em',
debounceDelay: 500,
// default value tokenSeparators in superset-frontend/packages/superset-ui-core/src/components/Select/constants.ts
tokenSeparators: ['\r\n', '\n', '\t', ';'],
};
const d3TimeFormat: ControlFormItemSpec<'Select'> = {

View File

@@ -40,13 +40,11 @@ import {
DatasourceModal,
ErrorAlert,
} from 'src/components';
import SemanticViewEditModal from 'src/features/semanticViews/SemanticViewEditModal';
import { Menu } from '@superset-ui/core/components/Menu';
import { Icons } from '@superset-ui/core/components/Icons';
import WarningIconWithTooltip from '@superset-ui/core/components/WarningIconWithTooltip';
import { URL_PARAMS } from 'src/constants';
import { getDatasourceAsSaveableDataset } from 'src/utils/datasourceUtils';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
import {
userHasPermission,
isUserAdmin,
@@ -70,7 +68,6 @@ interface ExtendedDatasource extends Datasource {
}>;
extra?: string;
health_check_message?: string;
cache_timeout?: number | null;
database?: {
id: number;
database_name: string;
@@ -378,7 +375,7 @@ class DatasourceControl extends PureComponent<
const canAccessSqlLab = userHasPermission(user, 'SQL Lab', 'menu_access');
const editText = t('Edit %s', datasetLabelLower());
const editText = t('Edit dataset');
const requestedQuery = {
datasourceKey: `${datasource.id}__${datasource.type}`,
sql: datasource.sql,
@@ -390,9 +387,7 @@ class DatasourceControl extends PureComponent<
label: !allowEdit ? (
<Tooltip
title={t(
'You must be a %s owner in order to edit. Please reach out to a %s owner to request modifications or edit access.',
datasetLabelLower(),
datasetLabelLower(),
'You must be a dataset owner in order to edit. Please reach out to a dataset owner to request modifications or edit access.',
)}
>
{editText}
@@ -407,7 +402,7 @@ class DatasourceControl extends PureComponent<
defaultDatasourceMenuItems.push({
key: CHANGE_DATASET,
label: t('Swap %s', datasetLabelLower()),
label: t('Swap dataset'),
});
if (!isMissingDatasource && canAccessSqlLab) {
@@ -486,7 +481,7 @@ class DatasourceControl extends PureComponent<
queryDatasourceMenuItems.push({
key: SAVE_AS_DATASET,
label: <span>{t('Save as %s', datasetLabelLower())}</span>,
label: <span>{t('Save as dataset')}</span>,
});
const queryDatasourceMenu = (
@@ -500,7 +495,7 @@ class DatasourceControl extends PureComponent<
const titleText =
isMissingDatasource && !datasource.name
? t('Missing %s', datasetLabelLower())
? t('Missing dataset')
: getDatasourceTitle(datasource);
const tooltip = titleText;
@@ -566,15 +561,14 @@ class DatasourceControl extends PureComponent<
) : (
<ErrorAlert
type="warning"
message={t('Missing %s', datasetLabelLower())}
message={t('Missing dataset')}
descriptionPre={false}
descriptionDetailsCollapsed={false}
descriptionDetails={
<>
<p>
{t(
'The %s linked to this chart may have been deleted.',
datasetLabelLower(),
'The dataset linked to this chart may have been deleted.',
)}
</p>
<p>
@@ -584,7 +578,7 @@ class DatasourceControl extends PureComponent<
this.handleMenuItemClick({ key: CHANGE_DATASET })
}
>
{t('Swap %s', datasetLabelLower())}
{t('Swap dataset')}
</Button>
</p>
</>
@@ -593,27 +587,14 @@ class DatasourceControl extends PureComponent<
)}
</div>
)}
{showEditDatasourceModal &&
(String(datasource.type) === 'semantic_view' ? (
<SemanticViewEditModal
show={showEditDatasourceModal}
onHide={this.toggleEditDatasourceModal}
onSave={() => this.onDatasourceSave(datasource)}
semanticView={{
id: datasource.id,
table_name: datasource.name,
description: datasource.description,
cache_timeout: datasource.cache_timeout,
}}
/>
) : (
<DatasourceModal
datasource={datasource}
show={showEditDatasourceModal}
onDatasourceSave={this.onDatasourceSave}
onHide={this.toggleEditDatasourceModal}
/>
))}
{showEditDatasourceModal && (
<DatasourceModal
datasource={datasource}
show={showEditDatasourceModal}
onDatasourceSave={this.onDatasourceSave}
onHide={this.toggleEditDatasourceModal}
/>
)}
{showChangeDatasourceModal && (
<ChangeDatasourceModal
onDatasourceSave={this.onDatasourceSave}

View File

@@ -142,10 +142,6 @@ const ColumnSelectPopover = ({
const datasourceType = useSelector<ExplorePageState, string | undefined>(
state => state.explore.datasource.type,
);
const compatibleDimensions = useSelector<
ExplorePageState,
string[] | null | undefined
>(state => state.explore.compatibleDimensions);
const [initialLabel] = useState(label);
const [initialAdhocColumn, initialCalculatedColumn, initialSimpleColumn] =
getInitialColumnValues(editedColumn);
@@ -171,22 +167,21 @@ const ColumnSelectPopover = ({
const sqlEditorRef = useRef<editors.EditorHandle>(null);
const [calculatedColumns, simpleColumns] = useMemo(() => {
const [calc, simple] = (columns ?? []).reduce(
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
if (column.expression) {
acc[0].push(column);
} else {
acc[1].push(column);
}
return acc;
},
[[], []],
);
const alpha = (a: ColumnMeta, b: ColumnMeta) =>
(a.column_name ?? '').localeCompare(b.column_name ?? '');
return [calc.sort(alpha), simple.sort(alpha)];
}, [columns]);
const [calculatedColumns, simpleColumns] = useMemo(
() =>
columns?.reduce(
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
if (column.expression) {
acc[0].push(column);
} else {
acc[1].push(column);
}
return acc;
},
[[], []],
),
[columns],
);
// Filter metrics that are already selected in the chart
const availableMetrics = useMemo(() => {
@@ -556,11 +551,6 @@ const ColumnSelectPopover = ({
key: `column-${simpleColumn.column_name}`,
column_name: simpleColumn.column_name,
verbose_name: simpleColumn.verbose_name ?? '',
disabled:
compatibleDimensions != null &&
!compatibleDimensions.includes(
simpleColumn.column_name,
),
})),
...availableMetrics.map(metric => ({
value: metric.metric_name,
@@ -575,9 +565,6 @@ const ColumnSelectPopover = ({
key: `metric-${metric.metric_name}`,
metric_name: metric.metric_name,
verbose_name: metric.verbose_name ?? '',
disabled:
compatibleDimensions != null &&
!compatibleDimensions.includes(metric.metric_name),
})),
]}
optionFilterProps={[

View File

@@ -23,7 +23,6 @@ import AdhocFilter from 'src/explore/components/controls/FilterControl/AdhocFilt
import { OptionSortType } from 'src/explore/types';
import { useGetTimeRangeLabel } from 'src/explore/components/controls/FilterControl/utils';
import OptionWrapper from './OptionWrapper';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export interface DndAdhocFilterOptionProps {
adhocFilter: AdhocFilter;
@@ -69,10 +68,7 @@ export default function DndAdhocFilterOption({
isExtra={adhocFilter.isExtra}
datasourceWarningMessage={
adhocFilter.datasourceWarning
? t(
'This filter might be incompatible with current %s',
datasetLabelLower(),
)
? t('This filter might be incompatible with current dataset')
: undefined
}
/>

View File

@@ -38,7 +38,6 @@ import AdhocMetric from 'src/explore/components/controls/MetricControl/AdhocMetr
import MetricDefinitionValue from 'src/explore/components/controls/MetricControl/MetricDefinitionValue';
import ColumnSelectPopoverTrigger from './ColumnSelectPopoverTrigger';
import { DndControlProps } from './types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const AGGREGATED_DECK_GL_CHART_TYPES = [
'deck_screengrid',
@@ -130,16 +129,6 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
formData,
} = props;
// Semantic views do not support arbitrary SQL expressions as dimensions.
// Merge 'sqlExpression' into disabledTabs so the Custom SQL tab is hidden.
const effectiveDisabledTabs = useMemo(
() =>
String(datasource?.type) === 'semantic_view'
? new Set([...(disabledTabs ?? []), 'sqlExpression'])
: disabledTabs,
[datasource?.type, disabledTabs],
);
const [newColumnPopoverVisible, setNewColumnPopoverVisible] = useState(false);
const combinedOptionsMap = useMemo(() => {
@@ -314,7 +303,7 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
}}
editedColumn={column}
isTemporal={isTemporal}
disabledTabs={effectiveDisabledTabs}
disabledTabs={disabledTabs}
>
<OptionWrapper
key={`column-${idx}`}
@@ -337,10 +326,7 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
typeof item === 'object' &&
'error_text' in item &&
item.error_text)
? t(
'This metric might be incompatible with current %s',
datasetLabelLower(),
)
? t('This metric might be incompatible with current dataset')
: undefined;
return (
@@ -454,7 +440,7 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
togglePopover={toggleColumnPopover}
closePopover={closeColumnPopover}
isTemporal={false}
disabledTabs={effectiveDisabledTabs}
disabledTabs={disabledTabs}
metrics={savedMetrics}
selectedMetrics={selectedMetrics}
>

View File

@@ -17,7 +17,6 @@
* under the License.
*/
import { useCallback, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { t } from '@apache-superset/core/translation';
import { AdhocColumn, QueryFormColumn, isAdhocColumn } from '@superset-ui/core';
import { tn } from '@apache-superset/core/translation';
@@ -28,10 +27,8 @@ import OptionWrapper from 'src/explore/components/controls/DndColumnSelectContro
import { OptionSelector } from 'src/explore/components/controls/DndColumnSelectControl/utils';
import { DatasourcePanelDndItem } from 'src/explore/components/DatasourcePanel/types';
import { DndItemType } from 'src/explore/components/DndItemType';
import { ExplorePageState } from 'src/explore/types';
import ColumnSelectPopoverTrigger from './ColumnSelectPopoverTrigger';
import { DndControlProps } from './types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export type DndColumnSelectProps = DndControlProps<QueryFormColumn> & {
options: ColumnMeta[];
@@ -52,19 +49,6 @@ function DndColumnSelect(props: DndColumnSelectProps) {
isTemporal,
disabledTabs,
} = props;
// Semantic views do not support arbitrary SQL expressions as dimensions.
const datasourceType = useSelector<ExplorePageState, string | undefined>(
state => state.explore.datasource?.type,
);
const effectiveDisabledTabs = useMemo(
() =>
datasourceType === 'semantic_view'
? new Set([...(disabledTabs ?? []), 'sqlExpression'])
: disabledTabs,
[datasourceType, disabledTabs],
);
const [newColumnPopoverVisible, setNewColumnPopoverVisible] = useState(false);
const optionSelector = useMemo(() => {
@@ -119,10 +103,7 @@ function DndColumnSelect(props: DndColumnSelectProps) {
optionSelector.values.map((column, idx) => {
const datasourceWarningMessage =
isAdhocColumn(column) && column.datasourceWarning
? t(
'This column might be incompatible with current %s',
datasetLabelLower(),
)
? t('This column might be incompatible with current dataset')
: undefined;
const withCaret = isAdhocColumn(column) || !column.error_text;
@@ -140,7 +121,7 @@ function DndColumnSelect(props: DndColumnSelectProps) {
}}
editedColumn={column}
isTemporal={isTemporal}
disabledTabs={effectiveDisabledTabs}
disabledTabs={disabledTabs}
>
<OptionWrapper
key={idx}
@@ -224,7 +205,7 @@ function DndColumnSelect(props: DndColumnSelectProps) {
closePopover={closePopover}
visible={newColumnPopoverVisible}
isTemporal={isTemporal}
disabledTabs={effectiveDisabledTabs}
disabledTabs={disabledTabs}
>
<div />
</ColumnSelectPopoverTrigger>

View File

@@ -69,7 +69,7 @@ const baseFormData = {
};
const mockStore = configureStore([thunk]);
const store = mockStore({ explore: {} });
const store = mockStore({});
function setup({
value = undefined,

View File

@@ -69,20 +69,14 @@ const adhocMetricB = {
};
test('renders with default props', () => {
render(<DndMetricSelect {...defaultProps} />, {
useDnd: true,
useRedux: true,
});
render(<DndMetricSelect {...defaultProps} />, { useDnd: true });
expect(
screen.getByText('Drop a column/metric here or click'),
).toBeInTheDocument();
});
test('renders with default props and multi = true', () => {
render(<DndMetricSelect {...defaultProps} multi />, {
useDnd: true,
useRedux: true,
});
render(<DndMetricSelect {...defaultProps} multi />, { useDnd: true });
expect(
screen.getByText('Drop columns/metrics here or click'),
).toBeInTheDocument();
@@ -92,7 +86,6 @@ test('render selected metrics correctly', () => {
const metricValues = ['metric_a', 'metric_b', adhocMetricB];
render(<DndMetricSelect {...defaultProps} value={metricValues} multi />, {
useDnd: true,
useRedux: true,
});
expect(screen.getByText('metric_a')).toBeVisible();
expect(screen.getByText('Metric B')).toBeVisible();
@@ -114,7 +107,6 @@ test('warn selected custom metric when metric gets removed from dataset', async
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -167,7 +159,6 @@ test('warn selected custom metric when metric gets removed from dataset for sing
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -226,7 +217,6 @@ test('remove selected adhoc metric when column gets removed from dataset', async
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -269,7 +259,6 @@ test('update adhoc metric name when column label in dataset changes', () => {
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -315,7 +304,6 @@ test('can drag metrics', async () => {
const metricValues = ['metric_a', 'metric_b', adhocMetricB];
render(<DndMetricSelect {...defaultProps} value={metricValues} multi />, {
useDnd: true,
useRedux: true,
});
expect(screen.getByText('metric_a')).toBeVisible();
@@ -353,7 +341,6 @@ test('cannot drop a duplicated item', () => {
</>,
{
useDnd: true,
useRedux: true,
},
);
@@ -387,7 +374,6 @@ test('can drop a saved metric when disallow_adhoc_metrics', () => {
</>,
{
useDnd: true,
useRedux: true,
},
);
@@ -429,7 +415,6 @@ test('cannot drop non-saved metrics when disallow_adhoc_metrics', () => {
</>,
{
useDnd: true,
useRedux: true,
},
);
@@ -478,7 +463,6 @@ test('title changes on custom SQL text change', async () => {
/>,
{
useDnd: true,
useRedux: true,
},
);

View File

@@ -41,7 +41,6 @@ import { DndItemType } from 'src/explore/components/DndItemType';
import DndSelectLabel from 'src/explore/components/controls/DndColumnSelectControl/DndSelectLabel';
import { savedMetricType } from 'src/explore/components/controls/MetricControl/types';
import { AGGREGATES } from 'src/explore/constants';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const EMPTY_OBJECT = {};
const DND_ACCEPTED_TYPES = [DndItemType.Column, DndItemType.Metric];
@@ -78,10 +77,7 @@ const coerceMetrics = (
) {
return {
metric_name: metric,
error_text: t(
'This metric might be incompatible with current %s',
datasetLabelLower(),
),
error_text: t('This metric might be incompatible with current dataset'),
uuid: nanoid(),
};
}
@@ -132,26 +128,6 @@ const DndMetricSelect = (props: any) => {
return extra;
}, [datasource?.extra]);
// Semantic views do not support arbitrary SQL expressions as metrics.
const disallowAdhocMetrics =
extra.disallow_adhoc_metrics || datasource?.type === 'semantic_view';
// AdhocMetricEditPopover reads `datasource.extra.disallow_adhoc_metrics`
// directly, so we need to inject the flag there too — not just in canDrop.
const datasourceForPopover = useMemo(() => {
if (!disallowAdhocMetrics || !datasource) return datasource;
let parsedExtra: Record<string, unknown> = {};
if (datasource.extra) {
try {
parsedExtra = JSON.parse(datasource.extra as string);
} catch {} // eslint-disable-line no-empty
}
return {
...datasource,
extra: JSON.stringify({ ...parsedExtra, disallow_adhoc_metrics: true }),
};
}, [disallowAdhocMetrics, datasource]);
const savedMetricSet = useMemo(
() =>
new Set(
@@ -208,7 +184,7 @@ const DndMetricSelect = (props: any) => {
const canDrop = useCallback(
(item: DatasourcePanelDndItem) => {
if (
disallowAdhocMetrics &&
extra.disallow_adhoc_metrics &&
(item.type !== DndItemType.Metric ||
!savedMetricSet.has(item.value.metric_name))
) {
@@ -317,17 +293,14 @@ const DndMetricSelect = (props: any) => {
columns={props.columns}
savedMetrics={props.savedMetrics}
savedMetricsOptions={getSavedMetricOptionsForMetric(index)}
datasource={datasourceForPopover}
datasource={props.datasource}
onMoveLabel={moveLabel}
onDropLabel={handleDropLabel}
type={`${DndItemType.AdhocMetricOption}_${props.name}_${props.label}`}
multi={multi}
datasourceWarningMessage={
option instanceof AdhocMetric && option.datasourceWarning
? t(
'This metric might be incompatible with current %s',
datasetLabelLower(),
)
? t('This metric might be incompatible with current dataset')
: undefined
}
/>
@@ -426,7 +399,7 @@ const DndMetricSelect = (props: any) => {
columns={props.columns}
savedMetricsOptions={newSavedMetricOptions}
savedMetric={EMPTY_OBJECT as savedMetricType}
datasource={datasourceForPopover}
datasource={props.datasource}
isControlledComponent
visible={newMetricPopoverVisible}
togglePopover={togglePopover}

View File

@@ -415,25 +415,21 @@ export default class AdhocFilterEditPopover extends Component<
</ErrorBoundary>
),
},
...(datasource?.type === 'semantic_view'
? []
: [
{
key: ExpressionTypes.Sql,
label: t('Custom SQL'),
children: (
<ErrorBoundary>
<AdhocFilterEditPopoverSqlTabContent
adhocFilter={this.state.adhocFilter}
onChange={this.onAdhocFilterChange}
options={this.props.options}
height={this.state.height}
datasource={datasource}
/>
</ErrorBoundary>
),
},
]),
{
key: ExpressionTypes.Sql,
label: t('Custom SQL'),
children: (
<ErrorBoundary>
<AdhocFilterEditPopoverSqlTabContent
adhocFilter={this.state.adhocFilter}
onChange={this.onAdhocFilterChange}
options={this.props.options}
height={this.state.height}
datasource={datasource}
/>
</ErrorBoundary>
),
},
]}
/>
{hasDeckSlices && (

View File

@@ -67,19 +67,13 @@ const createProps = () => ({
test('Should render', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(screen.getByTestId('metrics-edit-popover')).toBeVisible();
});
test('Should render correct elements', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(screen.getByRole('tablist')).toBeVisible();
expect(screen.getByRole('button', { name: 'Resize' })).toBeVisible();
expect(screen.getByRole('button', { name: 'Save' })).toBeVisible();
@@ -88,10 +82,7 @@ test('Should render correct elements', () => {
test('Should render correct elements for SQL', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(screen.getByRole('tab', { name: 'Custom SQL' })).toBeVisible();
expect(screen.getByRole('tab', { name: 'Simple' })).toBeVisible();
expect(screen.getByRole('tab', { name: 'Saved' })).toBeVisible();
@@ -103,10 +94,7 @@ test('Should render correct elements for allow ad-hoc metrics', () => {
...createProps(),
datasource: { extra: '{"disallow_adhoc_metrics": false}' },
};
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(screen.getByRole('tab', { name: 'Custom SQL' })).toBeEnabled();
expect(screen.getByRole('tab', { name: 'Simple' })).toBeEnabled();
expect(screen.getByRole('tab', { name: 'Saved' })).toBeEnabled();
@@ -118,10 +106,7 @@ test('Should render correct elements for disallow ad-hoc metrics', () => {
...createProps(),
datasource: { extra: '{"disallow_adhoc_metrics": true}' },
};
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(screen.getByRole('tab', { name: 'Custom SQL' })).toHaveAttribute(
'aria-disabled',
'true',
@@ -136,10 +121,7 @@ test('Should render correct elements for disallow ad-hoc metrics', () => {
test('Clicking on "Close" should call onClose', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Close' }));
expect(props.onClose).toHaveBeenCalledTimes(1);
@@ -147,10 +129,7 @@ test('Clicking on "Close" should call onClose', () => {
test('Clicking on "Save" should call onChange and onClose', async () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(
@@ -166,10 +145,7 @@ test('Clicking on "Save" should call onChange and onClose', async () => {
test('Clicking on "Save" should not call onChange and onClose', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Save' }));
@@ -179,10 +155,7 @@ test('Clicking on "Save" should not call onChange and onClose', () => {
test('Clicking on "Save" should call onChange and onClose for new metric', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} isNewMetric />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} isNewMetric />);
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Save' }));
@@ -192,10 +165,7 @@ test('Clicking on "Save" should call onChange and onClose for new metric', () =>
test('Clicking on "Save" should call onChange and onClose for new title', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} isLabelModified />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} isLabelModified />);
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Save' }));
@@ -208,10 +178,7 @@ test('Should switch to tab:Simple', () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(screen.getByRole('tabpanel', { name: 'Saved' })).toBeVisible();
expect(
@@ -235,10 +202,7 @@ test('Should render "Simple" tab correctly', () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
const tab = screen.getByRole('tab', { name: 'Simple' }).parentElement!;
userEvent.click(tab);
@@ -252,10 +216,7 @@ test('Should switch to tab:Custom SQL', () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
expect(screen.getByRole('tabpanel', { name: 'Saved' })).toBeVisible();
expect(
@@ -281,10 +242,7 @@ test('Should render "Custom SQL" tab correctly', async () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
const tab = screen.getByRole('tab', { name: 'Custom SQL' }).parentElement!;
userEvent.click(tab);
@@ -328,10 +286,7 @@ test('Should filter saved metrics by metric_name and verbose_name', async () =>
},
],
};
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
const combobox = screen.getByRole('combobox', {
name: 'Select saved metrics',
@@ -407,10 +362,7 @@ test('Should filter columns by column_name and verbose_name in Simple tab', asyn
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
render(<AdhocMetricEditPopover {...props} />);
const tab = screen.getByRole('tab', { name: 'Simple' }).parentElement!;
userEvent.click(tab);

View File

@@ -18,7 +18,6 @@
*/
/* eslint-disable camelcase */
import { PureComponent, createRef } from 'react';
import { useSelector } from 'react-redux';
import { isDefined, ensureIsArray, DatasourceType } from '@superset-ui/core';
import { t } from '@apache-superset/core/translation';
import type { editors } from '@apache-superset/core';
@@ -95,8 +94,6 @@ interface AdhocMetricEditPopoverProps {
datasource?: DatasourceInfo;
isNewMetric?: boolean;
isLabelModified?: boolean;
/** Names of metrics the user may select; null means no filtering. */
compatibleMetrics?: string[] | null;
}
interface AdhocMetricEditPopoverState {
@@ -126,7 +123,7 @@ const StyledSelect = styled(Select)`
export const SAVED_TAB_KEY = 'SAVED';
class AdhocMetricEditPopover extends PureComponent<
export default class AdhocMetricEditPopover extends PureComponent<
AdhocMetricEditPopoverProps,
AdhocMetricEditPopoverState
> {
@@ -441,24 +438,15 @@ class AdhocMetricEditPopover extends PureComponent<
ensureIsArray(savedMetricsOptions).length > 0 ? (
<FormItem label={t('Saved metric')}>
<StyledSelect
options={[...ensureIsArray(savedMetricsOptions)]
.sort((a, b) =>
(a.metric_name ?? '').localeCompare(
b.metric_name ?? '',
),
)
.map(savedMetric => ({
options={ensureIsArray(savedMetricsOptions).map(
savedMetric => ({
value: savedMetric.metric_name,
label: this.renderMetricOption(savedMetric),
key: savedMetric.id,
metric_name: savedMetric.metric_name,
verbose_name: savedMetric.verbose_name ?? '',
disabled:
this.props.compatibleMetrics != null &&
!this.props.compatibleMetrics.includes(
savedMetric.metric_name,
),
}))}
}),
)}
optionFilterProps={['metric_name', 'verbose_name']}
{...savedSelectProps}
/>
@@ -608,20 +596,3 @@ class AdhocMetricEditPopover extends PureComponent<
}
// @ts-expect-error - defaultProps for backward compatibility
AdhocMetricEditPopover.defaultProps = defaultProps;
// ---------------------------------------------------------------------------
// Thin functional wrapper that injects compatibility data from Redux.
// AdhocMetricEditPopover is a class component and cannot use hooks directly.
// ---------------------------------------------------------------------------
function AdhocMetricEditPopoverWithRedux(props: AdhocMetricEditPopoverProps) {
const compatibleMetrics = useSelector(
(state: any) =>
state.explore?.compatibleMetrics as string[] | null | undefined,
);
return (
<AdhocMetricEditPopover {...props} compatibleMetrics={compatibleMetrics} />
);
}
export { AdhocMetricEditPopover };
export default AdhocMetricEditPopoverWithRedux;

View File

@@ -61,11 +61,7 @@ function setup(overrides: Record<string, unknown> = {}) {
...overrides,
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
return render(<AdhocMetricOption {...(props as any)} />, {
useDnd: true,
useRedux: true,
initialState: { explore: {} },
});
return render(<AdhocMetricOption {...(props as any)} />, { useDnd: true });
}
test('renders an overlay trigger wrapper for the label', () => {

View File

@@ -62,10 +62,7 @@ function setup(overrides: Record<string, unknown> = {}) {
...defaultProps,
...overrides,
};
const result = render(<MetricsControl {...props} />, {
useDnd: true,
useRedux: true,
});
const result = render(<MetricsControl {...props} />, { useDnd: true });
return { onChange, ...result };
}
@@ -169,7 +166,7 @@ test('does not remove custom SQL metric if savedMetrics changes', async () => {
]}
datasource={undefined}
/>,
{ useDnd: true, useRedux: true },
{ useDnd: true },
);
expect(screen.getByText('old label')).toBeInTheDocument();

View File

@@ -64,7 +64,6 @@ import {
validateNonEmpty,
} from '@superset-ui/core';
import { t } from '@apache-superset/core/translation';
import { datasetLabel } from 'src/features/semanticLayers/label';
import { formatSelectOptions } from 'src/explore/exploreUtils';
import { TIME_FILTER_LABELS } from './constants';
import { StyledColumnOption } from './components/optionRenderers';
@@ -215,7 +214,7 @@ export const controls = {
datasource: {
type: 'DatasourceControl',
label: datasetLabel(),
label: t('Dataset'),
default: null,
description: null,
mapStateToProps: ({ datasource }: ControlState) => ({

View File

@@ -70,9 +70,6 @@ export interface ExploreState {
metadata?: {
owners?: string[] | null;
};
compatibleMetrics?: string[] | null;
compatibleDimensions?: string[] | null;
compatibilityLoading?: boolean;
saveAction?: SaveActionType | null;
chartStates?: Record<number, JsonObject>;
}
@@ -181,13 +178,6 @@ interface UpdateExploreChartStateAction {
lastModified: number;
}
interface SetCompatibilityAction {
type: typeof actions.SET_COMPATIBILITY;
compatibleMetrics: string[] | null;
compatibleDimensions: string[] | null;
compatibilityLoading: boolean;
}
type ExploreAction =
| DynamicPluginControlsReadyAction
| ToggleFaveStarAction
@@ -207,7 +197,6 @@ type ExploreAction =
| SliceUpdatedAction
| SetForceQueryAction
| UpdateExploreChartStateAction
| SetCompatibilityAction
| HydrateExplore;
// Extended control state for dynamic form controls - uses Record for flexibility
@@ -646,15 +635,6 @@ export default function exploreReducer(
force: typedAction.force,
};
},
[actions.SET_COMPATIBILITY]() {
const typedAction = action as SetCompatibilityAction;
return {
...state,
compatibleMetrics: typedAction.compatibleMetrics,
compatibleDimensions: typedAction.compatibleDimensions,
compatibilityLoading: typedAction.compatibilityLoading,
};
},
[actions.UPDATE_EXPLORE_CHART_STATE]() {
const typedAction = action as UpdateExploreChartStateAction;
return {

View File

@@ -17,359 +17,55 @@
* under the License.
*/
import { getChartControlPanelRegistry } from '@superset-ui/core';
import {
applyDefaultFormData,
getControlsState,
handleDeprecatedControls,
} from 'src/explore/store';
import { applyDefaultFormData } from 'src/explore/store';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).featureFlags = {};
// eslint-disable-next-line no-restricted-globals -- TODO: Migrate from describe blocks
describe('store', () => {
beforeAll(() => {
getChartControlPanelRegistry().registerValue('test-chart', {
controlPanelSections: [
{
label: 'Test section',
expanded: true,
controlSetRows: [['row_limit']],
},
],
});
});
beforeAll(() => {
getChartControlPanelRegistry().registerValue('test-chart', {
controlPanelSections: [
{
label: 'Test section',
expanded: true,
controlSetRows: [['row_limit']],
},
],
afterAll(() => {
getChartControlPanelRegistry().remove('test-chart');
});
// eslint-disable-next-line no-restricted-globals -- TODO: Migrate from describe blocks
describe('applyDefaultFormData', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).featureFlags = {};
test('applies default to formData if the key is missing', () => {
const inputFormData = {
datasource: '11_table',
viz_type: 'test-chart',
};
let outputFormData = applyDefaultFormData(inputFormData);
expect(outputFormData.row_limit).toEqual(10000);
const inputWithRowLimit = {
...inputFormData,
row_limit: 888,
};
outputFormData = applyDefaultFormData(inputWithRowLimit);
expect(outputFormData.row_limit).toEqual(888);
});
test('keeps null if key is defined with null', () => {
const inputFormData = {
datasource: '11_table',
viz_type: 'test-chart',
row_limit: null,
};
const outputFormData = applyDefaultFormData(inputFormData);
expect(outputFormData.row_limit).toBe(null);
});
});
});
afterAll(() => {
getChartControlPanelRegistry().remove('test-chart');
});
// Test helper: construct a minimal ExploreState-shaped object for
// getControlsState. Each override value is wrapped as `{ value }` so it
// looks like a pre-existing control entry.
const buildExploreState = (controlOverrides: Record<string, any> = {}) => {
  const controls: Record<string, { value: any }> = {};
  Object.entries(controlOverrides).forEach(([name, value]) => {
    controls[name] = { value };
  });
  return { datasource: { type: 'table' }, controls };
};
// ============================================================
// Existing applyDefaultFormData tests
// ============================================================
test('applyDefaultFormData applies default to formData if the key is missing', () => {
  const baseFormData = {
    datasource: '11_table',
    viz_type: 'test-chart',
  };
  // Missing key → the control's declared default is filled in.
  const withDefaults = applyDefaultFormData(baseFormData);
  expect(withDefaults.row_limit).toEqual(10000);
  // Explicit value → kept untouched.
  const withExplicitLimit = applyDefaultFormData({
    ...baseFormData,
    row_limit: 888,
  });
  expect(withExplicitLimit.row_limit).toEqual(888);
});
test('applyDefaultFormData keeps null if key is defined with null', () => {
  const result = applyDefaultFormData({
    datasource: '11_table',
    viz_type: 'test-chart',
    row_limit: null,
  });
  // null is a deliberately-set value, not a missing key — it must survive.
  expect(result.row_limit).toBe(null);
});
// ============================================================
// Migration tests: handleDeprecatedControls normalizes stale matrixify modes
// (fix for apache/superset#38519 regression — guards validators AND
// downstream UI consumers that infer matrixify state from mode values)
// ============================================================
test('getControlsState resets stale matrixify_mode_rows to disabled when matrixify_enable key absent', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_mode_rows: 'dimensions', // stale pre-revamp default
};
const result = getControlsState(state as any, formData as any);
const modeControl = result.matrixify_mode_rows as any;
expect(modeControl?.value).toBe('disabled');
});
test('getControlsState resets stale matrixify_mode_columns to disabled when matrixify_enable key absent', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_mode_columns: 'metrics', // stale pre-revamp default
};
const result = getControlsState(state as any, formData as any);
const modeControl = result.matrixify_mode_columns as any;
expect(modeControl?.value).toBe('disabled');
});
test('getControlsState preserves matrixify mode values when matrixify_enable is true', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_enable: true,
matrixify_mode_rows: 'dimensions',
};
const result = getControlsState(state as any, formData as any);
const modeControl = result.matrixify_mode_rows as any;
expect(modeControl?.value).toBe('dimensions');
});
test('getControlsState preserves matrixify mode values when matrixify_enable is explicitly false', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_enable: false,
matrixify_mode_rows: 'dimensions',
};
const result = getControlsState(state as any, formData as any);
const modeControl = result.matrixify_mode_rows as any;
// matrixify_enable key IS present (just false) — migration does NOT fire
expect(modeControl?.value).toBe('dimensions');
});
test('getControlsState is idempotent when matrixify modes already disabled', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_mode_rows: 'disabled',
matrixify_mode_columns: 'disabled',
};
const result = getControlsState(state as any, formData as any);
expect((result.matrixify_mode_rows as any)?.value).toBe('disabled');
expect((result.matrixify_mode_columns as any)?.value).toBe('disabled');
});
test('getControlsState handles form_data with no matrixify keys', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
};
const result = getControlsState(state as any, formData as any);
// Controls should get their defaults — matrixify_mode defaults to 'disabled'
expect((result.matrixify_mode_rows as any)?.value).toBe('disabled');
expect((result.matrixify_mode_columns as any)?.value).toBe('disabled');
});
test('getControlsState round-trip: pre-revamp form_data produces no matrixify validation errors', () => {
// Simulate a chart saved before #38519 with stale matrixify defaults
// Empty controls: on real first-load hydration, no pre-existing controls exist
const state = buildExploreState();
const preRevampFormData = {
datasource: '1__table',
viz_type: 'test-chart',
// Stale old defaults — no matrixify_enable key (legacy chart)
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'metrics',
};
const result = getControlsState(state as any, preRevampFormData as any);
// Every matrixify control should have zero validation errors
const matrixifyControlEntries = Object.entries(result).filter(([name]) =>
name.startsWith('matrixify_'),
);
const controlsWithErrors = matrixifyControlEntries.filter(
([, control]) => (control as any)?.validationErrors?.length > 0,
);
expect(controlsWithErrors).toEqual([]);
});
// ============================================================
// Dashboard hydration: applyDefaultFormData with stale form_data
// ============================================================
test('applyDefaultFormData normalizes stale matrixify modes for legacy charts', () => {
  // Dashboard hydration now runs handleDeprecatedControls too, so stale
  // matrixify modes from pre-revamp charts are normalized to 'disabled'.
  // This protects downstream consumers (ChartContextMenu, DrillBySubmenu,
  // ChartRenderer) that infer "matrixify is active" from mode values alone.
  const legacyFormData = {
    datasource: '1__table',
    viz_type: 'test-chart',
    matrixify_mode_rows: 'dimensions',
    matrixify_mode_columns: 'metrics',
    // No matrixify_enable key — legacy chart that never used matrixify
  };
  const hydrated = applyDefaultFormData(legacyFormData as any);
  // Stale values are normalized and the new flag defaults to false.
  expect(hydrated.matrixify_mode_rows).toBe('disabled');
  expect(hydrated.matrixify_mode_columns).toBe('disabled');
  expect(hydrated.matrixify_enable).toBe(false);
});
// ============================================================
// P1: Pre-revamp charts that actually used matrixify via old per-axis flags
// (matrixify_enable_vertical_layout / matrixify_enable_horizontal_layout)
// ============================================================
test('getControlsState preserves modes and sets matrixify_enable when old vertical flag is true', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_enable_vertical_layout: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'metrics',
};
const result = getControlsState(state as any, formData as any);
// Vertical layout was enabled — rows mode preserved, matrixify_enable migrated
expect((result.matrixify_mode_rows as any)?.value).toBe('dimensions');
expect((result.matrixify_enable as any)?.value).toBe(true);
// Horizontal layout was NOT enabled — columns mode reset
expect((result.matrixify_mode_columns as any)?.value).toBe('disabled');
});
test('getControlsState preserves modes and sets matrixify_enable when old horizontal flag is true', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_enable_horizontal_layout: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'metrics',
};
const result = getControlsState(state as any, formData as any);
// Horizontal layout was enabled — columns mode preserved, matrixify_enable migrated
expect((result.matrixify_mode_columns as any)?.value).toBe('metrics');
expect((result.matrixify_enable as any)?.value).toBe(true);
// Vertical layout was NOT enabled — rows mode reset
expect((result.matrixify_mode_rows as any)?.value).toBe('disabled');
});
test('getControlsState preserves both modes when both old per-axis flags are true', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_enable_vertical_layout: true,
matrixify_enable_horizontal_layout: true,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'metrics',
};
const result = getControlsState(state as any, formData as any);
expect((result.matrixify_mode_rows as any)?.value).toBe('dimensions');
expect((result.matrixify_mode_columns as any)?.value).toBe('metrics');
expect((result.matrixify_enable as any)?.value).toBe(true);
});
test('getControlsState resets modes when old per-axis flags are explicitly false', () => {
const state = buildExploreState();
const formData = {
datasource: '1__table',
viz_type: 'test-chart',
matrixify_enable_vertical_layout: false,
matrixify_enable_horizontal_layout: false,
matrixify_mode_rows: 'dimensions',
matrixify_mode_columns: 'metrics',
};
const result = getControlsState(state as any, formData as any);
// Old flags present but false — chart never used matrixify, reset stale modes
expect((result.matrixify_mode_rows as any)?.value).toBe('disabled');
expect((result.matrixify_mode_columns as any)?.value).toBe('disabled');
});
// ============================================================
// P2: Dashboard hydration (applyDefaultFormData) with old per-axis flags
// ============================================================
test('applyDefaultFormData preserves modes when old vertical flag is true', () => {
  const hydrated = applyDefaultFormData({
    datasource: '1__table',
    viz_type: 'test-chart',
    matrixify_enable_vertical_layout: true,
    matrixify_mode_rows: 'dimensions',
    matrixify_mode_columns: 'metrics',
  } as any);
  // Vertical layout was on → rows mode kept, new flag migrated.
  expect(hydrated.matrixify_mode_rows).toBe('dimensions');
  expect(hydrated.matrixify_enable).toBe(true);
  // Horizontal not enabled — columns reset
  expect(hydrated.matrixify_mode_columns).toBe('disabled');
});
test('applyDefaultFormData preserves modes when both old flags are true', () => {
  const hydrated = applyDefaultFormData({
    datasource: '1__table',
    viz_type: 'test-chart',
    matrixify_enable_vertical_layout: true,
    matrixify_enable_horizontal_layout: true,
    matrixify_mode_rows: 'dimensions',
    matrixify_mode_columns: 'metrics',
  } as any);
  expect(hydrated.matrixify_mode_rows).toBe('dimensions');
  expect(hydrated.matrixify_mode_columns).toBe('metrics');
  expect(hydrated.matrixify_enable).toBe(true);
});
// ============================================================
// Direct handleDeprecatedControls tests: verify form_data mutation
// so callers (hydrateExplore) can propagate migrated fields into state
// ============================================================
test('handleDeprecatedControls sets matrixify_enable on form_data when old vertical flag is true', () => {
  const fd: any = {
    matrixify_enable_vertical_layout: true,
    matrixify_mode_rows: 'dimensions',
    matrixify_mode_columns: 'metrics',
  };
  handleDeprecatedControls(fd);
  // Vertical layout was on → flag migrated, rows mode kept.
  expect(fd.matrixify_enable).toBe(true);
  expect(fd.matrixify_mode_rows).toBe('dimensions');
  // Horizontal not enabled — columns reset
  expect(fd.matrixify_mode_columns).toBe('disabled');
});
test('handleDeprecatedControls resets modes when no matrixify_enable and no old flags', () => {
  const fd: any = {
    matrixify_mode_rows: 'dimensions',
    matrixify_mode_columns: 'metrics',
  };
  handleDeprecatedControls(fd);
  // Nothing indicates the chart ever used matrixify → modes reset, flag untouched.
  expect(fd.matrixify_enable).toBeUndefined();
  expect(fd.matrixify_mode_rows).toBe('disabled');
  expect(fd.matrixify_mode_columns).toBe('disabled');
});
test('handleDeprecatedControls is idempotent — no-op when matrixify_enable already present', () => {
  const fd: any = {
    matrixify_enable: true,
    matrixify_mode_rows: 'dimensions',
    matrixify_mode_columns: 'metrics',
  };
  handleDeprecatedControls(fd);
  // matrixify_enable key present → migration must not mutate anything.
  expect(fd.matrixify_enable).toBe(true);
  expect(fd.matrixify_mode_rows).toBe('dimensions');
  expect(fd.matrixify_mode_columns).toBe('metrics');
});

View File

@@ -41,16 +41,9 @@ type FormData = QueryFormData & {
y_axis_zero?: boolean;
y_axis_bounds?: [number | null, number | null];
datasource?: string;
matrixify_enable?: boolean;
matrixify_mode_rows?: string;
matrixify_mode_columns?: string;
// Pre-revamp per-axis enable flags (removed in #38519, may still exist in
// persisted form_data for charts that actually used matrixify)
matrixify_enable_vertical_layout?: boolean;
matrixify_enable_horizontal_layout?: boolean;
};
export function handleDeprecatedControls(formData: FormData): void {
function handleDeprecatedControls(formData: FormData): void {
// Reaffectation / handling of deprecated controls
/* eslint-disable no-param-reassign */
@@ -58,37 +51,6 @@ export function handleDeprecatedControls(formData: FormData): void {
if (formData.y_axis_zero) {
formData.y_axis_bounds = [0, null];
}
// #38519: migrate pre-revamp matrixify controls to the new single-toggle
// system. Before the revamp, per-axis enable flags
// (matrixify_enable_vertical_layout / matrixify_enable_horizontal_layout)
// gated visibility, and matrixify_mode_rows/columns defaulted to
// non-disabled values ('dimensions'/'metrics'). The revamp replaced those
// with a single matrixify_enable toggle and mode default 'disabled'.
//
// Charts that actually used matrixify pre-revamp have the old per-axis
// flags set to true — we must preserve their modes and set
// matrixify_enable: true. Charts that never used matrixify (or predate it)
// need stale mode defaults reset to 'disabled' because 4 downstream UI
// consumers (ExploreChartPanel, ChartContextMenu, DrillBySubmenu,
// ChartRenderer) infer "matrixify is active" from mode values alone.
if (!('matrixify_enable' in formData)) {
const hadVerticalLayout =
formData.matrixify_enable_vertical_layout === true;
const hadHorizontalLayout =
formData.matrixify_enable_horizontal_layout === true;
if (hadVerticalLayout || hadHorizontalLayout) {
// Pre-revamp chart that genuinely used matrixify — migrate to new flag
formData.matrixify_enable = true;
if (!hadVerticalLayout) formData.matrixify_mode_rows = 'disabled';
if (!hadHorizontalLayout) formData.matrixify_mode_columns = 'disabled';
} else {
// Never used matrixify — reset stale defaults
formData.matrixify_mode_rows = 'disabled';
formData.matrixify_mode_columns = 'disabled';
}
}
}
export function getControlsState(
@@ -127,31 +89,25 @@ export function getControlsState(
export function applyDefaultFormData(
inputFormData: FormData,
): Record<string, unknown> {
// Normalize deprecated controls before building control state — ensures
// stale matrixify modes are cleaned on the dashboard hydration path too,
// not just the explore path (getControlsState).
const cleanedFormData = { ...inputFormData };
handleDeprecatedControls(cleanedFormData);
const datasourceType = cleanedFormData.datasource?.split('__')[1] ?? '';
const vizType = cleanedFormData.viz_type;
const datasourceType = inputFormData.datasource?.split('__')[1] ?? '';
const vizType = inputFormData.viz_type;
const controlsState = getAllControlsState(
vizType,
datasourceType as DatasourceType,
null,
cleanedFormData,
inputFormData,
);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const controlFormData = getFormDataFromControls(controlsState as any);
const formData: Record<string, unknown> = {};
Object.keys(controlsState)
.concat(Object.keys(cleanedFormData))
.concat(Object.keys(inputFormData))
.forEach(controlName => {
if (cleanedFormData[controlName as keyof FormData] === undefined) {
if (inputFormData[controlName as keyof FormData] === undefined) {
formData[controlName] = controlFormData[controlName];
} else {
formData[controlName] = cleanedFormData[controlName as keyof FormData];
formData[controlName] = inputFormData[controlName as keyof FormData];
}
});

View File

@@ -71,8 +71,6 @@ export type OptionSortType = Partial<
export type Datasource = Dataset & {
database?: DatabaseObject;
/** The parent resource that owns this datasource (database or semantic layer). */
parent?: { name: string };
datasource?: string;
catalog?: string | null;
schema?: string;
@@ -133,9 +131,6 @@ export interface ExplorePageState {
standalone: boolean;
force: boolean;
common: JsonObject;
compatibleMetrics?: string[] | null;
compatibleDimensions?: string[] | null;
compatibilityLoading?: boolean;
};
sliceEntities?: JsonObject; // propagated from Dashboard view
}

View File

@@ -35,7 +35,6 @@ import {
MenuObjectProps,
MenuData,
} from 'src/types/bootstrapTypes';
import { datasetsLabel } from 'src/features/semanticLayers/label';
import RightMenu from './RightMenu';
import { NAVBAR_MENU_POPUP_OFFSET } from './commonMenuData';
@@ -224,7 +223,7 @@ export function Menu({
setActiveTabs(['Charts']);
break;
case path.startsWith(Paths.Datasets):
setActiveTabs([datasetsLabel()]);
setActiveTabs(['Datasets']);
break;
case path.startsWith(Paths.SqlLab) || path.startsWith(Paths.SavedQueries):
setActiveTabs(['SQL']);
@@ -409,12 +408,6 @@ export default function MenuWrapper({ data, ...rest }: MenuProps) {
Manage: true,
};
// Remap labels that depend on feature flags so they stay in sync with
// the active-tab key used in the Menu component above.
const labelOverrides: Record<string, () => string> = {
Datasets: datasetsLabel,
};
// Cycle through menu.menu to build out cleanedMenu and settings
const cleanedMenu: MenuObjectProps[] = [];
const settings: MenuObjectProps[] = [];
@@ -426,10 +419,6 @@ export default function MenuWrapper({ data, ...rest }: MenuProps) {
const children: (MenuObjectProps | string)[] = [];
const newItem = {
...item,
// Apply any label override for this item (keyed by FAB internal name).
...(item.name && labelOverrides[item.name]
? { label: labelOverrides[item.name]() }
: {}),
};
// Filter childs

View File

@@ -149,7 +149,6 @@ export interface ButtonProps {
buttonStyle: 'primary' | 'secondary' | 'dashed' | 'link' | 'tertiary';
loading?: boolean;
icon?: ReactNode;
component?: ReactNode;
}
export interface SubMenuProps {
@@ -313,22 +312,18 @@ const SubMenuComponent: FunctionComponent<SubMenuProps> = props => {
),
}))}
/>
{props.buttons?.map((btn, i) =>
btn.component ? (
<span key={i}>{btn.component}</span>
) : (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
),
)}
{props.buttons?.map((btn, i) => (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
))}
</div>
</Row>
{props.children}

View File

@@ -1,130 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { SupersetClient } from '@superset-ui/core';
import { render, waitFor } from 'spec/helpers/testing-library';
import SemanticLayerModal from './SemanticLayerModal';
// Shared with the JsonForms mock factory below. jest.mock factories are
// hoisted above imports, so only identifiers prefixed with "mock" may be
// captured — hence the name. Ensures the stub emits its simulated user edit
// exactly once per test; reset in beforeEach.
let mockJsonFormsChangeTriggered = false;
// Stub JsonForms: on first render it synchronously fires onChange with an
// edited configuration ({ warehouse: 'wh1' }), simulating a user changing a
// form field. The stub renders nothing (returns null).
jest.mock('@jsonforms/react', () => ({
  ...jest.requireActual('@jsonforms/react'),
  JsonForms: ({ onChange }: { onChange: (value: unknown) => void }) => {
    // eslint-disable-next-line react-hooks/rules-of-hooks
    if (!mockJsonFormsChangeTriggered) {
      mockJsonFormsChangeTriggered = true;
      onChange({
        data: { warehouse: 'wh1' },
        errors: [],
      });
    }
    return null;
  },
}));
// Mock the network layer: SupersetClient get/post/put become jest.fn()s so
// each test can script responses; getClientErrorObject resolves with an
// empty error message.
jest.mock('@superset-ui/core', () => ({
  ...jest.requireActual('@superset-ui/core'),
  SupersetClient: {
    ...jest.requireActual('@superset-ui/core').SupersetClient,
    get: jest.fn(),
    post: jest.fn(),
    put: jest.fn(),
  },
  getClientErrorObject: jest.fn(() => Promise.resolve({ error: '' })),
}));
// Typed handles to the mocked client methods for scripting/asserting calls.
const mockedGet = SupersetClient.get as jest.Mock;
const mockedPost = SupersetClient.post as jest.Mock;
// Baseline modal props: visible, with an existing semantic layer UUID
// (edit mode), and spied toast/hide callbacks.
const props = {
  show: true,
  onHide: jest.fn(),
  addDangerToast: jest.fn(),
  addSuccessToast: jest.fn(),
  semanticLayerUuid: '11111111-1111-1111-1111-111111111111',
};
beforeEach(() => {
  mockJsonFormsChangeTriggered = false;
  // Fake timers so the schema-refresh debounce can be advanced
  // deterministically with jest.advanceTimersByTime.
  jest.useFakeTimers();
  mockedGet.mockReset();
  mockedPost.mockReset();
  mockedGet
    // First GET: the list of available semantic layer types.
    .mockResolvedValueOnce({
      json: {
        result: [{ id: 'snowflake', name: 'Snowflake', description: '' }],
      },
    })
    // Second GET: the existing layer fetched in edit mode, saved with
    // warehouse 'wh0'.
    .mockResolvedValueOnce({
      json: {
        result: {
          name: 'Layer 1',
          type: 'snowflake',
          configuration: { warehouse: 'wh0' },
        },
      },
    });
  // Every POST returns a configuration schema whose `warehouse` field is
  // dynamic and depends on itself, so editing it triggers a refresh.
  mockedPost.mockResolvedValue({
    json: {
      result: {
        type: 'object',
        properties: {
          warehouse: {
            type: 'string',
            'x-dynamic': true,
            'x-dependsOn': ['warehouse'],
          },
        },
      },
    },
  });
});
afterEach(() => {
  // Flush any still-pending debounce timers before restoring real timers.
  jest.runOnlyPendingTimers();
  jest.useRealTimers();
});
test('posts configuration schema refresh after debounce', async () => {
  render(<SemanticLayerModal {...props} />);
  // The initial schema fetch in edit mode uses the saved configuration
  // (warehouse 'wh0') so dynamic fields render with enriched values.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenNthCalledWith(1, {
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: {
        type: 'snowflake',
        configuration: { warehouse: 'wh0' },
      },
    });
  });
  // Advance past the 500ms debounce so the refresh triggered by the
  // mocked JsonForms onChange (warehouse 'wh1') actually fires.
  jest.advanceTimersByTime(501);
  await waitFor(() => {
    expect(mockedPost).toHaveBeenNthCalledWith(2, {
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: {
        type: 'snowflake',
        configuration: { warehouse: 'wh1' },
      },
    });
  });
});

View File

@@ -1,408 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useState, useEffect, useCallback, useRef } from 'react';
import { t } from '@apache-superset/core/translation';
import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
import { Input, Select, Button } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import { JsonForms } from '@jsonforms/react';
import type { JsonSchema, UISchemaElement } from '@jsonforms/core';
import { cellRegistryEntries } from '@great-expectations/jsonforms-antd-renderers';
import type { ErrorObject } from 'ajv';
import {
StandardModal,
ModalFormField,
MODAL_STANDARD_WIDTH,
MODAL_MEDIUM_WIDTH,
} from 'src/components/Modal';
import { styled } from '@apache-superset/core/theme';
import {
renderers,
sanitizeSchema,
buildUiSchema,
getDynamicDependencies,
areDependenciesSatisfied,
serializeDependencyValues,
SCHEMA_REFRESH_DEBOUNCE_MS,
} from './jsonFormsHelpers';
// Padded wrapper around the modal body content.
const ModalContent = styled.div`
  padding: ${({ theme }) => theme.sizeUnit * 4}px;
`;
// Wizard step: pick the layer type first, then fill in its configuration.
type Step = 'type' | 'config';
// JsonForms validation display mode: errors stay hidden until an explicit
// save attempt switches the mode to 'ValidateAndShow'.
type ValidationMode = 'ValidateAndHide' | 'ValidateAndShow';
// Shape of one entry returned by GET /api/v1/semantic_layer/types.
interface SemanticLayerType {
  id: string;
  name: string;
  description: string;
}
interface SemanticLayerModalProps {
  show: boolean;
  onHide: () => void;
  addDangerToast: (msg: string) => void;
  addSuccessToast: (msg: string) => void;
  // When set, the modal opens in edit mode for this layer.
  semanticLayerUuid?: string;
}
/**
 * Two-step modal for creating or editing a semantic layer.
 *
 * Step 'type' lets the user pick a semantic layer type; step 'config'
 * renders a JsonForms form driven by a JSON Schema fetched from the
 * backend. In edit mode (a semanticLayerUuid is given) the modal loads the
 * saved layer and jumps straight to the config step. Edits to fields that
 * other `x-dynamic` fields depend on trigger a debounced schema re-fetch.
 */
export default function SemanticLayerModal({
  show,
  onHide,
  addDangerToast,
  addSuccessToast,
  semanticLayerUuid,
}: SemanticLayerModalProps) {
  const isEditMode = !!semanticLayerUuid;
  const [step, setStep] = useState<Step>('type');
  const [name, setName] = useState('');
  const [selectedType, setSelectedType] = useState<string | null>(null);
  const [types, setTypes] = useState<SemanticLayerType[]>([]);
  const [loading, setLoading] = useState(false);
  const [configSchema, setConfigSchema] = useState<JsonSchema | null>(null);
  const [uiSchema, setUiSchema] = useState<UISchemaElement | undefined>(
    undefined,
  );
  const [formData, setFormData] = useState<Record<string, unknown>>({});
  const [saving, setSaving] = useState(false);
  const [hasErrors, setHasErrors] = useState(true);
  const [refreshingSchema, setRefreshingSchema] = useState(false);
  const [validationMode, setValidationMode] =
    useState<ValidationMode>('ValidateAndHide');
  // Latest JsonForms validation errors; kept in a ref so handleSave can
  // read them without forcing a re-render on every keystroke.
  const errorsRef = useRef<ErrorObject[]>([]);
  // Pending debounce timer for dynamic schema refreshes.
  const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  // Serialized snapshot of the dependency values from the last refresh,
  // so unchanged values don't trigger another fetch.
  const lastDepSnapshotRef = useRef<string>('');
  // Map of x-dynamic field name -> its x-dependsOn field names.
  const dynamicDepsRef = useRef<Record<string, string[]>>({});
  // Loads the list of available semantic layer types.
  const fetchTypes = useCallback(async () => {
    setLoading(true);
    try {
      const { json } = await SupersetClient.get({
        endpoint: '/api/v1/semantic_layer/types',
      });
      setTypes(json.result ?? []);
    } catch (error) {
      const clientError = await getClientErrorObject(error);
      addDangerToast(
        clientError.error ||
          t('An error occurred while fetching semantic layer types'),
      );
    } finally {
      setLoading(false);
    }
  }, [addDangerToast]);
  // Sanitizes and installs a schema from the backend, deriving the UI
  // schema and the dynamic-dependency map from the raw schema.
  const applySchema = useCallback((rawSchema: JsonSchema) => {
    const schema = sanitizeSchema(rawSchema);
    setConfigSchema(schema);
    setUiSchema(buildUiSchema(schema));
    dynamicDepsRef.current = getDynamicDependencies(rawSchema);
  }, []);
  // Fetches the configuration schema for a type. Without a configuration
  // this is the initial fetch (advances to the config step); with one it
  // is a background refresh of dynamic values.
  const fetchConfigSchema = useCallback(
    async (type: string, configuration?: Record<string, unknown>) => {
      const isInitialFetch = !configuration;
      if (isInitialFetch) setLoading(true);
      else setRefreshingSchema(true);
      try {
        const { json } = await SupersetClient.post({
          endpoint: '/api/v1/semantic_layer/schema/configuration',
          jsonPayload: { type, configuration },
        });
        applySchema(json.result);
        if (json.warning) {
          addDangerToast(String(json.warning));
        }
        if (isInitialFetch) setStep('config');
      } catch (error) {
        const clientError = await getClientErrorObject(error);
        if (isInitialFetch) {
          addDangerToast(
            clientError.error ||
              t('An error occurred while fetching the configuration schema'),
          );
        } else {
          addDangerToast(
            clientError.error ||
              t('An error occurred while refreshing the configuration schema'),
          );
        }
      } finally {
        if (isInitialFetch) setLoading(false);
        else setRefreshingSchema(false);
      }
    },
    [addDangerToast, applySchema],
  );
  // Edit mode: loads the saved layer and its enriched schema, then jumps
  // directly to the config step.
  const fetchExistingLayer = useCallback(
    async (uuid: string) => {
      setLoading(true);
      try {
        const { json } = await SupersetClient.get({
          endpoint: `/api/v1/semantic_layer/${uuid}`,
        });
        const layer = json.result;
        setName(layer.name ?? '');
        setSelectedType(layer.type);
        setFormData(layer.configuration ?? {});
        setHasErrors(false);
        // In edit mode, fetch the enriched schema using the full saved
        // configuration so that dynamic dropdowns (account, project,
        // environment) show their human-readable labels immediately rather
        // than flashing raw IDs while the background refresh completes.
        const { json: schemaJson } = await SupersetClient.post({
          endpoint: '/api/v1/semantic_layer/schema/configuration',
          jsonPayload: { type: layer.type, configuration: layer.configuration },
        });
        applySchema(schemaJson.result);
        setStep('config');
      } catch (error) {
        const clientError = await getClientErrorObject(error);
        addDangerToast(
          clientError.error ||
            t('An error occurred while fetching the semantic layer'),
        );
      } finally {
        setLoading(false);
      }
    },
    [addDangerToast, applySchema],
  );
  // On open: load types (and the existing layer in edit mode).
  // On close: reset all wizard state so the next open starts fresh.
  useEffect(() => {
    if (show) {
      if (isEditMode && semanticLayerUuid) {
        fetchTypes();
        fetchExistingLayer(semanticLayerUuid);
      } else {
        fetchTypes();
      }
    } else {
      setStep('type');
      setName('');
      setSelectedType(null);
      setTypes([]);
      setConfigSchema(null);
      setUiSchema(undefined);
      setFormData({});
      setHasErrors(true);
      setRefreshingSchema(false);
      setValidationMode('ValidateAndHide');
      errorsRef.current = [];
      lastDepSnapshotRef.current = '';
      dynamicDepsRef.current = {};
      if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
    }
  }, [show, fetchTypes, isEditMode, semanticLayerUuid, fetchExistingLayer]);
  // Moves from the type step to the config step by fetching the schema.
  const handleStepAdvance = () => {
    if (selectedType) {
      fetchConfigSchema(selectedType);
    }
  };
  // Returns to the type step, discarding all config-step state.
  const handleBack = () => {
    setStep('type');
    setConfigSchema(null);
    setUiSchema(undefined);
    setFormData({});
    setValidationMode('ValidateAndHide');
    errorsRef.current = [];
    lastDepSnapshotRef.current = '';
    dynamicDepsRef.current = {};
    if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
  };
  // Persists the layer: PUT in edit mode, POST for a new layer.
  const handleCreate = async () => {
    setSaving(true);
    try {
      if (isEditMode && semanticLayerUuid) {
        await SupersetClient.put({
          endpoint: `/api/v1/semantic_layer/${semanticLayerUuid}`,
          jsonPayload: { name, configuration: formData },
        });
        addSuccessToast(t('Semantic layer updated'));
      } else {
        await SupersetClient.post({
          endpoint: '/api/v1/semantic_layer/',
          jsonPayload: { name, type: selectedType, configuration: formData },
        });
        addSuccessToast(t('Semantic layer created'));
      }
      onHide();
    } catch (error) {
      const clientError = await getClientErrorObject(error);
      addDangerToast(
        clientError.error ||
          (isEditMode
            ? t('An error occurred while updating the semantic layer')
            : t('An error occurred while creating the semantic layer')),
      );
    } finally {
      setSaving(false);
    }
  };
  // Primary modal button: advances the wizard or submits the form.
  const handleSave = () => {
    if (step === 'type') {
      handleStepAdvance();
    } else {
      // Trigger validation UI and submit only from explicit save action.
      setValidationMode('ValidateAndShow');
      if (errorsRef.current.length === 0) {
        handleCreate();
      }
    }
  };
  // Schedules a debounced schema re-fetch when the values that dynamic
  // fields depend on have actually changed.
  const maybeRefreshSchema = useCallback(
    (data: Record<string, unknown>) => {
      if (!selectedType) return;
      const dynamicDeps = dynamicDepsRef.current;
      if (Object.keys(dynamicDeps).length === 0) return;
      // Check if any dynamic field has all dependencies satisfied
      const hasSatisfiedDeps = Object.values(dynamicDeps).some(deps =>
        areDependenciesSatisfied(deps, data, configSchema ?? undefined),
      );
      if (!hasSatisfiedDeps) return;
      // Only re-fetch if dependency values actually changed
      const snapshot = serializeDependencyValues(dynamicDeps, data);
      if (snapshot === lastDepSnapshotRef.current) return;
      lastDepSnapshotRef.current = snapshot;
      if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
      debounceTimerRef.current = setTimeout(() => {
        fetchConfigSchema(selectedType, data);
      }, SCHEMA_REFRESH_DEBOUNCE_MS);
    },
    [selectedType, fetchConfigSchema, configSchema],
  );
  // JsonForms onChange: records data/errors and maybe refreshes the schema.
  const handleFormChange = useCallback(
    ({
      data,
      errors,
    }: {
      data: Record<string, unknown>;
      errors?: ErrorObject[];
    }) => {
      setFormData(data);
      errorsRef.current = errors ?? [];
      setHasErrors(errorsRef.current.length > 0);
      maybeRefreshSchema(data);
    },
    [maybeRefreshSchema],
  );
  const selectedTypeName =
    types.find(type => type.id === selectedType)?.name ?? '';
  const title = isEditMode
    ? t('Edit %s', selectedTypeName || t('Semantic Layer'))
    : step === 'type'
      ? t('New Semantic Layer')
      : t('Configure %s', selectedTypeName);
  return (
    <StandardModal
      show={show}
      onHide={onHide}
      onSave={handleSave}
      title={title}
      icon={isEditMode ? <Icons.EditOutlined /> : <Icons.PlusOutlined />}
      width={step === 'type' ? MODAL_STANDARD_WIDTH : MODAL_MEDIUM_WIDTH}
      saveDisabled={
        step === 'type' ? !selectedType : saving || !name.trim() || hasErrors
      }
      saveText={
        step === 'type' ? undefined : isEditMode ? t('Save') : t('Create')
      }
      saveLoading={saving}
      contentLoading={loading}
    >
      <ModalContent>
        {step === 'type' ? (
          <ModalFormField label={t('Type')}>
            <Select
              ariaLabel={t('Semantic layer type')}
              placeholder={t('Select a semantic layer type')}
              value={selectedType}
              onChange={value => setSelectedType(value as string)}
              options={types.map(type => ({
                value: type.id,
                label: type.name,
              }))}
              getPopupContainer={() => document.body}
              dropdownAlign={{
                points: ['tl', 'bl'],
                offset: [0, 4],
                overflow: { adjustX: 0, adjustY: 1 },
              }}
            />
          </ModalFormField>
        ) : (
          <>
            {!isEditMode && (
              <Button
                buttonStyle="link"
                icon={<Icons.CaretLeftOutlined iconSize="s" />}
                onClick={handleBack}
              >
                {t('Back')}
              </Button>
            )}
            <ModalFormField label={t('Name')} required>
              <Input
                value={name}
                onChange={e => setName(e.target.value)}
                placeholder={t('Name of the semantic layer')}
              />
            </ModalFormField>
            {configSchema && (
              // Wrap in a form with autocomplete="off" so browsers do not
              // autofill credential fields (service token, account, etc.).
              // eslint-disable-next-line jsx-a11y/no-redundant-roles
              <form
                role="presentation"
                autoComplete="off"
                onSubmit={e => e.preventDefault()}
              >
                <JsonForms
                  schema={configSchema}
                  uischema={uiSchema}
                  data={formData}
                  renderers={renderers}
                  cells={cellRegistryEntries}
                  config={{ refreshingSchema, formData }}
                  validationMode={validationMode}
                  onChange={handleFormChange}
                />
              </form>
            )}
          </>
        )}
      </ModalContent>
    </StandardModal>
  );
}

View File

@@ -1,150 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import type { JsonSchema } from '@jsonforms/core';
import {
areDependenciesSatisfied,
sanitizeSchema,
buildUiSchema,
getDynamicDependencies,
serializeDependencyValues,
} from './jsonFormsHelpers';
// Unit tests for the pure helpers in jsonFormsHelpers.
test('areDependenciesSatisfied returns true for present dependency values', () => {
  expect(
    areDependenciesSatisfied(['database', 'schema'], {
      database: 'examples',
      schema: 'public',
    }),
  ).toBe(true);
});
test('areDependenciesSatisfied treats empty object dependencies as unsatisfied', () => {
  expect(
    areDependenciesSatisfied(['auth'], {
      auth: {},
    }),
  ).toBe(false);
});
test('areDependenciesSatisfied uses schema defaults for untouched fields', () => {
  const schema: JsonSchema = {
    type: 'object',
    properties: {
      database: {
        type: 'string',
        default: 'analytics',
      },
    },
  };
  // No value in data, but the schema default counts as satisfied.
  expect(areDependenciesSatisfied(['database'], {}, schema)).toBe(true);
});
test('sanitizeSchema removes empty enums and preserves other properties', () => {
  const schema: JsonSchema = {
    type: 'object',
    properties: {
      environment: {
        type: 'string',
        enum: [],
      },
      warehouse: {
        type: 'string',
        enum: ['xsmall', 'small'],
      },
    },
  };
  const sanitized = sanitizeSchema(schema);
  const sanitizedProperties =
    (sanitized.properties as Record<string, JsonSchema>) ?? {};
  expect(sanitizedProperties.environment?.enum).toBeUndefined();
  expect(sanitizedProperties.warehouse?.enum).toEqual(['xsmall', 'small']);
});
test('buildUiSchema respects x-propertyOrder and includes placeholders/tooltips', () => {
  const schema = {
    type: 'object',
    properties: {
      database: {
        type: 'string',
        description: 'Target database',
        examples: ['examples'],
      },
      schema: {
        type: 'string',
      },
    },
    'x-propertyOrder': ['schema', 'database'],
  } as JsonSchema;
  const uiSchema = buildUiSchema(schema) as {
    type: string;
    elements: Array<Record<string, unknown>>;
  };
  expect(uiSchema.type).toBe('VerticalLayout');
  expect(uiSchema.elements[0].scope).toBe('#/properties/schema');
  expect(uiSchema.elements[1].scope).toBe('#/properties/database');
  expect(uiSchema.elements[1].options).toEqual({
    placeholderText: 'examples',
    tooltip: 'Target database',
  });
});
test('getDynamicDependencies extracts x-dynamic dependency mapping', () => {
  const schema = {
    type: 'object',
    properties: {
      schema: {
        type: 'string',
        'x-dynamic': true,
        'x-dependsOn': ['database'],
      },
      database: {
        type: 'string',
      },
      warehouse: {
        type: 'string',
        'x-dynamic': true,
      },
    },
  } as JsonSchema;
  // warehouse is dynamic but has no x-dependsOn, so it is excluded.
  expect(getDynamicDependencies(schema)).toEqual({ schema: ['database'] });
});
test('serializeDependencyValues is stable and sorted by key', () => {
  const dynamicDeps = {
    schema: ['database'],
    role: ['warehouse', 'database'],
  };
  const data = {
    warehouse: 'compute_wh',
    database: 'analytics',
    ignored: 'x',
  };
  expect(serializeDependencyValues(dynamicDeps, data)).toBe(
    JSON.stringify({ database: 'analytics', warehouse: 'compute_wh' }),
  );
});

View File

@@ -1,386 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useEffect } from 'react';
import { t } from '@apache-superset/core/translation';
import { Spin, Select, Form } from 'antd';
import { withJsonFormsControlProps } from '@jsonforms/react';
import type {
JsonSchema,
UISchemaElement,
ControlProps,
} from '@jsonforms/core';
import {
rankWith,
and,
isStringControl,
formatIs,
schemaMatches,
} from '@jsonforms/core';
import {
rendererRegistryEntries,
TextControl,
} from '@great-expectations/jsonforms-antd-renderers';
// Delay (ms) before re-fetching the configuration schema after a
// dependency field changes.
export const SCHEMA_REFRESH_DEBOUNCE_MS = 500;
/**
 * Custom renderer that renders `Input.Password` for fields with
 * `format: "password"` in the JSON Schema (e.g. Pydantic `SecretStr`).
 */
function PasswordControl(props: ControlProps) {
  const uischema = {
    ...props.uischema,
    options: {
      ...props.uischema.options,
      type: 'password',
      inputProps: {
        ...((props.uischema.options?.inputProps as Record<string, unknown>) ??
          {}),
        // Prevent browsers from autofilling stored login passwords into
        // service-token fields. 'new-password' is respected even when
        // 'off' is ignored (Chrome ≥ 34).
        autoComplete: 'new-password',
      },
    },
  };
  return TextControl({ ...props, uischema });
}
const PasswordRenderer = withJsonFormsControlProps(PasswordControl);
const passwordEntry = {
  // Rank 3: wins over the stock string renderer for password-format fields.
  tester: rankWith(3, and(isStringControl, formatIs('password'))),
  renderer: PasswordRenderer,
};
/**
 * Renderer for `const` properties (e.g. Pydantic discriminator fields).
 * Renders nothing visually but ensures the const value is set in form data,
 * so discriminated unions resolve correctly on the backend.
 */
function ConstControl({ data, handleChange, path, schema }: ControlProps) {
  const constValue = (schema as Record<string, unknown>).const;
  // Inject the const into form data whenever it is missing or stale.
  useEffect(() => {
    if (constValue !== undefined && data !== constValue) {
      handleChange(path, constValue);
    }
  }, [constValue, data, handleChange, path]);
  return null;
}
const ConstRenderer = withJsonFormsControlProps(ConstControl);
const constEntry = {
  // Rank 10: high priority for const fields, but readOnly const fields are
  // excluded so the visible ReadOnly renderer (rank 11) handles them.
  tester: rankWith(
    10,
    schemaMatches(
      s =>
        s !== undefined &&
        'const' in s &&
        !(s as Record<string, unknown>).readOnly,
    ),
  ),
  renderer: ConstRenderer,
};
/**
 * Renderer for read-only fields (e.g. a fixed database that the admin locked).
 * Renders a disabled input showing the current value. Also ensures the default
 * value is injected into form data (like ConstControl does for hidden fields).
 */
function ReadOnlyControl({
  data,
  handleChange,
  path,
  schema,
  ...rest
}: ControlProps) {
  // Prefer `const` over `default` as the locked value.
  const defaultValue =
    (schema as Record<string, unknown>).const ??
    (schema as Record<string, unknown>).default;
  useEffect(() => {
    if (defaultValue !== undefined && data !== defaultValue) {
      handleChange(path, defaultValue);
    }
  }, [defaultValue, data, handleChange, path]);
  // enabled: false renders the text input disabled.
  return TextControl({
    ...rest,
    data,
    handleChange,
    path,
    schema,
    enabled: false,
  });
}
const ReadOnlyRenderer = withJsonFormsControlProps(ReadOnlyControl);
const readOnlyEntry = {
  // Rank 11: outranks constEntry so readOnly fields stay visible.
  tester: rankWith(
    11,
    schemaMatches(
      s => s !== undefined && (s as Record<string, unknown>).readOnly === true,
    ),
  ),
  renderer: ReadOnlyRenderer,
};
/**
 * Checks whether all dependency values are filled (non-empty).
 * Handles nested objects (like auth) by checking they have at least one key.
 *
 * Fields that have a `default` in the schema are considered satisfied even
 * when the user has not explicitly touched them yet — JsonForms does not
 * write default values into `data` until a field is interacted with, so
 * without this fallback a field like `admin_host` (which ships with a
 * sensible default) would permanently block the refresh.
 */
export function areDependenciesSatisfied(
  dependencies: string[],
  data: Record<string, unknown>,
  schema?: JsonSchema,
): boolean {
  const isFilled = (candidate: unknown): boolean =>
    candidate !== null && candidate !== undefined && candidate !== '';
  for (const dep of dependencies) {
    const value = data[dep];
    if (isFilled(value)) {
      // A present but empty object (e.g. an untouched nested auth block)
      // does not count as satisfied.
      if (
        typeof value === 'object' &&
        Object.keys(value as object).length === 0
      ) {
        return false;
      }
      continue;
    }
    // Untouched field: fall back to the schema default, if any.
    if (!isFilled(schema?.properties?.[dep]?.default)) {
      return false;
    }
  }
  return true;
}
/**
 * Renderer for fields marked `x-dynamic` in the JSON Schema.
 * Shows a loading spinner inside the input while the schema is being
 * refreshed with dynamic values from the backend.
 */
function DynamicFieldControl(props: ControlProps) {
  // refreshingSchema / formData are threaded through JsonForms' `config`.
  const { refreshingSchema, formData: cfgData } = props.config ?? {};
  const deps = (props.schema as Record<string, unknown>)?.['x-dependsOn'];
  // Only show the spinner when this field's dependencies are actually
  // satisfied — otherwise no refresh can be in flight for it.
  const refreshing =
    refreshingSchema &&
    Array.isArray(deps) &&
    areDependenciesSatisfied(
      deps as string[],
      (cfgData as Record<string, unknown>) ?? {},
      props.rootSchema,
    );
  if (!refreshing) {
    return TextControl(props);
  }
  const uischema = {
    ...props.uischema,
    options: {
      ...props.uischema.options,
      placeholderText: t('Loading...'),
      inputProps: { suffix: <Spin size="small" /> },
    },
  };
  // Disable input while the refresh is pending.
  return TextControl({ ...props, uischema, enabled: false });
}
const DynamicFieldRenderer = withJsonFormsControlProps(DynamicFieldControl);
const dynamicFieldEntry = {
  tester: rankWith(
    3,
    and(
      isStringControl,
      schemaMatches(
        s => (s as Record<string, unknown>)?.['x-dynamic'] === true,
      ),
    ),
  ),
  renderer: DynamicFieldRenderer,
};
/**
 * Renderer for fields that carry an ``x-enumNames`` array alongside their
 * ``enum`` values. Renders as an Antd Select showing human-readable labels
 * (from ``x-enumNames``) while storing the underlying enum values in form
 * data. Used for MetricFlow's integer-ID fields (account, project,
 * environment) where the backend provides both IDs and display names.
 */
function EnumNamesControl(props: ControlProps) {
  const { refreshingSchema } = props.config ?? {};
  const schema = props.schema as Record<string, unknown>;
  const enumValues = (schema.enum as unknown[]) ?? [];
  // Fall back to stringified values when labels are missing or shorter
  // than the enum array.
  const enumNames =
    (schema['x-enumNames'] as string[]) ?? enumValues.map(String);
  const options = enumValues.map((value, index) => ({
    value,
    label: enumNames[index] ?? String(value),
  }));
  const tooltip = (props.uischema?.options as Record<string, unknown>)
    ?.tooltip as string | undefined;
  return (
    <Form.Item label={props.label} tooltip={tooltip}>
      <Select
        value={props.data ?? null}
        onChange={value => props.handleChange(props.path, value)}
        options={options}
        style={{ width: '100%' }}
        disabled={!props.enabled}
        allowClear
        loading={!!refreshingSchema}
        placeholder={
          (props.uischema?.options as Record<string, unknown>)
            ?.placeholderText as string | undefined
        }
      />
    </Form.Item>
  );
}
const EnumNamesRenderer = withJsonFormsControlProps(EnumNamesControl);
const enumNamesEntry = {
  // Rank 5: higher than the default string renderer (23) so this fires
  // whenever x-enumNames is present, regardless of the underlying type.
  tester: rankWith(
    5,
    schemaMatches(s => {
      const names = (s as Record<string, unknown>)['x-enumNames'];
      return Array.isArray(names) && (names as unknown[]).length > 0;
    }),
  ),
  renderer: EnumNamesRenderer,
};
// Library defaults first; each custom entry's tester rank decides when it
// overrides the stock renderer for a given field.
export const renderers = [
  ...rendererRegistryEntries,
  passwordEntry,
  constEntry,
  readOnlyEntry,
  enumNamesEntry,
  dynamicFieldEntry,
];
/**
 * Removes empty `enum` arrays from schema properties. The JSON Schema spec
 * requires `enum` to have at least one item, and AJV rejects empty arrays.
 * Fields with empty enums are rendered as plain text inputs instead.
 */
export function sanitizeSchema(schema: JsonSchema): JsonSchema {
  if (!schema.properties) return schema;
  // True when the property carries an explicitly-empty enum array.
  const hasEmptyEnum = (candidate: unknown): candidate is JsonSchema =>
    typeof candidate === 'object' &&
    candidate !== null &&
    'enum' in candidate &&
    Array.isArray((candidate as JsonSchema).enum) &&
    ((candidate as JsonSchema).enum as unknown[]).length === 0;
  const cleaned = Object.entries(schema.properties).map(([name, prop]) => {
    if (hasEmptyEnum(prop)) {
      const { enum: _dropped, ...withoutEnum } = prop;
      return [name, withoutEnum] as const;
    }
    return [name, prop as JsonSchema] as const;
  });
  return {
    ...schema,
    properties: Object.fromEntries(cleaned),
  } as JsonSchema;
}
/**
 * Derives a JSON Forms UI schema (a VerticalLayout of Controls) from a
 * JSON Schema. Property order follows the backend-provided
 * `x-propertyOrder` when present, otherwise the object key order. The
 * first `examples` entry becomes a control's placeholder text and
 * `description` becomes its tooltip.
 */
export function buildUiSchema(schema: JsonSchema): UISchemaElement | undefined {
  const props = schema.properties;
  if (!props) return undefined;
  const order =
    ((schema as Record<string, unknown>)['x-propertyOrder'] as string[]) ??
    Object.keys(props);
  const elements: Array<Record<string, unknown>> = [];
  for (const key of order) {
    // Ignore ordering entries that do not correspond to a real property.
    if (!(key in props)) continue;
    const prop = props[key];
    const control: Record<string, unknown> = {
      type: 'Control',
      scope: `#/properties/${key}`,
    };
    if (typeof prop === 'object' && prop !== null) {
      const { examples, description } = prop as {
        examples?: unknown;
        description?: unknown;
      };
      const options: Record<string, unknown> = {};
      if (Array.isArray(examples) && examples.length > 0) {
        options.placeholderText = String(examples[0]);
      }
      if (typeof description === 'string') {
        options.tooltip = description;
      }
      // Only attach `options` when it actually carries something.
      if (Object.keys(options).length > 0) {
        control.options = options;
      }
    }
    elements.push(control);
  }
  return { type: 'VerticalLayout', elements } as UISchemaElement;
}
/**
 * Extracts dynamic field dependency mappings from the schema.
 * Returns a map of field name -> list of dependency field names.
 */
export function getDynamicDependencies(
  schema: JsonSchema,
): Record<string, string[]> {
  const result: Record<string, string[]> = {};
  for (const [field, prop] of Object.entries(schema.properties ?? {})) {
    // Guard clauses: only object properties that carry both x-dynamic and
    // a list-valued x-dependsOn participate in dynamic refreshes.
    if (typeof prop !== 'object' || prop === null) continue;
    const raw = prop as Record<string, unknown>;
    if (!('x-dynamic' in raw)) continue;
    if (!Array.isArray(raw['x-dependsOn'])) continue;
    result[field] = raw['x-dependsOn'] as string[];
  }
  return result;
}
/**
 * Serializes the dependency values for a set of fields into a stable string
 * for comparison, so we only re-fetch when dependency values actually change.
 */
export function serializeDependencyValues(
  dynamicDeps: Record<string, string[]>,
  data: Record<string, unknown>,
): string {
  // Deduplicate every dependency key across all dynamic fields, then sort
  // so the serialized form is order-independent.
  const uniqueKeys = [...new Set(Object.values(dynamicDeps).flat())].sort();
  const snapshot: Record<string, unknown> = {};
  uniqueKeys.forEach(key => {
    snapshot[key] = data[key];
  });
  return JSON.stringify(snapshot);
}

View File

@@ -1,65 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { isFeatureEnabled, FeatureFlag } from '@superset-ui/core';
import { t } from '@apache-superset/core/translation';
// Cast because the flag may not yet be declared in the FeatureFlag enum.
const SEMANTIC_LAYERS_FLAG = 'SEMANTIC_LAYERS' as FeatureFlag;
/**
 * When the SEMANTIC_LAYERS feature flag is enabled the UI broadens
 * "dataset" → "datasource" and "database" → "data connection" so
 * that semantic views and semantic layers feel like first-class
 * citizens alongside traditional datasets and database connections.
 *
 * Each label is exported as a function (not a constant) so the feature
 * flag is re-evaluated at every call instead of once at import time.
 */
function sl<T>(legacy: T, semantic: T): T {
  return isFeatureEnabled(SEMANTIC_LAYERS_FLAG) ? semantic : legacy;
}
// ---------------------------------------------------------------------------
// "dataset" family
// ---------------------------------------------------------------------------
/** Capitalized singular: "Dataset" / "Datasource" */
export const datasetLabel = () => sl(t('Dataset'), t('Datasource'));
/** Lower-case singular: "dataset" / "datasource" */
export const datasetLabelLower = () => sl(t('dataset'), t('datasource'));
/** Capitalized plural: "Datasets" / "Datasources" */
export const datasetsLabel = () => sl(t('Datasets'), t('Datasources'));
/** Lower-case plural: "datasets" / "datasources" */
export const datasetsLabelLower = () => sl(t('datasets'), t('datasources'));
// ---------------------------------------------------------------------------
// "database" family
// ---------------------------------------------------------------------------
/** Capitalized singular: "Database" / "Data connection" */
export const databaseLabel = () => sl(t('Database'), t('Data connection'));
/** Lower-case singular: "database" / "data connection" */
export const databaseLabelLower = () => sl(t('database'), t('data connection'));
/** Capitalized plural: "Databases" / "Data connections" */
export const databasesLabel = () => sl(t('Databases'), t('Data connections'));
/** Lower-case plural: "databases" / "data connections" */
export const databasesLabelLower = () =>
  sl(t('databases'), t('data connections'));

View File

@@ -1,264 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
render,
screen,
waitFor,
userEvent,
} from 'spec/helpers/testing-library';
import { SupersetClient } from '@superset-ui/core';
import AddSemanticViewModal from './AddSemanticViewModal';
jest.mock('@superset-ui/core', () => ({
...jest.requireActual('@superset-ui/core'),
SupersetClient: {
...jest.requireActual('@superset-ui/core').SupersetClient,
get: jest.fn(),
post: jest.fn(),
},
}));
const mockedGet = SupersetClient.get as jest.Mock;
const mockedPost = SupersetClient.post as jest.Mock;
const createProps = () => ({
show: true,
onHide: jest.fn(),
onSuccess: jest.fn(),
addDangerToast: jest.fn(),
addSuccessToast: jest.fn(),
});
/**
 * Open the combobox labelled `name` and click the option labelled
 * `optionLabel`, waiting for each element to appear first.
 */
async function selectOption(name: string, optionLabel: string) {
  const combobox = await screen.findByRole('combobox', { name });
  await userEvent.click(combobox);
  const option = await screen.findByText(optionLabel);
  await userEvent.click(option);
}
// Reset call history and scripted implementations between tests so mock
// behavior configured in one test cannot leak into the next.
beforeEach(() => {
  mockedGet.mockReset();
  mockedPost.mockReset();
});
// Happy path: the modal fetches layers on open, loads the chosen layer's
// runtime schema and view list, POSTs the selected view for creation, then
// toasts success, notifies the parent, and closes.
test('loads layers on open and adds selected semantic views', async () => {
  // Layer listing returned by GET /api/v1/semantic_layer/ on mount.
  mockedGet.mockResolvedValue({
    json: {
      result: [{ uuid: 'layer-1', name: 'Snowflake SL' }],
    },
  });
  // Script every POST the modal issues, keyed by endpoint; any other
  // endpoint rejects and fails the test.
  mockedPost.mockImplementation(({ endpoint }: { endpoint: string }) => {
    if (endpoint === '/api/v1/semantic_layer/layer-1/schema/runtime') {
      // Empty runtime schema — no extra runtime inputs required.
      return Promise.resolve({ json: { result: { properties: {} } } });
    }
    if (endpoint === '/api/v1/semantic_layer/layer-1/views') {
      // 'customers' is flagged already_added; the test only picks 'orders'.
      return Promise.resolve({
        json: {
          result: [
            { name: 'orders', already_added: false },
            { name: 'customers', already_added: true },
          ],
        },
      });
    }
    if (endpoint === '/api/v1/semantic_view/') {
      return Promise.resolve({
        json: {
          result: {
            created: [{ uuid: 'view-1', name: 'orders' }],
          },
        },
      });
    }
    return Promise.reject(new Error(`Unexpected endpoint: ${endpoint}`));
  });
  const props = createProps();
  render(<AddSemanticViewModal {...props} />);
  // Opening the modal should immediately fetch the available layers.
  await waitFor(() => {
    expect(mockedGet).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/',
    });
  });
  await selectOption('Semantic layer', 'Snowflake SL');
  // Choosing a layer triggers the runtime-schema fetch...
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/layer-1/schema/runtime',
      jsonPayload: {},
    });
  });
  // ...then the views fetch, carrying the (empty) runtime data.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/layer-1/views',
      jsonPayload: { runtime_data: {} },
    });
  });
  await selectOption('Semantic views', 'orders');
  await userEvent.click(
    screen.getByRole('button', { name: /add 1 view\(s\)/i }),
  );
  // Submitting should create the selected view against the chosen layer.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_view/',
      jsonPayload: {
        views: [
          {
            name: 'orders',
            semantic_layer_uuid: 'layer-1',
            configuration: {},
          },
        ],
      },
    });
  });
  // Full success: toast, notify the parent, and close the modal.
  expect(props.addSuccessToast).toHaveBeenCalledWith(
    '1 semantic view(s) added',
  );
  expect(props.onSuccess).toHaveBeenCalled();
  expect(props.onHide).toHaveBeenCalled();
});
// Partial success path: the create endpoint reports one created view and one
// error. Both toasts fire, and the modal stays open so the user can retry.
test('shows partial success feedback when only some semantic views are created', async () => {
  mockedGet.mockResolvedValue({
    json: {
      result: [{ uuid: 'layer-1', name: 'Snowflake SL' }],
    },
  });
  mockedPost.mockImplementation(({ endpoint }: { endpoint: string }) => {
    if (endpoint === '/api/v1/semantic_layer/layer-1/schema/runtime') {
      return Promise.resolve({ json: { result: { properties: {} } } });
    }
    if (endpoint === '/api/v1/semantic_layer/layer-1/views') {
      // Both views are selectable this time.
      return Promise.resolve({
        json: {
          result: [
            { name: 'orders', already_added: false },
            { name: 'customers', already_added: false },
          ],
        },
      });
    }
    if (endpoint === '/api/v1/semantic_view/') {
      // Backend creates 'orders' but rejects 'customers'.
      return Promise.resolve({
        json: {
          result: {
            created: [{ uuid: 'view-1', name: 'orders' }],
            errors: [{ name: 'customers', error: 'create failed' }],
          },
        },
      });
    }
    return Promise.reject(new Error(`Unexpected endpoint: ${endpoint}`));
  });
  const props = createProps();
  render(<AddSemanticViewModal {...props} />);
  await selectOption('Semantic layer', 'Snowflake SL');
  // Wait for the views combobox to render after layer selection.
  await waitFor(() => {
    expect(
      screen.getByRole('combobox', { name: 'Semantic views' }),
    ).toBeInTheDocument();
  });
  await selectOption('Semantic views', 'orders');
  await selectOption('Semantic views', 'customers');
  await userEvent.click(
    screen.getByRole('button', { name: /add 2 view\(s\)/i }),
  );
  await waitFor(() => {
    // One success toast plus one danger toast naming the failed view.
    expect(props.addSuccessToast).toHaveBeenCalledWith(
      '1 semantic view(s) added',
    );
    expect(props.addDangerToast).toHaveBeenCalledWith(
      '1 semantic view(s) failed to add: customers',
    );
  });
  // Partial failure must not close the modal or signal overall success.
  expect(props.onSuccess).not.toHaveBeenCalled();
  expect(props.onHide).not.toHaveBeenCalled();
});
// The initial GET /api/v1/semantic_layer/ fetch rejects; the modal should
// surface a danger toast rather than crash.
test('shows toast when loading semantic layers fails', async () => {
  mockedGet.mockRejectedValue(new Error('boom'));
  const modalProps = createProps();
  render(<AddSemanticViewModal {...modalProps} />);
  await waitFor(() =>
    expect(modalProps.addDangerToast).toHaveBeenCalledWith(
      'An error occurred while fetching semantic layers',
    ),
  );
});
// The final create request rejects outright; the modal should surface a
// generic danger toast for the add failure.
test('shows toast when add semantic views fails', async () => {
  mockedGet.mockResolvedValue({
    json: {
      result: [{ uuid: 'layer-1', name: 'Snowflake SL' }],
    },
  });
  mockedPost.mockImplementation(({ endpoint }: { endpoint: string }) => {
    if (endpoint === '/api/v1/semantic_layer/layer-1/schema/runtime') {
      return Promise.resolve({ json: { result: { properties: {} } } });
    }
    if (endpoint === '/api/v1/semantic_layer/layer-1/views') {
      return Promise.resolve({
        json: {
          result: [{ name: 'orders', already_added: false }],
        },
      });
    }
    if (endpoint === '/api/v1/semantic_view/') {
      // Simulate the create request failing entirely.
      return Promise.reject(new Error('save failed'));
    }
    return Promise.reject(new Error(`Unexpected endpoint: ${endpoint}`));
  });
  const props = createProps();
  render(<AddSemanticViewModal {...props} />);
  await selectOption('Semantic layer', 'Snowflake SL');
  // Wait for the views combobox to render after layer selection.
  await waitFor(() => {
    expect(
      screen.getByRole('combobox', { name: 'Semantic views' }),
    ).toBeInTheDocument();
  });
  await selectOption('Semantic views', 'orders');
  await userEvent.click(
    screen.getByRole('button', { name: /add 1 view\(s\)/i }),
  );
  await waitFor(() => {
    expect(props.addDangerToast).toHaveBeenCalledWith(
      'An error occurred while adding semantic views',
    );
  });
});

Some files were not shown because too many files have changed in this diff Show More