Compare commits

...

19 Commits

Author SHA1 Message Date
Beto Dealmeida
3aad565eab refactor: keep engine manager focused on engine creation 2026-05-05 18:22:36 -04:00
Beto Dealmeida
b7b59dfb8a fix: Add port validation for SSH tunnels
Raise SSHTunnelDatabasePortError when the database URI has no port and
there's no default port for the database backend. This matches the
original behavior from the removed ssh.py module.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-05-05 18:08:58 -04:00
Beto Dealmeida
fa0d4e1c08 Small fixes 2026-05-05 18:08:58 -04:00
Beto Dealmeida
08df7d5178 fix: SSH tunnel and test connection error handling
- Use sshtunnel.open_tunnel() instead of SSHTunnelForwarder directly
  to properly handle debug_level parameter
- Fix keepalive parameter name (set_keepalive, not keepalive)
- Fix test assertions that were inside pytest.raises blocks and never
  executed - now check error_type instead of string messages
- Update SSH tunnel test mocks to patch open_tunnel

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-05-05 18:08:57 -04:00
Beto Dealmeida
9dc54d8f1b Rebase 2026-05-05 18:08:57 -04:00
Beto Dealmeida
e2ce534148 Fix tests 2026-05-05 18:08:43 -04:00
Beto Dealmeida
b3f8831d34 Fix poolclass check 2026-05-05 18:08:43 -04:00
Beto Dealmeida
1775cae220 Fix more tests 2026-05-05 18:08:42 -04:00
Beto Dealmeida
26f0390bbb Simplify key generation 2026-05-05 18:08:42 -04:00
Beto Dealmeida
ea27cabfc6 Update existing tests 2026-05-05 18:08:42 -04:00
Beto Dealmeida
f39367bffd Hash key 2026-05-05 18:08:32 -04:00
Beto Dealmeida
11395531f2 Small improvements 2026-05-05 18:08:31 -04:00
Beto Dealmeida
ec018cd842 Cleanup 2026-05-05 18:02:45 -04:00
Beto Dealmeida
48d3f441b8 Connecting 2026-05-05 18:02:45 -04:00
Beto Dealmeida
b3393c65f7 Add extension 2026-05-05 18:02:45 -04:00
Beto Dealmeida
8776b651a5 Cleanup locks 2026-05-05 18:02:45 -04:00
Beto Dealmeida
ccd32920fc feat: engine manager 2026-05-05 18:02:45 -04:00
Richard Fogaca Nienkotter
9459bc7bf4 fix(mcp): warn on invalid chart preview form data key (#39891)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-05 16:40:00 -03:00
Beto Dealmeida
cb53745d43 feat: semantic layer extension (#37815) 2026-05-05 12:07:46 -04:00
155 changed files with 19552 additions and 1106 deletions

View File

@@ -54,6 +54,7 @@ jobs:
SUPERSET_SECRET_KEY: not-a-secret
run: |
pytest --durations-min=0.5 --cov=superset/sql/ ./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
pytest --durations-min=0.5 --cov=superset/semantic_layers/ ./tests/unit_tests/semantic_layers/ --cache-clear --cov-fail-under=100
- name: Upload code coverage
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v5
with:

View File

@@ -46,6 +46,13 @@ The Deck.gl MapBox chart's **Opacity**, **Default longitude**, **Default latitud
**To restore fit-to-data behavior:** Open the chart in Explore, clear the **Default longitude**, **Default latitude**, and **Zoom** fields in the Viewport section, and re-save the chart.
### Combined datasource list endpoint
Added a new combined datasource list endpoint at `GET /api/v1/datasource/` to serve datasets and semantic views in one response.
- The endpoint is available to users with at least one of `can_read` on `Dataset` or `SemanticView`.
- Semantic views are included only when the `SEMANTIC_LAYERS` feature flag is enabled.
- The endpoint enforces strict `order_column` validation and returns `400` for invalid sort columns.
### ClickHouse minimum driver version bump
The minimum required version of `clickhouse-connect` has been raised to `>=0.13.0`. If you are using the ClickHouse connector, please upgrade your `clickhouse-connect` package. The `_mutate_label` workaround that appended hash suffixes to column aliases has also been removed, as it is no longer needed with modern versions of the driver.
@@ -107,7 +114,6 @@ DISTRIBUTED_COORDINATION_CONFIG = {
```
See `superset/config.py` for complete configuration options.
### WebSocket config for GAQ with Docker
[35896](https://github.com/apache/superset/pull/35896) and [37624](https://github.com/apache/superset/pull/37624) updated documentation on how to run and configure Superset with Docker. Specifically for the WebSocket configuration, a new `docker/superset-websocket/config.example.json` was added to the repo, so that users could copy it to create a `docker/superset-websocket/config.json` file. The existing `docker/superset-websocket/config.json` was removed and git-ignored, so if you're using GAQ / WebSocket make sure to:

View File

@@ -105,7 +105,13 @@ class CeleryConfig:
CELERY_CONFIG = CeleryConfig
FEATURE_FLAGS = {"ALERT_REPORTS": True, "DATASET_FOLDERS": True}
FEATURE_FLAGS = {
"ALERT_REPORTS": True,
"DATASET_FOLDERS": True,
"ENABLE_EXTENSIONS": True,
"SEMANTIC_LAYERS": True,
}
EXTENSIONS_PATH = "/app/docker/extensions"
ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
WEBDRIVER_BASEURL = f"http://superset_app{os.environ.get('SUPERSET_APP_ROOT', '/')}/" # When using docker compose baseurl should be http://superset_nginx{ENV{BASEPATH}}/ # noqa: E501
# The base URL for the email report hyperlinks.

View File

@@ -224,3 +224,52 @@ async def analysis_guide(ctx: Context) -> str:
```
See [MCP Integration](./mcp) for implementation details.
### Semantic Layers
Extensions can register custom semantic layer implementations that allow Superset to connect to external data modeling frameworks. Each semantic layer defines how to authenticate, discover semantic views (tables/metrics/dimensions), and execute queries against the external system.
```python
from superset_core.semantic_layers.decorators import semantic_layer
from superset_core.semantic_layers.layer import SemanticLayer
from my_extension.config import MyConfig
from my_extension.view import MySemanticView
@semantic_layer(
id="my_platform",
name="My Data Platform",
description="Connect to My Data Platform's semantic layer",
)
class MySemanticLayer(SemanticLayer[MyConfig, MySemanticView]):
configuration_class = MyConfig
@classmethod
def from_configuration(cls, configuration: dict) -> "MySemanticLayer":
config = MyConfig.model_validate(configuration)
return cls(config)
@classmethod
def get_configuration_schema(cls, configuration=None) -> dict:
return MyConfig.model_json_schema()
@classmethod
def get_runtime_schema(cls, configuration=None, runtime_data=None) -> dict:
return {"type": "object", "properties": {}}
def get_semantic_views(self, runtime_configuration: dict) -> set[MySemanticView]:
# Return available views from the external platform
...
def get_semantic_view(self, name: str, additional_configuration: dict) -> MySemanticView:
# Return a specific view by name
...
```
**Note**: The `@semantic_layer` decorator automatically detects context and applies appropriate ID prefixing:
- **Extension context**: ID prefixed as `extensions.{publisher}.{name}.{id}`
- **Host context**: Original ID used as-is
The decorator registers the class in the semantic layers registry, making it available in the UI for users to create connections. The `configuration_class` should be a Pydantic model that defines the fields needed to connect (credentials, project, database, etc.). Superset uses the model's JSON schema to render the configuration form dynamically.

View File

@@ -81,6 +81,12 @@
"lifecycle": "development",
"description": "Expand nested types in Presto into extra columns/arrays. Experimental, doesn't work with all nested types."
},
{
"name": "SEMANTIC_LAYERS",
"default": false,
"lifecycle": "development",
"description": "Enable semantic layers and show semantic views alongside datasets"
},
{
"name": "TABLE_V2_TIME_COMPARISON_ENABLED",
"default": false,

View File

@@ -288,6 +288,7 @@ module = [
"superset.tags.filters",
"superset.commands.security.update",
"superset.commands.security.create",
"superset.semantic_layers.api",
]
warn_unused_ignores = false

View File

@@ -43,6 +43,8 @@ classifiers = [
]
dependencies = [
"flask-appbuilder>=5.0.2,<6",
"isodate>=0.7.0",
"pyarrow>=16.0.0",
"pydantic>=2.8.0",
"sqlalchemy>=1.4.0,<2.0",
"sqlalchemy-utils>=0.38.0, <0.43", # expanding lowerbound to work with pydoris

View File

@@ -0,0 +1,73 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import Any
from pydantic import BaseModel
def build_configuration_schema(
    config_class: type[BaseModel],
    configuration: BaseModel | None = None,
) -> dict[str, Any]:
    """
    Build a JSON schema for a Pydantic configuration class.

    Takes care of the boilerplate shared by semantic layers with dynamic
    fields:

    - restores the model's declared field order (Pydantic emits the
      properties alphabetically)
    - marks every ``x-dynamic`` property with ``enum: []`` when no partial
      ``configuration`` is available, so the frontend renders them as
      empty dropdowns

    Implementations call this helper instead of ``model_json_schema()``
    directly, then layer their own dynamic population logic on top.
    """
    schema = config_class.model_json_schema()

    # Rebuild the properties mapping following the order in which the
    # fields were declared on the model, honoring aliases when present.
    ordered_keys = []
    for field_name, field_info in config_class.model_fields.items():
        ordered_keys.append(field_info.alias or field_name)
    properties = schema["properties"]
    schema["properties"] = {
        key: properties[key] for key in ordered_keys if key in properties
    }

    # Without a partial configuration, dynamic dropdowns start out empty.
    if configuration is None:
        for property_schema in schema["properties"].values():
            if property_schema.get("x-dynamic"):
                property_schema["enum"] = []

    return schema
def check_dependencies(
    prop_schema: dict[str, Any],
    configuration: BaseModel,
) -> bool:
    """
    Return whether a dynamic property's dependencies are met.

    Each entry in the property's ``x-dependsOn`` list names an attribute on
    ``configuration``; the dependencies are satisfied only when every one of
    those attributes is present and truthy.
    """
    for dependency in prop_schema.get("x-dependsOn", []):
        if not getattr(configuration, dependency, None):
            return False
    return True

View File

@@ -0,0 +1,169 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Semantic layer DAO interfaces for superset-core.
Provides abstract DAO classes for semantic layers and views that define the
interface contract. Host implementations replace these with concrete classes
backed by SQLAlchemy during initialization.
Usage:
from superset_core.semantic_layers.daos import (
AbstractSemanticLayerDAO,
AbstractSemanticViewDAO,
)
"""
from __future__ import annotations
from abc import abstractmethod
from typing import Any, ClassVar
from superset_core.common.daos import BaseDAO
from superset_core.semantic_layers.models import SemanticLayerModel, SemanticViewModel
class AbstractSemanticLayerDAO(BaseDAO[SemanticLayerModel]):
    """
    Interface contract for the SemanticLayer DAO.

    At startup the host application swaps this class out for a concrete
    DAO backed by SQLAlchemy; extensions code against this interface.
    """

    # Concrete implementations bind this to the SQLAlchemy model.
    model_cls: ClassVar[type[Any] | None] = None
    base_filter = None
    # Semantic layers are addressed by UUID rather than integer id.
    id_column_name = "uuid"
    uuid_column_name = "uuid"

    @classmethod
    @abstractmethod
    def validate_uniqueness(cls, name: str) -> bool:
        """
        Return True when no existing semantic layer uses ``name``.

        :param name: Semantic layer name to validate
        :return: True if the name is unique, False otherwise
        """

    @classmethod
    @abstractmethod
    def validate_update_uniqueness(cls, layer_uuid: str, name: str) -> bool:
        """
        Return True when ``name`` is unique among all layers other than the
        one identified by ``layer_uuid`` (the layer being updated).

        :param layer_uuid: UUID of the semantic layer being updated
        :param name: New name to validate
        :return: True if the name is unique, False otherwise
        """

    @classmethod
    @abstractmethod
    def find_by_name(cls, name: str) -> SemanticLayerModel | None:
        """
        Look up a semantic layer by its name.

        :param name: Semantic layer name
        :return: SemanticLayerModel instance or None
        """

    @classmethod
    @abstractmethod
    def get_semantic_views(cls, layer_uuid: str) -> list[SemanticViewModel]:
        """
        List every semantic view attached to the given layer.

        :param layer_uuid: UUID of the semantic layer
        :return: List of SemanticViewModel instances
        """
class AbstractSemanticViewDAO(BaseDAO[SemanticViewModel]):
    """
    Interface contract for the SemanticView DAO.

    At startup the host application swaps this class out for a concrete
    DAO backed by SQLAlchemy; extensions code against this interface.
    """

    # Concrete implementations bind this to the SQLAlchemy model.
    model_cls: ClassVar[type[Any] | None] = None
    base_filter = None
    id_column_name = "id"
    uuid_column_name = "uuid"

    @classmethod
    @abstractmethod
    def validate_uniqueness(
        cls,
        name: str,
        layer_uuid: str,
        configuration: dict[str, Any],
    ) -> bool:
        """
        Return True when no existing view in the layer matches the
        combination of ``name``, ``layer_uuid``, and ``configuration``.

        :param name: View name
        :param layer_uuid: UUID of the parent semantic layer
        :param configuration: Configuration dict to compare
        :return: True if unique, False otherwise
        """

    @classmethod
    @abstractmethod
    def validate_update_uniqueness(
        cls,
        view_uuid: str,
        name: str,
        layer_uuid: str,
        configuration: dict[str, Any],
    ) -> bool:
        """
        Return True when the combination of ``name``, ``layer_uuid``, and
        ``configuration`` is unique among all views other than the one
        identified by ``view_uuid`` (the view being updated).

        :param view_uuid: UUID of the view being updated
        :param name: New name to validate
        :param layer_uuid: UUID of the parent semantic layer
        :param configuration: Configuration dict to compare
        :return: True if unique, False otherwise
        """

    @classmethod
    @abstractmethod
    def find_by_name(cls, name: str, layer_uuid: str) -> SemanticViewModel | None:
        """
        Look up a semantic view by name within a semantic layer.

        :param name: View name
        :param layer_uuid: UUID of the parent semantic layer
        :return: SemanticViewModel instance or None
        """


__all__ = ["AbstractSemanticLayerDAO", "AbstractSemanticViewDAO"]

View File

@@ -0,0 +1,102 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Semantic layer registration decorator for Superset.
This module provides a decorator interface to register semantic layer
implementations with the host application, enabling automatic discovery
by the extensions framework.
Usage:
from superset_core.semantic_layers.decorators import semantic_layer
@semantic_layer(
id="snowflake",
name="Snowflake Cortex",
description="Snowflake semantic layer via Cortex Analyst",
)
class SnowflakeSemanticLayer(SemanticLayer[SnowflakeConfig, SnowflakeView]):
...
# Or with minimal arguments:
@semantic_layer(id="dbt", name="dbt Semantic Layer")
class DbtSemanticLayer(SemanticLayer[DbtConfig, DbtView]):
...
"""
from __future__ import annotations
from typing import Callable, TypeVar
# Type variable for decorated semantic layer classes
T = TypeVar("T")
def semantic_layer(
    id: str,
    name: str,
    description: str | None = None,
) -> Callable[[T], T]:
    """
    Register a semantic layer implementation with the host application.

    The decorator detects whether it is being used from an extension and,
    when it is, namespaces the ``id`` to prevent collisions between host
    and extension semantic layers.

    This module-level placeholder always raises; Superset replaces it with
    the real implementation during startup.

    Args:
        id: Unique semantic layer type identifier (e.g., "snowflake",
            "dbt"). Used as the key in the semantic layers registry and
            stored in the ``type`` column of the ``SemanticLayer`` model.
        name: Human-readable display name (e.g., "Snowflake Cortex"),
            shown in the UI when listing available semantic layer types.
        description: Optional description for documentation and UI
            tooltips.

    Returns:
        The decorated semantic layer class, registered with the host
        application.

    Raises:
        NotImplementedError: If called before the host implementation is
            initialized.

    Example:
        from superset_core.semantic_layers.decorators import semantic_layer
        from superset_core.semantic_layers.layer import SemanticLayer

        @semantic_layer(
            id="snowflake",
            name="Snowflake Cortex",
            description="Connect to Snowflake Cortex Analyst",
        )
        class SnowflakeSemanticLayer(
            SemanticLayer[SnowflakeConfig, SnowflakeView]
        ):
            ...
    """
    raise NotImplementedError(
        "Semantic layer decorator not initialized. "
        "This decorator should be replaced during Superset startup."
    )
__all__ = ["semantic_layer"]

View File

@@ -0,0 +1,129 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Any, Generic, TypeVar
from pydantic import BaseModel
from superset_core.semantic_layers.view import SemanticView
ConfigT = TypeVar("ConfigT", bound=BaseModel)
SemanticViewT = TypeVar("SemanticViewT", bound="SemanticView")
class SemanticLayer(ABC, Generic[ConfigT, SemanticViewT]):
    """
    Base class that every semantic layer implementation must extend.
    """

    # Pydantic model describing the fields needed to connect to the layer.
    configuration_class: type[BaseModel]

    @classmethod
    @abstractmethod
    def from_configuration(
        cls,
        configuration: dict[str, Any],
    ) -> SemanticLayer[ConfigT, SemanticViewT]:
        """
        Instantiate the semantic layer from its stored configuration.
        """
        raise NotImplementedError(
            "Semantic layers must implement the from_configuration method"
        )

    @classmethod
    @abstractmethod
    def get_configuration_schema(
        cls,
        configuration: ConfigT | None = None,
    ) -> dict[str, Any]:
        """
        Return the JSON schema for the configuration needed to add the
        semantic layer.

        A partial ``configuration`` may be supplied to enrich the schema,
        enabling progressive validation and better UX. For example, a
        semantic layer might require:

        - auth information
        - a database

        Once the user provides the auth information, a client can send that
        partial configuration here and receive a schema that includes the
        databases the user has access to, so a dropdown can be populated.

        The Snowflake semantic layer has an example implementation of this
        method, where database and schema names are populated based on the
        provided connection info.
        """
        raise NotImplementedError(
            "Semantic layers must implement the get_configuration_schema method"
        )

    @classmethod
    @abstractmethod
    def get_runtime_schema(
        cls,
        configuration: ConfigT,
        runtime_data: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """
        Return the JSON schema for the runtime parameters needed to load
        semantic views.

        Given the semantic layer's configuration, this describes what is
        still required to connect to a semantic view. For example, a layer
        might be configured by:

        - auth information
        - an optional database

        When the user does not pick a database while creating the layer,
        the runtime schema would require the database name before any
        semantic views can be loaded. Users can therefore pin a layer to a
        specific database (or project, account, etc.) or defer the choice
        to query time.

        The Snowflake semantic layer has an example implementation of this
        method, where database and schema names are required if they were
        not provided in the initial configuration.
        """
        raise NotImplementedError(
            "Semantic layers must implement the get_runtime_schema method"
        )

    @abstractmethod
    def get_semantic_views(
        self,
        runtime_configuration: dict[str, Any],
    ) -> set[SemanticViewT]:
        """
        Return the semantic views available in this semantic layer.

        The runtime configuration may carry details such as a project or
        schema, used to restrict which views are returned.
        """

    @abstractmethod
    def get_semantic_view(
        self,
        name: str,
        additional_configuration: dict[str, Any],
    ) -> SemanticViewT:
        """
        Return a single semantic view given its name and any additional
        configuration.
        """

View File

@@ -0,0 +1,85 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Semantic layer model interfaces for superset-core.
Provides abstract model classes for semantic layers and views that will be
replaced by the host implementation's concrete SQLAlchemy models during
initialization.
Usage:
from superset_core.semantic_layers.models import (
SemanticLayerModel,
SemanticViewModel,
)
"""
from __future__ import annotations
from datetime import datetime
from uuid import UUID
from superset_core.common.models import CoreModel
class SemanticLayerModel(CoreModel):
    """
    Interface stub for the SemanticLayer database model.

    Replaced at initialization by the host's concrete SQLAlchemy model;
    the annotations below document the columns callers may rely on.
    """

    __abstract__ = True

    # Column attributes expected on the concrete model.
    uuid: UUID
    name: str
    description: str | None
    type: str
    configuration: str
    configuration_version: int
    cache_timeout: int | None
    created_on: datetime | None
    changed_on: datetime | None
class SemanticViewModel(CoreModel):
    """
    Interface stub for the SemanticView database model.

    Replaced at initialization by the host's concrete SQLAlchemy model;
    the annotations below document the columns callers may rely on.
    """

    __abstract__ = True

    # Column attributes expected on the concrete model.
    id: int
    uuid: UUID
    name: str
    description: str | None
    configuration: str
    configuration_version: int
    cache_timeout: int | None
    # UUID of the parent SemanticLayerModel row.
    semantic_layer_uuid: UUID
    created_on: datetime | None
    changed_on: datetime | None


__all__ = ["SemanticLayerModel", "SemanticViewModel"]

View File

@@ -0,0 +1,209 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import enum
from dataclasses import dataclass
from datetime import date, datetime, time, timedelta
import isodate
import pyarrow as pa
@dataclass(frozen=True)
class Grain:
    """
    Represents a time grain (e.g., day, month, year).

    Attributes:
        name: Human-readable name of the grain (e.g., "Second")
        representation: ISO 8601 duration (e.g., "PT1S", "P1D", "P1M")
    """
    name: str
    representation: str
    def __post_init__(self) -> None:
        # Validate eagerly: isodate raises on a malformed ISO 8601 duration,
        # so an invalid Grain can never be constructed.
        isodate.parse_duration(self.representation)
    def __eq__(self, other: object) -> bool:
        # Equality (and hashing, below) deliberately ignore ``name``: two
        # grains with the same ISO duration are the same grain. Defining
        # __eq__ in the body suppresses the dataclass-generated one, which
        # would otherwise also compare ``name``.
        if isinstance(other, Grain):
            return self.representation == other.representation
        return NotImplemented
    def __hash__(self) -> int:
        # Must stay consistent with __eq__: hash only the representation.
        return hash(self.representation)
class Grains:
    """Pre-defined common grains plus a factory for custom ones."""

    SECOND = Grain("Second", "PT1S")
    MINUTE = Grain("Minute", "PT1M")
    HOUR = Grain("Hour", "PT1H")
    DAY = Grain("Day", "P1D")
    WEEK = Grain("Week", "P1W")
    MONTH = Grain("Month", "P1M")
    QUARTER = Grain("Quarter", "P3M")
    YEAR = Grain("Year", "P1Y")

    # Keyed by ISO 8601 duration so lookups by representation are O(1).
    _REGISTRY: dict[str, Grain] = {
        grain.representation: grain
        for grain in (SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, QUARTER, YEAR)
    }

    @classmethod
    def get(cls, representation: str, name: str | None = None) -> Grain:
        """Return the pre-defined grain, or build a custom one on miss."""
        try:
            return cls._REGISTRY[representation]
        except KeyError:
            return Grain(name or representation, representation)
@dataclass(frozen=True)
class Dimension:
    """An attribute of a semantic view that queries can group or filter by."""
    id: str
    name: str
    # Arrow data type of the dimension's values.
    type: pa.DataType
    definition: str | None = None
    description: str | None = None
    # Time grain, for temporal dimensions only.
    grain: Grain | None = None
@dataclass(frozen=True)
class Metric:
    """An aggregated measure defined in a semantic view."""
    id: str
    name: str
    # Arrow data type of the metric's values.
    type: pa.DataType
    # Unlike Dimension, a Metric always carries a definition.
    definition: str
    description: str | None = None
@dataclass(frozen=True)
class AdhocExpression:
    """A free-form expression supplied at query time (e.g. for ordering)."""
    id: str
    definition: str
class Operator(str, enum.Enum):
    """Comparison operators available in semantic query filters."""
    EQUALS = "="
    NOT_EQUALS = "!="
    GREATER_THAN = ">"
    LESS_THAN = "<"
    GREATER_THAN_OR_EQUAL = ">="
    LESS_THAN_OR_EQUAL = "<="
    IN = "IN"
    NOT_IN = "NOT IN"
    LIKE = "LIKE"
    NOT_LIKE = "NOT LIKE"
    IS_NULL = "IS NULL"
    IS_NOT_NULL = "IS NOT NULL"
    # Escape hatch: the filter is expressed as a free-form expression.
    ADHOC = "ADHOC"
# Scalar types accepted as filter comparison values.
FilterValues = str | int | float | bool | datetime | date | time | timedelta | None
class PredicateType(enum.Enum):
    """Whether the predicate filters rows (WHERE) or aggregates (HAVING)."""
    WHERE = "WHERE"
    HAVING = "HAVING"
@dataclass(frozen=True, order=True)
class Filter:
    """
    A single predicate in a semantic query.

    NOTE(review): ``order=True`` generates comparisons over the field tuple,
    but ``PredicateType`` members (plain Enum), ``None`` columns, and
    frozenset values do not support ``<`` — comparing two Filters can raise
    TypeError. Confirm the intended use (e.g. sorting) before relying on it.
    """
    type: PredicateType
    # None presumably only for ADHOC operators — TODO confirm.
    column: Dimension | Metric | None
    operator: Operator
    # A frozenset is used for multi-valued operators such as IN / NOT IN.
    value: FilterValues | frozenset[FilterValues]
class OrderDirection(enum.Enum):
    """Sort direction for ordering entries."""
    ASC = "ASC"
    DESC = "DESC"
# One ORDER BY entry: the column/expression to sort on and its direction.
OrderTuple = tuple[Metric | Dimension | AdhocExpression, OrderDirection]
@dataclass(frozen=True)
class GroupLimit:
    """
    Limit query to top/bottom N combinations of specified dimensions.

    The `filters` parameter allows specifying separate filter constraints for the
    group limit subquery. This is useful when you want to determine the top N groups
    using different criteria (e.g., a different time range) than the main query.

    For example, you might want to find the top 10 products by sales over the last
    30 days, but then show daily sales for those products over the last 7 days.
    """
    # Dimensions whose value combinations are ranked.
    dimensions: list[Dimension]
    # Number of groups to keep.
    top: int
    metric: Metric | None
    # DESC keeps the top N groups, ASC the bottom N.
    direction: OrderDirection = OrderDirection.DESC
    # Presumably folds the remaining groups into an "others" bucket — TODO confirm.
    group_others: bool = False
    # Optional filters applied only to the group-limit subquery.
    filters: set[Filter] | None = None
@dataclass(frozen=True)
class SemanticRequest:
    """
    Represents a request made to obtain semantic results.

    This could be a SQL query, an HTTP request, etc.
    """
    type: str
    definition: str
@dataclass(frozen=True)
class SemanticResult:
    """
    Represents the results of a semantic query.

    Includes any requests (SQL queries, HTTP requests) that were performed
    to obtain the results, to help with troubleshooting.
    """
    requests: list[SemanticRequest]
    # Result set as an Arrow table.
    results: pa.Table
@dataclass(frozen=True)
class SemanticQuery:
    """
    Represents a semantic query.
    """
    metrics: list[Metric]
    dimensions: list[Dimension]
    filters: set[Filter] | None = None
    order: list[OrderTuple] | None = None
    limit: int | None = None
    offset: int | None = None
    group_limit: GroupLimit | None = None

View File

@@ -0,0 +1,113 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import enum
from abc import ABC, abstractmethod
from superset_core.semantic_layers.types import (
Dimension,
Filter,
Metric,
SemanticQuery,
SemanticResult,
)
# TODO (betodealmeida): move to the extension JSON
class SemanticViewFeature(enum.Enum):
    """
    Custom features supported by semantic layers.

    Views advertise these via ``SemanticView.features`` so callers can
    probe capabilities before building a query.
    """
    # Supports AdhocExpression entries in ORDER BY clauses.
    ADHOC_EXPRESSIONS_IN_ORDERBY = "ADHOC_EXPRESSIONS_IN_ORDERBY"
    # Supports GroupLimit (top/bottom N groups) in SemanticQuery.
    GROUP_LIMIT = "GROUP_LIMIT"
    # Supports folding remaining groups when a GroupLimit is applied.
    GROUP_OTHERS = "GROUP_OTHERS"
class SemanticView(ABC):
    """
    Abstract base class for semantic views.

    A semantic view exposes metrics and dimensions from an external
    modeling framework and executes semantic queries against it.
    """
    # Capabilities advertised by the implementation; see SemanticViewFeature.
    features: frozenset[SemanticViewFeature]
    # Implementations must expose a display name for the view.
    # Declared here as a type annotation (not abstract) so that existing
    # implementations are not required to add a formal @abstractmethod.
    name: str
    @abstractmethod
    def uid(self) -> str:
        """
        Returns a unique identifier for the semantic view.
        """
    @abstractmethod
    def get_dimensions(self) -> set[Dimension]:
        """
        Get the dimensions defined in the semantic view.
        """
    @abstractmethod
    def get_metrics(self) -> set[Metric]:
        """
        Get the metrics defined in the semantic view.
        """
    @abstractmethod
    def get_values(
        self,
        dimension: Dimension,
        filters: set[Filter] | None = None,
    ) -> SemanticResult:
        """
        Return distinct values for a dimension, optionally narrowed by
        ``filters``.
        """
    @abstractmethod
    def get_table(self, query: SemanticQuery) -> SemanticResult:
        """
        Execute a semantic query and return the results.
        """
    @abstractmethod
    def get_row_count(self, query: SemanticQuery) -> SemanticResult:
        """
        Execute a query and return the number of rows the result would have.
        """
    @abstractmethod
    def get_compatible_metrics(
        self,
        selected_metrics: set[Metric],
        selected_dimensions: set[Dimension],
    ) -> set[Metric]:
        """
        Return metrics compatible with the selected dimensions.
        """
    @abstractmethod
    def get_compatible_dimensions(
        self,
        selected_metrics: set[Metric],
        selected_dimensions: set[Dimension],
    ) -> set[Dimension]:
        """
        Return dimensions compatible with the selected metrics.
        """

View File

@@ -28,8 +28,14 @@
"@emotion/cache": "^11.4.0",
"@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.1",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/ibm-plex-mono": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@googleapis/sheets": "^13.0.1",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@luma.gl/constants": "~9.2.5",
"@luma.gl/core": "~9.2.5",
"@luma.gl/engine": "~9.2.5",
@@ -37,6 +43,7 @@
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",
@@ -3913,6 +3920,15 @@
}
}
},
"node_modules/@fontsource/fira-code": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/fira-code/-/fira-code-5.2.7.tgz",
"integrity": "sha512-tnB9NNund9TwIym8/7DMJe573nlPEQb+fKUV5GL8TBYXjIhDvL0D7mgmNVNQUPhXp+R7RylQeiBdkA4EbOHPGQ==",
"license": "OFL-1.1",
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
},
"node_modules/@fontsource/ibm-plex-mono": {
"version": "5.2.7",
"resolved": "https://registry.npmjs.org/@fontsource/ibm-plex-mono/-/ibm-plex-mono-5.2.7.tgz",
@@ -3927,7 +3943,6 @@
"resolved": "https://registry.npmjs.org/@fontsource/inter/-/inter-5.2.8.tgz",
"integrity": "sha512-P6r5WnJoKiNVV+zvW2xM13gNdFhAEpQ9dQJHt3naLvfg+LkF2ldgSLiF4T41lf1SQCM9QmkqPTn4TH568IRagg==",
"license": "OFL-1.1",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/ayuhito"
}
@@ -3954,6 +3969,26 @@
"node": ">=12.0.0"
}
},
"node_modules/@great-expectations/jsonforms-antd-renderers": {
"version": "2.3.5",
"resolved": "https://registry.npmjs.org/@great-expectations/jsonforms-antd-renderers/-/jsonforms-antd-renderers-2.3.5.tgz",
"integrity": "sha512-nWJQCX6zg2mQNk+QT5SFZUkaq2SNDRO5H7zoJmNvlndd0Byoq6AaB+UTdGt/SpO1knJFe80mmiWwh99fY/go3A==",
"license": "MIT",
"dependencies": {
"lodash.isempty": "^4.4.0",
"lodash.merge": "^4.6.2",
"lodash.range": "^3.2.0",
"lodash.startcase": "^4.4.0"
},
"peerDependencies": {
"@ant-design/icons": "^5.3.0",
"@jsonforms/core": "^3.3.0",
"@jsonforms/react": "^3.3.0",
"antd": "^5.14.0",
"dayjs": "^1",
"react": "^17 || ^18"
}
},
"node_modules/@hapi/address": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/@hapi/address/-/address-5.1.1.tgz",
@@ -6324,6 +6359,45 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@jsonforms/core": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/core/-/core-3.7.0.tgz",
"integrity": "sha512-CE9viWtwi9QWLqlWLeOul1/R1GRAyOA9y6OoUpsCc0FhyR+g5p29F3k0fUExHWxL0Sf4KHcXYkfhtqfRBPS8ww==",
"license": "MIT",
"dependencies": {
"@types/json-schema": "^7.0.3",
"ajv": "^8.6.1",
"ajv-formats": "^2.1.0",
"lodash": "^4.17.21"
}
},
"node_modules/@jsonforms/react": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/react/-/react-3.7.0.tgz",
"integrity": "sha512-HkY7qAx8vW97wPEgZ7GxCB3iiXG1c95GuObxtcDHGPBJWMwnxWBnVYJmv5h7nthrInKsQKHZL5OusnC/sj/1GQ==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonforms/vanilla-renderers": {
"version": "3.7.0",
"resolved": "https://registry.npmjs.org/@jsonforms/vanilla-renderers/-/vanilla-renderers-3.7.0.tgz",
"integrity": "sha512-RdXQGsheARUJVbaTe6SqGw9W4/yrm0BgUok6OKUj8krp1NF4fqXc5UbYGHFksMR/p7LCuoYHCtQzKLXEfxJbDw==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"peerDependencies": {
"@jsonforms/core": "3.7.0",
"@jsonforms/react": "3.7.0",
"react": "^16.12.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/@jsonjoy.com/base64": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz",
@@ -9494,6 +9568,89 @@
"integrity": "sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ==",
"license": "MIT"
},
"node_modules/@rjsf/antd": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/antd/-/antd-5.24.13.tgz",
"integrity": "sha512-UiWE8xoBxxCoe/SEkdQEmL5E6z3I1pw0+y0dTyGt8SHfAxxFc4/OWn7tKOAiNsKCXgf83t0JKn6CHWLD01sAdQ==",
"license": "Apache-2.0",
"dependencies": {
"classnames": "^2.5.1",
"lodash": "^4.17.21",
"lodash-es": "^4.17.21",
"rc-picker": "2.7.6"
},
"engines": {
"node": ">=14"
},
"peerDependencies": {
"@ant-design/icons": "^4.0.0 || ^5.0.0",
"@rjsf/core": "^5.24.x",
"@rjsf/utils": "^5.24.x",
"antd": "^4.24.0 || ^5.8.5",
"dayjs": "^1.8.0",
"react": "^16.14.0 || >=17"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker": {
"version": "2.7.6",
"resolved": "https://registry.npmjs.org/rc-picker/-/rc-picker-2.7.6.tgz",
"integrity": "sha512-H9if/BUJUZBOhPfWcPeT15JUI3/ntrG9muzERrXDkSoWmDj4yzmBvumozpxYrHwjcKnjyDGAke68d+whWwvhHA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "^2.2.1",
"date-fns": "2.x",
"dayjs": "1.x",
"moment": "^2.24.0",
"rc-trigger": "^5.0.4",
"rc-util": "^5.37.0",
"shallowequal": "^1.1.0"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger": {
"version": "5.3.4",
"resolved": "https://registry.npmjs.org/rc-trigger/-/rc-trigger-5.3.4.tgz",
"integrity": "sha512-mQv+vas0TwKcjAO2izNPkqR4j86OemLRmvL2nOzdP9OWNWA1ivoTt5hzFqYNW9zACwmTezRiN8bttrC7cZzYSw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.18.3",
"classnames": "^2.2.6",
"rc-align": "^4.0.0",
"rc-motion": "^2.0.0",
"rc-util": "^5.19.2"
},
"engines": {
"node": ">=8.x"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/antd/node_modules/rc-picker/node_modules/rc-trigger/node_modules/rc-align": {
"version": "4.0.15",
"resolved": "https://registry.npmjs.org/rc-align/-/rc-align-4.0.15.tgz",
"integrity": "sha512-wqJtVH60pka/nOX7/IspElA8gjPNQKIx/ZqJ6heATCkXpe1Zg4cPVrMD2vC96wjsFFL8WsmhPbx9tdMo1qqlIA==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.10.1",
"classnames": "2.x",
"dom-align": "^1.7.0",
"rc-util": "^5.26.0",
"resize-observer-polyfill": "^1.5.1"
},
"peerDependencies": {
"react": ">=16.9.0",
"react-dom": ">=16.9.0"
}
},
"node_modules/@rjsf/core": {
"version": "5.24.13",
"resolved": "https://registry.npmjs.org/@rjsf/core/-/core-5.24.13.tgz",
@@ -20795,6 +20952,22 @@
"topojson": "^1.6.19"
}
},
"node_modules/date-fns": {
"version": "2.30.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz",
"integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==",
"license": "MIT",
"dependencies": {
"@babel/runtime": "^7.21.0"
},
"engines": {
"node": ">=0.11"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/date-fns"
}
},
"node_modules/dateformat": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.2.tgz",
@@ -21402,6 +21575,12 @@
"integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==",
"license": "MIT"
},
"node_modules/dom-align": {
"version": "1.12.4",
"resolved": "https://registry.npmjs.org/dom-align/-/dom-align-1.12.4.tgz",
"integrity": "sha512-R8LUSEay/68zE5c8/3BDxiTEvgb4xZTF0RKmAHfiEVN3klfIpXfi2/QCoiWPccVQ0J/ZGdz9OjzL4uJEP/MRAw==",
"license": "MIT"
},
"node_modules/dom-converter": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz",
@@ -33114,6 +33293,12 @@
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==",
"license": "MIT"
},
"node_modules/lodash.isempty": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz",
"integrity": "sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==",
"license": "MIT"
},
"node_modules/lodash.isequal": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
@@ -33145,7 +33330,18 @@
"version": "4.6.2",
"resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
"integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
"dev": true,
"license": "MIT"
},
"node_modules/lodash.range": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/lodash.range/-/lodash.range-3.2.0.tgz",
"integrity": "sha512-Fgkb7SinmuzqgIhNhAElo0BL/R1rHCnhwSZf78omqSwvWqD0kD2ssOAutQonDKH/ldS8BxA72ORYI09qAY9CYg==",
"license": "MIT"
},
"node_modules/lodash.startcase": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
"integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
"license": "MIT"
},
"node_modules/lodash.uniq": {
@@ -36356,6 +36552,15 @@
"node": ">=0.10.0"
}
},
"node_modules/moment": {
"version": "2.30.1",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz",
"integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==",
"license": "MIT",
"engines": {
"node": "*"
}
},
"node_modules/monaco-editor": {
"version": "0.52.2",
"resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz",
@@ -43365,6 +43570,12 @@
"integrity": "sha512-b6i4ZpVuUxB9h5gfCxPiusKYkqTMOjEbBs4wMaFbkfia4yFv92UKZ6Df8WXcKbn08JNL/abvg3FnMAOfakDvUw==",
"license": "MIT"
},
"node_modules/shallowequal": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz",
"integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==",
"license": "MIT"
},
"node_modules/shapefile": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/shapefile/-/shapefile-0.3.1.tgz",
@@ -50432,7 +50643,7 @@
"acorn": "^8.16.0",
"d3-array": "^3.2.4",
"lodash": "^4.18.1",
"zod": "^4.4.1"
"zod": "^4.4.3"
},
"peerDependencies": {
"@apache-superset/core": "*",

View File

@@ -117,7 +117,14 @@
"@luma.gl/gltf": "~9.2.5",
"@luma.gl/shadertools": "~9.2.5",
"@luma.gl/webgl": "~9.2.5",
"@fontsource/fira-code": "^5.2.7",
"@fontsource/inter": "^5.2.8",
"@great-expectations/jsonforms-antd-renderers": "^2.2.10",
"@jsonforms/core": "^3.7.0",
"@jsonforms/react": "^3.7.0",
"@jsonforms/vanilla-renderers": "^3.7.0",
"@reduxjs/toolkit": "^1.9.3",
"@rjsf/antd": "^5.24.13",
"@rjsf/core": "^5.24.13",
"@rjsf/utils": "^5.24.3",
"@rjsf/validator-ajv8": "^5.24.13",

View File

@@ -23,7 +23,7 @@ import { Label } from '..';
// Define the prop types for DatasetTypeLabel
interface DatasetTypeLabelProps {
datasetType: 'physical' | 'virtual'; // Accepts only 'physical' or 'virtual'
datasetType: 'physical' | 'virtual' | 'semantic_view';
}
const SIZE = 's'; // Define the size as a constant
@@ -32,6 +32,22 @@ export const DatasetTypeLabel: React.FC<DatasetTypeLabelProps> = ({
datasetType,
}) => {
const theme = useTheme();
if (datasetType === 'semantic_view') {
return (
<Label
icon={
<Icons.ApartmentOutlined
iconSize={SIZE}
iconColor={theme.colorInfo}
/>
}
type="info"
style={{ color: theme.colorInfo }}
>
{t('Semantic')}
</Label>
);
}
const isPhysical = datasetType === 'physical';
const label: string = isPhysical ? t('Physical') : t('Virtual');
const labelType = isPhysical ? 'primary' : 'default';

View File

@@ -19,6 +19,15 @@
import { DatasourceType } from './types/Datasource';
const DATASOURCE_TYPE_MAP: Record<string, DatasourceType> = {
table: DatasourceType.Table,
query: DatasourceType.Query,
dataset: DatasourceType.Dataset,
sl_table: DatasourceType.SlTable,
saved_query: DatasourceType.SavedQuery,
semantic_view: DatasourceType.SemanticView,
};
export default class DatasourceKey {
readonly id: number;
@@ -27,8 +36,7 @@ export default class DatasourceKey {
constructor(key: string) {
const [idStr, typeStr] = key.split('__');
this.id = parseInt(idStr, 10);
this.type = DatasourceType.Table; // default to SqlaTable model
this.type = typeStr === 'query' ? DatasourceType.Query : this.type;
this.type = DATASOURCE_TYPE_MAP[typeStr] ?? DatasourceType.Table;
}
public toString() {

View File

@@ -26,6 +26,7 @@ export enum DatasourceType {
Dataset = 'dataset',
SlTable = 'sl_table',
SavedQuery = 'saved_query',
SemanticView = 'semantic_view',
}
export interface Currency {
@@ -40,6 +41,13 @@ export interface Datasource {
id: number;
name: string;
type: DatasourceType;
/**
* The parent resource that owns this datasource.
* For SQL-based datasets this is the database; for semantic views it is the
* semantic layer. Use this field instead of the legacy `database` field when
* you only need the display name.
*/
parent?: { name: string };
columns: Column[];
metrics: Metric[];
description?: string;

View File

@@ -61,6 +61,7 @@ export enum FeatureFlag {
ListviewsDefaultCardView = 'LISTVIEWS_DEFAULT_CARD_VIEW',
Matrixify = 'MATRIXIFY',
ScheduledQueries = 'SCHEDULED_QUERIES',
SemanticLayers = 'SEMANTIC_LAYERS',
SqllabBackendPersistence = 'SQLLAB_BACKEND_PERSISTENCE',
SqlValidatorsByEngine = 'SQL_VALIDATORS_BY_ENGINE',
SshTunneling = 'SSH_TUNNELING',

View File

@@ -19,6 +19,7 @@
import fetchMock from 'fetch-mock';
import { SupersetClient, SupersetClientClass } from '@superset-ui/core';
import type { SupersetClientInterface } from '@superset-ui/core';
import { LOGIN_GLOB } from './fixtures/constants';
beforeAll(() => fetchMock.mockGlobal());
@@ -31,6 +32,10 @@ describe('SupersetClient', () => {
afterEach(() => SupersetClient.reset());
const clientWithGetUrl = SupersetClient as SupersetClientInterface & {
getUrl: (...args: unknown[]) => string;
};
test('exposes configure, init, get, post, postForm, delete, put, request, reset, getGuestToken, getCSRFToken, getUrl, isAuthenticated, and reAuthenticate methods', () => {
expect(typeof SupersetClient.configure).toBe('function');
expect(typeof SupersetClient.init).toBe('function');
@@ -43,7 +48,7 @@ describe('SupersetClient', () => {
expect(typeof SupersetClient.reset).toBe('function');
expect(typeof SupersetClient.getGuestToken).toBe('function');
expect(typeof SupersetClient.getCSRFToken).toBe('function');
expect(typeof SupersetClient.getUrl).toBe('function');
expect(typeof clientWithGetUrl.getUrl).toBe('function');
expect(typeof SupersetClient.isAuthenticated).toBe('function');
expect(typeof SupersetClient.reAuthenticate).toBe('function');
});
@@ -58,7 +63,7 @@ describe('SupersetClient', () => {
expect(SupersetClient.request).toThrow();
expect(SupersetClient.getGuestToken).toThrow();
expect(SupersetClient.getCSRFToken).toThrow();
expect(SupersetClient.getUrl).toThrow();
expect(clientWithGetUrl.getUrl).toThrow();
expect(SupersetClient.isAuthenticated).toThrow();
expect(SupersetClient.reAuthenticate).toThrow();
expect(SupersetClient.configure).not.toThrow();
@@ -100,7 +105,7 @@ describe('SupersetClient', () => {
const getUrlSpy = jest.spyOn(SupersetClientClass.prototype, 'getUrl');
SupersetClient.configure({ appRoot: '/app' });
expect(SupersetClient.getUrl({ endpoint: '/some/path' })).toContain(
expect(clientWithGetUrl.getUrl({ endpoint: '/some/path' })).toContain(
'/app/some/path',
);
expect(getUrlSpy).toHaveBeenCalledTimes(1);

View File

@@ -28,10 +28,11 @@ test('DEFAULT_METRICS', () => {
});
test('DatasourceType', () => {
expect(Object.keys(DatasourceType).length).toBe(5);
expect(Object.keys(DatasourceType).length).toBe(6);
expect(DatasourceType.Table).toBe('table');
expect(DatasourceType.Query).toBe('query');
expect(DatasourceType.Dataset).toBe('dataset');
expect(DatasourceType.SlTable).toBe('sl_table');
expect(DatasourceType.SavedQuery).toBe('saved_query');
expect(DatasourceType.SemanticView).toBe('semantic_view');
});

View File

@@ -71,10 +71,16 @@ describe('TimeFormatter', () => {
// PivotData.processRecord coerces values with String(), turning numeric
// timestamps into strings.
const timestamp = PREVIEW_TIME.getTime().toString();
expect(formatter.format(timestamp)).toEqual('2017');
expect(formatter.format(timestamp as unknown as number | Date)).toEqual(
'2017',
);
});
test('handles ISO-8601 string without misinterpreting it as a number', () => {
expect(formatter.format('2017-02-14T11:22:33.000Z')).toEqual('2017');
expect(
formatter.format(
'2017-02-14T11:22:33.000Z' as unknown as number | Date,
),
).toEqual('2017');
});
test('otherwise returns formatted value', () => {
expect(formatter.format(PREVIEW_TIME)).toEqual('2017');

View File

@@ -1402,7 +1402,7 @@ test('getAxisType with forced categorical', () => {
test('getAxisType treats numeric as category for bar charts', () => {
expect(
getAxisType(
(getAxisType as (...args: unknown[]) => AxisType)(
false,
false,
GenericDataType.Numeric,
@@ -1410,7 +1410,7 @@ test('getAxisType treats numeric as category for bar charts', () => {
),
).toEqual(AxisType.Category);
expect(
getAxisType(
(getAxisType as (...args: unknown[]) => AxisType)(
false,
false,
GenericDataType.Numeric,

View File

@@ -359,7 +359,9 @@ class Chart extends PureComponent<ChartProps, {}> {
width,
} = this.props;
const databaseName = datasource?.database?.name as string | undefined;
const databaseName =
datasource?.parent?.name ??
(datasource?.database?.name as string | undefined);
const isLoading = chartStatus === 'loading';
// Suppress spinner during auto-refresh to avoid visual flicker

View File

@@ -58,6 +58,7 @@ import { Dataset } from '../types';
import TableControls from './DrillDetailTableControls';
import { getDrillPayload } from './utils';
import { ResultsPage } from './types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const PAGE_SIZE = 50;
@@ -373,7 +374,7 @@ export default function DrillDetailPane({
tableContent = <Loading />;
} else if (resultsPage?.total === 0) {
// Render empty state if no results are returned for page
const title = t('No rows were returned for this dataset');
const title = t('No rows were returned for this %s', datasetLabelLower());
tableContent = <EmptyState image="document.svg" title={title} />;
} else {
// Render table if at least one page has successfully loaded

View File

@@ -52,6 +52,10 @@ import type {
DatabaseObject,
} from './types';
import { StyledFormLabel } from './styles';
import {
databaseLabel,
databasesLabelLower,
} from 'src/features/semanticLayers/label';
const DatabaseSelectorWrapper = styled.div<{ horizontal?: boolean }>`
${({ theme, horizontal }) =>
@@ -433,7 +437,11 @@ export function DatabaseSelector({
function renderDatabaseSelect() {
if (sqlLabMode) {
return renderSelectRow(
t('Select database or type to search databases'),
t(
'Select %s or type to search %s',
databaseLabel().toLowerCase(),
databasesLabelLower(),
),
null,
null,
{
@@ -450,16 +458,24 @@ export function DatabaseSelector({
return (
<div>
{renderSelectRow(
t('Database'),
databaseLabel(),
<AsyncSelect
ariaLabel={t('Select database or type to search databases')}
ariaLabel={t(
'Select %s or type to search %s',
databaseLabel().toLowerCase(),
databasesLabelLower(),
)}
optionFilterProps={['database_name', 'value']}
data-test="select-database"
lazyLoading={false}
notFoundContent={emptyState}
onChange={changeDatabase}
value={currentDb}
placeholder={t('Select database or type to search databases')}
placeholder={t(
'Select %s or type to search %s',
databaseLabel().toLowerCase(),
databasesLabelLower(),
)}
disabled={!isDatabaseSelectEnabled || readOnly}
options={loadDatabases}
sortComparator={sortComparator}

View File

@@ -27,6 +27,7 @@ const mockStore = configureStore([thunk]);
const store = mockStore({});
const mockedProps = {
addSuccessToast: jest.fn(),
addDangerToast: () => {},
onDatasourceSave: jest.fn(),
onChange: () => {},
@@ -91,3 +92,36 @@ test('changes the datasource', async () => {
expect(fetchMock.callHistory.calls(/api\/v1\/dataset\/7/)).toHaveLength(1),
);
});
test('does not show success toast or close modal when datasource request fails', async () => {
const props = {
...mockedProps,
addDangerToast: jest.fn(),
addSuccessToast: jest.fn(),
onHide: jest.fn(),
};
(fetchMock.removeRoutes as any)(DATASOURCE_ENDPOINT);
(fetchMock.removeRoutes as any)(DATASOURCES_ENDPOINT);
(fetchMock.removeRoutes as any)(INFO_ENDPOINT);
fetchMock.get(DATASOURCES_ENDPOINT, { result: [mockDatasource['7__table']] });
fetchMock.get(INFO_ENDPOINT, {});
fetchMock.get(DATASOURCE_ENDPOINT, 500);
const { findByTestId, getByRole } = setup(props);
const confirmLink = await findByTestId('datasource-link');
fireEvent.click(confirmLink);
fireEvent.click(getByRole('button', { name: 'Proceed' }));
await waitFor(() => {
expect(fetchMock.callHistory.calls(/api\/v1\/dataset\/7/)).toHaveLength(1);
});
expect(props.addSuccessToast).not.toHaveBeenCalled();
expect(props.onHide).not.toHaveBeenCalled();
(fetchMock.removeRoutes as any)(DATASOURCE_ENDPOINT);
(fetchMock.removeRoutes as any)(DATASOURCES_ENDPOINT);
(fetchMock.removeRoutes as any)(INFO_ENDPOINT);
fetchMock.get(DATASOURCES_ENDPOINT, { result: [mockDatasource['7__table']] });
fetchMock.get(INFO_ENDPOINT, {});
fetchMock.get(DATASOURCE_ENDPOINT, DATASOURCE_PAYLOAD);
});

View File

@@ -53,6 +53,7 @@ import {
import withToasts from 'src/components/MessageToasts/withToasts';
import { InputRef } from 'antd';
import type { Datasource, ChangeDatasourceModalProps } from '../types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const CONFIRM_WARNING_MESSAGE = t(
'Warning! Changing the dataset may break the chart if the metadata does not exist.',
@@ -109,7 +110,11 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
const {
state: { loading, resourceCollection, resourceCount },
fetchData,
} = useListViewResource<Dataset>('dataset', t('dataset'), addDangerToast);
} = useListViewResource<Dataset>(
'dataset',
datasetLabelLower(),
addDangerToast,
);
const selectDatasource = useCallback((datasource: Datasource) => {
setConfirmChange(true);
@@ -166,28 +171,27 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
setPageIndex(0);
};
const handleChangeConfirm = () => {
SupersetClient.get({
endpoint: `/api/v1/dataset/${confirmedDataset?.id}`,
})
.then(({ json }) => {
// eslint-disable-next-line no-param-reassign
json.result.type = 'table';
onDatasourceSave(json.result);
onChange(`${confirmedDataset?.id}__table`);
})
.catch(response => {
getClientErrorObject(response).then(
({ error, message }: { error: any; message: string }) => {
const errorMessage = error
? error.error || error.statusText || error
: message;
addDangerToast(errorMessage);
},
);
const handleChangeConfirm = async () => {
try {
const { json } = await SupersetClient.get({
endpoint: `/api/v1/dataset/${confirmedDataset?.id}`,
});
onHide();
addSuccessToast(t('Successfully changed dataset!'));
// eslint-disable-next-line no-param-reassign
json.result.type = 'table';
onDatasourceSave(json.result);
onChange(`${confirmedDataset?.id}__table`);
onHide();
addSuccessToast(t('Successfully changed %s!', datasetLabelLower()));
} catch (response) {
getClientErrorObject(response).then(
({ error, message }: { error: any; message: string }) => {
const errorMessage = error
? error.error || error.statusText || error
: message;
addDangerToast(errorMessage);
},
);
}
};
const handlerCancelConfirm = () => {
@@ -253,7 +257,7 @@ const ChangeDatasourceModal: FunctionComponent<ChangeDatasourceModalProps> = ({
onHide={onHide}
responsive
name="Swap dataset"
title={t('Swap dataset')}
title={t('Swap %s', datasetLabelLower())}
width={confirmChange ? '432px' : ''}
height={confirmChange ? 'auto' : '540px'}
hideFooter={!confirmChange}

View File

@@ -20,6 +20,7 @@ import { t } from '@apache-superset/core/translation';
import type { ErrorMessageComponentProps } from './types';
import { ErrorAlert } from './ErrorAlert';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export function DatasetNotFoundErrorMessage({
error,
@@ -29,7 +30,7 @@ export function DatasetNotFoundErrorMessage({
const { level, message } = error;
return (
<ErrorAlert
errorType={t('Missing dataset')}
errorType={t('Missing %s', datasetLabelLower())}
message={subtitle}
description={message}
type={level}

View File

@@ -60,6 +60,12 @@ function UIFilters(
filter.current?.clearFilter?.();
});
},
clearFilterById: (id: string) => {
const index = filters.findIndex(f => f.id === id);
if (index >= 0) {
filterRefs[index]?.current?.clearFilter?.();
}
},
}));
return (

View File

@@ -19,7 +19,14 @@
import { t } from '@apache-superset/core/translation';
import { Alert } from '@apache-superset/core/components';
import { styled } from '@apache-superset/core/theme';
import { useCallback, useEffect, useRef, useState, ReactNode } from 'react';
import {
useCallback,
useEffect,
useLayoutEffect,
useRef,
useState,
ReactNode,
} from 'react';
import cx from 'classnames';
import TableCollection from '@superset-ui/core/components/TableCollection';
import BulkTagModal from 'src/features/tags/BulkTagModal';
@@ -265,6 +272,11 @@ export interface ListViewProps<T extends object = any> {
columnsForWrapText?: string[];
enableBulkTag?: boolean;
bulkTagResourceName?: string;
/** Optional ref exposed to callers for programmatic filter control. */
filtersRef?: React.RefObject<{
clearFilters: () => void;
clearFilterById: (id: string) => void;
}>;
}
export function ListView<T extends object = any>({
@@ -291,6 +303,7 @@ export function ListView<T extends object = any>({
columnsForWrapText,
enableBulkTag = false,
bulkTagResourceName,
filtersRef,
addSuccessToast,
addDangerToast,
}: ListViewProps<T>) {
@@ -338,7 +351,21 @@ export function ListView<T extends object = any>({
});
}
const filterControlsRef = useRef<{ clearFilters: () => void }>(null);
const filterControlsRef = useRef<{
clearFilters: () => void;
clearFilterById: (id: string) => void;
}>(null);
// Wire the optional external filtersRef to our internal filterControlsRef.
// useLayoutEffect fires synchronously after DOM mutations, guaranteeing the
// ref is populated before the first paint and after every update.
useLayoutEffect(() => {
if (filtersRef) {
(
filtersRef as React.MutableRefObject<typeof filterControlsRef.current>
).current = filterControlsRef.current;
}
});
const handleClearFilterControls = useCallback(() => {
if (query.filters) {

View File

@@ -36,6 +36,7 @@ import { Tooltip, ImageLoader } from '@superset-ui/core/components';
import { GenericLink, usePluginContext } from 'src/components';
import { assetUrl } from 'src/utils/assetUrl';
import { Theme } from '@emotion/react';
import { datasetLabel } from 'src/features/semanticLayers/label';
const FALLBACK_THUMBNAIL_URL = assetUrl(
'/static/assets/images/chart-card-fallback.svg',
@@ -283,7 +284,7 @@ const AddSliceCard: FC<{
>
<MetadataItem label={t('Viz type')} value={vizName} />
<MetadataItem
label={t('Dataset')}
label={datasetLabel()}
value={
datasourceUrl ? (
<GenericLink to={datasourceUrl}>

View File

@@ -55,6 +55,7 @@ import type { ConnectDragSource } from 'react-dnd';
import AddSliceCard from './AddSliceCard';
import AddSliceDragPreview from './dnd/AddSliceDragPreview';
import { DragDroppable } from './dnd/DragDroppable';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export type SliceAdderProps = {
theme: Theme;
@@ -88,7 +89,7 @@ const KEYS_TO_FILTERS = ['slice_name', 'viz_type', 'datasource_name'];
const KEYS_TO_SORT = {
slice_name: t('name'),
viz_type: t('viz type'),
datasource_name: t('dataset'),
datasource_name: datasetLabelLower(),
changed_on: t('recent'),
};

View File

@@ -51,6 +51,10 @@ import { addDangerToast } from 'src/components/MessageToasts/actions';
import { cachedSupersetGet } from 'src/utils/cachedSupersetGet';
import { dispatchChartCustomizationHoverAction } from './utils';
import { mergeExtraFormData } from '../../utils';
import {
datasetLabel as getDatasetLabel,
datasetLabelLower,
} from 'src/features/semanticLayers/label';
interface ColumnApiResponse {
column_name?: string;
@@ -262,9 +266,9 @@ const GroupByFilterCardContent: FC<{
</Row>
<Row>
<RowLabel>{t('Dataset')}</RowLabel>
<RowLabel>{getDatasetLabel()}</RowLabel>
<RowValue>
{typeof datasetLabel === 'string' ? datasetLabel : 'Dataset'}
{typeof datasetLabel === 'string' ? datasetLabel : t('Dataset')}
</RowValue>
</Row>
@@ -475,7 +479,13 @@ const GroupByFilterCard: FC<GroupByFilterCardProps> = ({
} catch (error) {
setColumnOptions([]);
dispatch(
addDangerToast(t('Failed to load columns for dataset %s', datasetId)),
addDangerToast(
t(
'Failed to load columns for %s %s',
datasetLabelLower(),
datasetId,
),
),
);
} finally {
setLoading(false);

View File

@@ -30,6 +30,11 @@ import {
Dataset,
DatasetSelectLabel,
} from 'src/features/datasets/DatasetSelectLabel';
import {
datasetLabel,
datasetLabelLower,
datasetsLabelLower,
} from 'src/features/semanticLayers/label';
interface DatasetSelectProps {
onChange: (value: { label: string | ReactNode; value: number }) => void;
@@ -101,13 +106,13 @@ const DatasetSelect = ({
return (
<AsyncSelect
ariaLabel={t('Dataset')}
ariaLabel={datasetLabel()}
value={value}
options={loadDatasetOptionsCallback}
onChange={onChange}
optionFilterProps={['table_name']}
notFoundContent={t('No compatible datasets found')}
placeholder={t('Select a dataset')}
notFoundContent={t('No compatible %s found', datasetsLabelLower())}
placeholder={t('Select a %s', datasetLabelLower())}
/>
);
};

View File

@@ -120,6 +120,7 @@ import {
INPUT_WIDTH,
} from './constants';
import DependencyList from './DependencyList';
import { datasetLabel } from 'src/features/semanticLayers/label';
const FORM_ITEM_WIDTH = 260;
@@ -325,6 +326,12 @@ const FiltersConfigForm = (
const filters = form.getFieldValue('filters');
const formValues = filters?.[filterId];
const formFilter = formValues || undoFormValues || defaultFormFilter;
const formFilterWithTimeGrains = formFilter as typeof formFilter & {
time_grains?: string[];
};
const filterToEditWithTimeGrains = filterToEdit as
| (Filter & { time_grains?: string[] })
| undefined;
const handleModifyFilter = useCallback(() => {
if (onModifyFilter) {
@@ -587,7 +594,8 @@ const FiltersConfigForm = (
!!filterToEdit?.time_range;
const hasTimeGrainPreFilter = !!(
formFilter?.time_grains?.length || filterToEdit?.time_grains?.length
formFilterWithTimeGrains?.time_grains?.length ||
filterToEditWithTimeGrains?.time_grains?.length
);
const hasEnableSingleValue =
@@ -1052,7 +1060,7 @@ const FiltersConfigForm = (
<StyledFormItem
expanded={expanded}
name={['filters', filterId, 'dataset']}
label={<StyledLabel>{t('Dataset')}</StyledLabel>}
label={<StyledLabel>{datasetLabel()}</StyledLabel>}
initialValue={
datasetDetails
? {
@@ -1072,7 +1080,10 @@ const FiltersConfigForm = (
rules={[
{
required: !isRemoved,
message: t('Dataset is required'),
message:
datasetLabel() === t('Datasource')
? t('Datasource is required')
: t('Dataset is required'),
},
]}
{...getFiltersConfigModalTestId('datasource-input')}
@@ -1098,7 +1109,7 @@ const FiltersConfigForm = (
) : (
<StyledFormItem
expanded={expanded}
label={<StyledLabel>{t('Dataset')}</StyledLabel>}
label={<StyledLabel>{datasetLabel()}</StyledLabel>}
>
<Loading position="inline-centered" />
</StyledFormItem>
@@ -1322,7 +1333,7 @@ const FiltersConfigForm = (
'time_grains',
]}
initialValue={
filterToEdit?.time_grains
filterToEditWithTimeGrains?.time_grains
}
{...getFiltersConfigModalTestId(
'time-grain-allowlist',

View File

@@ -113,7 +113,7 @@ function transformFormInput(
excluded: [],
};
return {
const result: Filter & { time_grains?: string[] } = {
id,
type: NativeFilterType.NativeFilter,
name: formInputs.name,
@@ -127,14 +127,17 @@ function transformFormInput(
adhoc_filters: formInputs.adhoc_filters,
time_range: formInputs.time_range,
granularity_sqla: formInputs.granularity_sqla,
time_grains: formInputs.time_grains?.length
? formInputs.time_grains
: undefined,
sortMetric: formInputs.sortMetric ?? null,
requiredFirst: formInputs.requiredFirst
? Object.values(formInputs.requiredFirst).find(rf => rf)
: undefined,
};
if (formInputs.time_grains?.length) {
result.time_grains = formInputs.time_grains;
}
return result;
}
function transformSavedFilter(id: string, filter: Filter): Filter {

View File

@@ -17,6 +17,7 @@
* under the License.
*/
import type { AnyAction } from 'redux';
import { SupersetClient } from '@superset-ui/core';
import { defaultState } from 'src/explore/store';
import exploreReducer, {
ExploreState,
@@ -240,3 +241,107 @@ describe('reducers', () => {
);
});
});
// Regression test: fetchCompatibility stamps every request with a module-level
// sequence number and discards any response that resolves after a newer
// request has been issued. Here the FIRST request resolves LAST, and we assert
// its (stale) payload never reaches the store.
test('fetchCompatibility ignores stale async responses', async () => {
const dispatch = jest.fn();
// Manually-resolvable promises so the test controls completion order of the
// two mocked SupersetClient.post calls.
let resolveFirst: (value: {
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}) => void;
let resolveSecond: (value: {
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}) => void;
const firstPromise = new Promise<{
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}>(resolve => {
resolveFirst = resolve;
});
const secondPromise = new Promise<{
json: {
result: {
compatible_metrics: string[];
compatible_dimensions: string[];
};
};
}>(resolve => {
resolveSecond = resolve;
});
// First post() call gets firstPromise, second call gets secondPromise.
const postSpy = jest.spyOn(SupersetClient, 'post');
postSpy
.mockImplementationOnce(() => firstPromise as never)
.mockImplementationOnce(() => secondPromise as never);
// Fire two overlapping requests for the same datasource; the second one
// supersedes the first.
const firstThunk = actions.fetchCompatibility(
'semantic_view',
7,
['m1'],
['d1'],
)(dispatch as any);
const secondThunk = actions.fetchCompatibility(
'semantic_view',
7,
['m2'],
['d2'],
)(dispatch as any);
// Resolve out of order: newest request first…
resolveSecond!({
json: {
result: {
compatible_metrics: ['m2'],
compatible_dimensions: ['d2'],
},
},
});
await secondThunk;
// …then the stale first request, which should be dropped by the seq guard.
resolveFirst!({
json: {
result: {
compatible_metrics: ['m1'],
compatible_dimensions: ['d1'],
},
},
});
await firstThunk;
// Collect only the SET_COMPATIBILITY actions that completed loading
// (compatibilityLoading === false), i.e. the "result" dispatches.
const compatibilityActions = dispatch.mock.calls
.map(call => call[0])
.filter((action: AnyAction) => action.type === actions.SET_COMPATIBILITY);
const successfulActions = compatibilityActions.filter(
(action: AnyAction) => action.compatibilityLoading === false,
);
// The fresh (m2/d2) payload must be present…
expect(successfulActions).toContainEqual(
expect.objectContaining({
compatibleMetrics: ['m2'],
compatibleDimensions: ['d2'],
compatibilityLoading: false,
}),
);
// …and the stale (m1/d1) payload must never have been dispatched.
expect(successfulActions).not.toContainEqual(
expect.objectContaining({
compatibleMetrics: ['m1'],
compatibleDimensions: ['d1'],
compatibilityLoading: false,
}),
);
postSpy.mockRestore();
});

View File

@@ -166,6 +166,90 @@ export function updateExploreChartState(
};
}
export const SET_COMPATIBILITY = 'SET_COMPATIBILITY';
/**
 * Action creator for compatibility updates in the Explore reducer.
 *
 * `null` lists mean "no filtering active" (SQL datasets, or no selection yet);
 * concrete arrays restrict the selectable metrics/dimensions.
 */
export function setCompatibility(payload: {
  compatibleMetrics: string[] | null;
  compatibleDimensions: string[] | null;
  compatibilityLoading: boolean;
}) {
  const { compatibleMetrics, compatibleDimensions, compatibilityLoading } =
    payload;
  return {
    type: SET_COMPATIBILITY,
    compatibleMetrics,
    compatibleDimensions,
    compatibilityLoading,
  };
}
// Monotonically increasing stamp shared by all fetchCompatibility calls;
// used to detect and drop responses that arrive after a newer request.
let compatibilityRequestSeq = 0;
/**
 * Fetch compatible metrics and dimensions for the current selection.
 *
 * Only fires for semantic views — SQL datasets always have full compatibility
 * so we short-circuit to `null` (no filtering) for everything else.
 *
 * Covers both real-time selection changes (M3) and saved-chart loading (M4):
 * call this thunk on mount as well as whenever the metric / dimension
 * selection changes in Explore.
 */
export function fetchCompatibility(
  datasourceType: string,
  datasourceId: number,
  selectedMetrics: string[],
  selectedDimensions: string[],
) {
  return async (dispatch: Dispatch) => {
    // Stamp this request; any later call bumps the counter, making this
    // request "stale" at its await points below.
    compatibilityRequestSeq += 1;
    const requestSeq = compatibilityRequestSeq;
    if (datasourceType !== 'semantic_view') {
      dispatch(
        setCompatibility({
          compatibleMetrics: null,
          compatibleDimensions: null,
          compatibilityLoading: false,
        }),
      );
      return;
    }
    // Clear any previous lists and flip the loading flag before the call.
    dispatch(
      setCompatibility({
        compatibleMetrics: null,
        compatibleDimensions: null,
        compatibilityLoading: true,
      }),
    );
    try {
      const { json } = await SupersetClient.post({
        endpoint: `/api/v1/datasource/${datasourceType}/${datasourceId}/compatible`,
        jsonPayload: {
          selected_metrics: selectedMetrics,
          selected_dimensions: selectedDimensions,
        },
      });
      // Stale-response guard: a newer request was issued while we awaited.
      if (requestSeq !== compatibilityRequestSeq) {
        return;
      }
      dispatch(
        setCompatibility({
          compatibleMetrics: json.result.compatible_metrics,
          compatibleDimensions: json.result.compatible_dimensions,
          compatibilityLoading: false,
        }),
      );
    } catch {
      // On error fall back to no filtering so the user is never blocked.
      // Same stale-response guard applies to the error path.
      if (requestSeq !== compatibilityRequestSeq) {
        return;
      }
      dispatch(
        setCompatibility({
          compatibleMetrics: null,
          compatibleDimensions: null,
          compatibilityLoading: false,
        }),
      );
    }
  };
}
export const SET_STASH_FORM_DATA = 'SET_STASH_FORM_DATA';
export function setStashFormData(
isHidden: boolean,
@@ -208,6 +292,7 @@ export const exploreActions = {
sliceUpdated,
setForceQuery,
syncDatasourceMetadata,
fetchCompatibility,
};
export type ExploreActions = typeof exploreActions;

View File

@@ -151,11 +151,8 @@ export const getSlicePayload = async (
const [id, typeString] = formData.datasource.split('__');
datasourceId = parseInt(id, 10);
const formattedTypeString =
typeString.charAt(0).toUpperCase() + typeString.slice(1);
if (formattedTypeString in DatasourceType) {
datasourceType =
DatasourceType[formattedTypeString as keyof typeof DatasourceType];
if (Object.values(DatasourceType).includes(typeString as DatasourceType)) {
datasourceType = typeString as DatasourceType;
}
}

View File

@@ -19,6 +19,7 @@
import { useState, useEffect, useMemo, useCallback } from 'react';
import { t } from '@apache-superset/core/translation';
import { ensureIsArray } from '@superset-ui/core';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
import { styled } from '@apache-superset/core/theme';
import { EmptyState, Loading } from '@superset-ui/core/components';
import { GenericDataType } from '@apache-superset/core/common';
@@ -160,7 +161,10 @@ export const SamplesPane = ({
}
if (data.length === 0) {
const title = t('No samples were returned for this dataset');
const title = t(
'No samples were returned for this %s',
datasetLabelLower(),
);
return <EmptyState image="document.svg" title={title} />;
}

View File

@@ -26,7 +26,7 @@ test('should render', async () => {
value={{ metric_name: 'test', uuid: '1' }}
type={DndItemType.Metric}
/>,
{ useDnd: true },
{ useDnd: true, useRedux: true, initialState: { explore: {} } },
);
expect(
@@ -41,7 +41,7 @@ test('should have attribute draggable:true', async () => {
value={{ metric_name: 'test', uuid: '1' }}
type={DndItemType.Metric}
/>,
{ useDnd: true },
{ useDnd: true, useRedux: true, initialState: { explore: {} } },
);
expect(

View File

@@ -16,8 +16,9 @@
* specific language governing permissions and limitations
* under the License.
*/
import { RefObject } from 'react';
import { RefObject, useMemo } from 'react';
import { useDrag } from 'react-dnd';
import { useSelector } from 'react-redux';
import { Metric } from '@superset-ui/core';
import { css, styled, useTheme } from '@apache-superset/core/theme';
import { ColumnMeta } from '@superset-ui/chart-controls';
@@ -27,6 +28,7 @@ import {
StyledMetricOption,
} from 'src/explore/components/optionRenderers';
import { Icons } from '@superset-ui/core/components/Icons';
import { ExplorePageState } from 'src/explore/types';
import { DatasourcePanelDndItem } from '../types';
@@ -70,11 +72,38 @@ export default function DatasourcePanelDragOption(
) {
const { labelRef, showTooltip, type, value } = props;
const theme = useTheme();
// Read compatibility lists from Redux.
// `null` means no filtering is active (SQL datasets, or no selection yet).
const compatibleMetrics = useSelector<
ExplorePageState,
string[] | null | undefined
>(state => state.explore.compatibleMetrics);
const compatibleDimensions = useSelector<
ExplorePageState,
string[] | null | undefined
>(state => state.explore.compatibleDimensions);
// An item is compatible when the list is null (no filter) or when its
// name explicitly appears in the list returned by the backend.
const isCompatible = useMemo(() => {
if (type === DndItemType.Metric) {
if (!compatibleMetrics) return true;
return compatibleMetrics.includes((value as Metric).metric_name);
}
if (type === DndItemType.Column) {
if (!compatibleDimensions) return true;
return compatibleDimensions.includes((value as ColumnMeta).column_name);
}
return true;
}, [type, value, compatibleMetrics, compatibleDimensions]);
const [{ isDragging }, drag] = useDrag({
item: {
value: props.value,
type: props.type,
},
canDrag: isCompatible,
collect: monitor => ({
isDragging: monitor.isDragging(),
}),
@@ -87,7 +116,14 @@ export default function DatasourcePanelDragOption(
};
return (
<DatasourceItemContainer data-test="DatasourcePanelDragOption" ref={drag}>
<DatasourceItemContainer
data-test="DatasourcePanelDragOption"
ref={drag}
style={{
opacity: isCompatible ? 1 : 0.35,
cursor: isCompatible ? 'grab' : 'not-allowed',
}}
>
{type === DndItemType.Column ? (
<StyledColumnOption column={value as ColumnMeta} {...optionProps} />
) : (

View File

@@ -89,7 +89,7 @@ const setup = (data: DatasourcePanelItemProps['data'] = mockData) =>
<DatasourcePanelItem index={index} data={data} style={{}} />
))}
</>,
{ useDnd: true },
{ useDnd: true, useRedux: true, initialState: { explore: {} } },
);
test('renders each item accordingly', () => {

View File

@@ -122,7 +122,7 @@ const sortColumns = (slice: DatasourcePanelColumn[]) =>
if (col2?.is_dttm && !col1?.is_dttm) {
return 1;
}
return 0;
return (col1?.column_name ?? '').localeCompare(col2?.column_name ?? '');
})
.sort((a, b) => (b?.is_certified ?? 0) - (a?.is_certified ?? 0));
@@ -191,7 +191,9 @@ export default function DataSourcePanel({
const filteredMetrics = useMemo(() => {
if (!searchKeyword) {
return allowedMetrics ?? [];
return [...(allowedMetrics ?? [])].sort((a, b) =>
(a?.metric_name ?? '').localeCompare(b?.metric_name ?? ''),
);
}
return matchSorter(allowedMetrics, searchKeyword, {
keys: [

View File

@@ -36,6 +36,7 @@ import {
JsonObject,
MatrixifyFormData,
DatasourceType,
ensureIsArray,
} from '@superset-ui/core';
import {
ControlStateMapping,
@@ -412,6 +413,48 @@ function ExploreViewContainer(props: ExploreViewContainerProps) {
[originalTitle, theme?.brandAppName, theme?.brandLogoAlt],
);
// M3 + M4: fire compatibility check on mount and whenever the metric /
// dimension selection changes. Only semantic views use the endpoint;
// SQL datasets short-circuit to null inside fetchCompatibility.
const selectedMetrics = useMemo(
() =>
ensureIsArray(props.form_data.metrics).filter(
(m): m is string => typeof m === 'string',
),
// eslint-disable-next-line react-hooks/exhaustive-deps
[JSON.stringify(props.form_data.metrics)],
);
const selectedDimensions = useMemo(
() =>
[
...ensureIsArray(props.form_data.groupby),
...ensureIsArray(props.form_data.columns),
...(typeof props.form_data.x_axis === 'string'
? [props.form_data.x_axis]
: []),
].filter((d): d is string => typeof d === 'string'),
// eslint-disable-next-line react-hooks/exhaustive-deps
[
JSON.stringify(props.form_data.groupby),
JSON.stringify(props.form_data.columns),
props.form_data.x_axis,
],
);
useEffect(() => {
props.actions.fetchCompatibility(
props.datasource.type,
props.datasource.id as number,
selectedMetrics,
selectedDimensions,
);
// props.datasource.id covers the saved-chart-loading case (M4)
}, [
props.datasource.id,
props.datasource.type,
selectedMetrics,
selectedDimensions,
]);
const addHistory = useCallback(
async ({
isReplace = false,

View File

@@ -19,15 +19,19 @@
import { SHARED_COLUMN_CONFIG_PROPS } from './constants';
const tokenSeparators =
SHARED_COLUMN_CONFIG_PROPS.d3NumberFormat.tokenSeparators;
test('should allow commas in D3 format inputs', () => {
expect(tokenSeparators).toBeDefined();
expect(tokenSeparators).not.toContain(',');
const { options } = SHARED_COLUMN_CONFIG_PROPS.d3NumberFormat;
const labels = (options ?? []).map((option: { label: unknown }) =>
String(option.label),
);
expect(labels.some((label: string) => label.includes(','))).toBe(true);
});
test('should have correct default token separators', () => {
const expectedSeparators = ['\r\n', '\n', '\t', ';'];
expect(tokenSeparators).toEqual(expectedSeparators);
test('should use defaults from Select token separators', () => {
expect(
Object.prototype.hasOwnProperty.call(
SHARED_COLUMN_CONFIG_PROPS.d3NumberFormat,
'tokenSeparators',
),
).toBe(false);
});

View File

@@ -58,8 +58,6 @@ const d3NumberFormat: ControlFormItemSpec<'Select'> = {
creatable: true,
minWidth: '14em',
debounceDelay: 500,
// default value tokenSeparators in superset-frontend/packages/superset-ui-core/src/components/Select/constants.ts
tokenSeparators: ['\r\n', '\n', '\t', ';'],
};
const d3TimeFormat: ControlFormItemSpec<'Select'> = {

View File

@@ -40,11 +40,13 @@ import {
DatasourceModal,
ErrorAlert,
} from 'src/components';
import SemanticViewEditModal from 'src/features/semanticViews/SemanticViewEditModal';
import { Menu } from '@superset-ui/core/components/Menu';
import { Icons } from '@superset-ui/core/components/Icons';
import WarningIconWithTooltip from '@superset-ui/core/components/WarningIconWithTooltip';
import { URL_PARAMS } from 'src/constants';
import { getDatasourceAsSaveableDataset } from 'src/utils/datasourceUtils';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
import {
userHasPermission,
isUserAdmin,
@@ -68,6 +70,7 @@ interface ExtendedDatasource extends Datasource {
}>;
extra?: string;
health_check_message?: string;
cache_timeout?: number | null;
database?: {
id: number;
database_name: string;
@@ -375,7 +378,7 @@ class DatasourceControl extends PureComponent<
const canAccessSqlLab = userHasPermission(user, 'SQL Lab', 'menu_access');
const editText = t('Edit dataset');
const editText = t('Edit %s', datasetLabelLower());
const requestedQuery = {
datasourceKey: `${datasource.id}__${datasource.type}`,
sql: datasource.sql,
@@ -387,7 +390,9 @@ class DatasourceControl extends PureComponent<
label: !allowEdit ? (
<Tooltip
title={t(
'You must be a dataset owner in order to edit. Please reach out to a dataset owner to request modifications or edit access.',
'You must be a %s owner in order to edit. Please reach out to a %s owner to request modifications or edit access.',
datasetLabelLower(),
datasetLabelLower(),
)}
>
{editText}
@@ -402,7 +407,7 @@ class DatasourceControl extends PureComponent<
defaultDatasourceMenuItems.push({
key: CHANGE_DATASET,
label: t('Swap dataset'),
label: t('Swap %s', datasetLabelLower()),
});
if (!isMissingDatasource && canAccessSqlLab) {
@@ -481,7 +486,7 @@ class DatasourceControl extends PureComponent<
queryDatasourceMenuItems.push({
key: SAVE_AS_DATASET,
label: <span>{t('Save as dataset')}</span>,
label: <span>{t('Save as %s', datasetLabelLower())}</span>,
});
const queryDatasourceMenu = (
@@ -495,7 +500,7 @@ class DatasourceControl extends PureComponent<
const titleText =
isMissingDatasource && !datasource.name
? t('Missing dataset')
? t('Missing %s', datasetLabelLower())
: getDatasourceTitle(datasource);
const tooltip = titleText;
@@ -561,14 +566,15 @@ class DatasourceControl extends PureComponent<
) : (
<ErrorAlert
type="warning"
message={t('Missing dataset')}
message={t('Missing %s', datasetLabelLower())}
descriptionPre={false}
descriptionDetailsCollapsed={false}
descriptionDetails={
<>
<p>
{t(
'The dataset linked to this chart may have been deleted.',
'The %s linked to this chart may have been deleted.',
datasetLabelLower(),
)}
</p>
<p>
@@ -578,7 +584,7 @@ class DatasourceControl extends PureComponent<
this.handleMenuItemClick({ key: CHANGE_DATASET })
}
>
{t('Swap dataset')}
{t('Swap %s', datasetLabelLower())}
</Button>
</p>
</>
@@ -587,14 +593,27 @@ class DatasourceControl extends PureComponent<
)}
</div>
)}
{showEditDatasourceModal && (
<DatasourceModal
datasource={datasource}
show={showEditDatasourceModal}
onDatasourceSave={this.onDatasourceSave}
onHide={this.toggleEditDatasourceModal}
/>
)}
{showEditDatasourceModal &&
(String(datasource.type) === 'semantic_view' ? (
<SemanticViewEditModal
show={showEditDatasourceModal}
onHide={this.toggleEditDatasourceModal}
onSave={() => this.onDatasourceSave(datasource)}
semanticView={{
id: datasource.id,
table_name: datasource.name,
description: datasource.description,
cache_timeout: datasource.cache_timeout,
}}
/>
) : (
<DatasourceModal
datasource={datasource}
show={showEditDatasourceModal}
onDatasourceSave={this.onDatasourceSave}
onHide={this.toggleEditDatasourceModal}
/>
))}
{showChangeDatasourceModal && (
<ChangeDatasourceModal
onDatasourceSave={this.onDatasourceSave}

View File

@@ -142,6 +142,10 @@ const ColumnSelectPopover = ({
const datasourceType = useSelector<ExplorePageState, string | undefined>(
state => state.explore.datasource.type,
);
const compatibleDimensions = useSelector<
ExplorePageState,
string[] | null | undefined
>(state => state.explore.compatibleDimensions);
const [initialLabel] = useState(label);
const [initialAdhocColumn, initialCalculatedColumn, initialSimpleColumn] =
getInitialColumnValues(editedColumn);
@@ -167,21 +171,22 @@ const ColumnSelectPopover = ({
const sqlEditorRef = useRef<editors.EditorHandle>(null);
const [calculatedColumns, simpleColumns] = useMemo(
() =>
columns?.reduce(
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
if (column.expression) {
acc[0].push(column);
} else {
acc[1].push(column);
}
return acc;
},
[[], []],
),
[columns],
);
const [calculatedColumns, simpleColumns] = useMemo(() => {
const [calc, simple] = (columns ?? []).reduce(
(acc: [ColumnMeta[], ColumnMeta[]], column: ColumnMeta) => {
if (column.expression) {
acc[0].push(column);
} else {
acc[1].push(column);
}
return acc;
},
[[], []],
);
const alpha = (a: ColumnMeta, b: ColumnMeta) =>
(a.column_name ?? '').localeCompare(b.column_name ?? '');
return [calc.sort(alpha), simple.sort(alpha)];
}, [columns]);
// Filter metrics that are already selected in the chart
const availableMetrics = useMemo(() => {
@@ -551,6 +556,11 @@ const ColumnSelectPopover = ({
key: `column-${simpleColumn.column_name}`,
column_name: simpleColumn.column_name,
verbose_name: simpleColumn.verbose_name ?? '',
disabled:
compatibleDimensions != null &&
!compatibleDimensions.includes(
simpleColumn.column_name,
),
})),
...availableMetrics.map(metric => ({
value: metric.metric_name,
@@ -565,6 +575,9 @@ const ColumnSelectPopover = ({
key: `metric-${metric.metric_name}`,
metric_name: metric.metric_name,
verbose_name: metric.verbose_name ?? '',
disabled:
compatibleDimensions != null &&
!compatibleDimensions.includes(metric.metric_name),
})),
]}
optionFilterProps={[

View File

@@ -23,6 +23,7 @@ import AdhocFilter from 'src/explore/components/controls/FilterControl/AdhocFilt
import { OptionSortType } from 'src/explore/types';
import { useGetTimeRangeLabel } from 'src/explore/components/controls/FilterControl/utils';
import OptionWrapper from './OptionWrapper';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export interface DndAdhocFilterOptionProps {
adhocFilter: AdhocFilter;
@@ -68,7 +69,10 @@ export default function DndAdhocFilterOption({
isExtra={adhocFilter.isExtra}
datasourceWarningMessage={
adhocFilter.datasourceWarning
? t('This filter might be incompatible with current dataset')
? t(
'This filter might be incompatible with current %s',
datasetLabelLower(),
)
: undefined
}
/>

View File

@@ -38,6 +38,7 @@ import AdhocMetric from 'src/explore/components/controls/MetricControl/AdhocMetr
import MetricDefinitionValue from 'src/explore/components/controls/MetricControl/MetricDefinitionValue';
import ColumnSelectPopoverTrigger from './ColumnSelectPopoverTrigger';
import { DndControlProps } from './types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const AGGREGATED_DECK_GL_CHART_TYPES = [
'deck_screengrid',
@@ -129,6 +130,16 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
formData,
} = props;
// Semantic views do not support arbitrary SQL expressions as dimensions.
// Merge 'sqlExpression' into disabledTabs so the Custom SQL tab is hidden.
const effectiveDisabledTabs = useMemo(
() =>
String(datasource?.type) === 'semantic_view'
? new Set([...(disabledTabs ?? []), 'sqlExpression'])
: disabledTabs,
[datasource?.type, disabledTabs],
);
const [newColumnPopoverVisible, setNewColumnPopoverVisible] = useState(false);
const combinedOptionsMap = useMemo(() => {
@@ -303,7 +314,7 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
}}
editedColumn={column}
isTemporal={isTemporal}
disabledTabs={disabledTabs}
disabledTabs={effectiveDisabledTabs}
>
<OptionWrapper
key={`column-${idx}`}
@@ -326,7 +337,10 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
typeof item === 'object' &&
'error_text' in item &&
item.error_text)
? t('This metric might be incompatible with current dataset')
? t(
'This metric might be incompatible with current %s',
datasetLabelLower(),
)
: undefined;
return (
@@ -440,7 +454,7 @@ function DndColumnMetricSelect(props: DndColumnMetricSelectProps) {
togglePopover={toggleColumnPopover}
closePopover={closeColumnPopover}
isTemporal={false}
disabledTabs={disabledTabs}
disabledTabs={effectiveDisabledTabs}
metrics={savedMetrics}
selectedMetrics={selectedMetrics}
>

View File

@@ -17,6 +17,7 @@
* under the License.
*/
import { useCallback, useMemo, useState } from 'react';
import { useSelector } from 'react-redux';
import { t } from '@apache-superset/core/translation';
import { AdhocColumn, QueryFormColumn, isAdhocColumn } from '@superset-ui/core';
import { tn } from '@apache-superset/core/translation';
@@ -27,8 +28,10 @@ import OptionWrapper from 'src/explore/components/controls/DndColumnSelectContro
import { OptionSelector } from 'src/explore/components/controls/DndColumnSelectControl/utils';
import { DatasourcePanelDndItem } from 'src/explore/components/DatasourcePanel/types';
import { DndItemType } from 'src/explore/components/DndItemType';
import { ExplorePageState } from 'src/explore/types';
import ColumnSelectPopoverTrigger from './ColumnSelectPopoverTrigger';
import { DndControlProps } from './types';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
export type DndColumnSelectProps = DndControlProps<QueryFormColumn> & {
options: ColumnMeta[];
@@ -49,6 +52,19 @@ function DndColumnSelect(props: DndColumnSelectProps) {
isTemporal,
disabledTabs,
} = props;
// Semantic views do not support arbitrary SQL expressions as dimensions.
const datasourceType = useSelector<ExplorePageState, string | undefined>(
state => state.explore.datasource?.type,
);
const effectiveDisabledTabs = useMemo(
() =>
datasourceType === 'semantic_view'
? new Set([...(disabledTabs ?? []), 'sqlExpression'])
: disabledTabs,
[datasourceType, disabledTabs],
);
const [newColumnPopoverVisible, setNewColumnPopoverVisible] = useState(false);
const optionSelector = useMemo(() => {
@@ -103,7 +119,10 @@ function DndColumnSelect(props: DndColumnSelectProps) {
optionSelector.values.map((column, idx) => {
const datasourceWarningMessage =
isAdhocColumn(column) && column.datasourceWarning
? t('This column might be incompatible with current dataset')
? t(
'This column might be incompatible with current %s',
datasetLabelLower(),
)
: undefined;
const withCaret = isAdhocColumn(column) || !column.error_text;
@@ -121,7 +140,7 @@ function DndColumnSelect(props: DndColumnSelectProps) {
}}
editedColumn={column}
isTemporal={isTemporal}
disabledTabs={disabledTabs}
disabledTabs={effectiveDisabledTabs}
>
<OptionWrapper
key={idx}
@@ -205,7 +224,7 @@ function DndColumnSelect(props: DndColumnSelectProps) {
closePopover={closePopover}
visible={newColumnPopoverVisible}
isTemporal={isTemporal}
disabledTabs={disabledTabs}
disabledTabs={effectiveDisabledTabs}
>
<div />
</ColumnSelectPopoverTrigger>

View File

@@ -69,7 +69,7 @@ const baseFormData = {
};
const mockStore = configureStore([thunk]);
const store = mockStore({});
const store = mockStore({ explore: {} });
function setup({
value = undefined,

View File

@@ -69,14 +69,20 @@ const adhocMetricB = {
};
test('renders with default props', () => {
render(<DndMetricSelect {...defaultProps} />, { useDnd: true });
render(<DndMetricSelect {...defaultProps} />, {
useDnd: true,
useRedux: true,
});
expect(
screen.getByText('Drop a column/metric here or click'),
).toBeInTheDocument();
});
test('renders with default props and multi = true', () => {
render(<DndMetricSelect {...defaultProps} multi />, { useDnd: true });
render(<DndMetricSelect {...defaultProps} multi />, {
useDnd: true,
useRedux: true,
});
expect(
screen.getByText('Drop columns/metrics here or click'),
).toBeInTheDocument();
@@ -86,6 +92,7 @@ test('render selected metrics correctly', () => {
const metricValues = ['metric_a', 'metric_b', adhocMetricB];
render(<DndMetricSelect {...defaultProps} value={metricValues} multi />, {
useDnd: true,
useRedux: true,
});
expect(screen.getByText('metric_a')).toBeVisible();
expect(screen.getByText('Metric B')).toBeVisible();
@@ -107,6 +114,7 @@ test('warn selected custom metric when metric gets removed from dataset', async
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -159,6 +167,7 @@ test('warn selected custom metric when metric gets removed from dataset for sing
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -217,6 +226,7 @@ test('remove selected adhoc metric when column gets removed from dataset', async
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -259,6 +269,7 @@ test('update adhoc metric name when column label in dataset changes', () => {
/>,
{
useDnd: true,
useRedux: true,
},
);
@@ -304,6 +315,7 @@ test('can drag metrics', async () => {
const metricValues = ['metric_a', 'metric_b', adhocMetricB];
render(<DndMetricSelect {...defaultProps} value={metricValues} multi />, {
useDnd: true,
useRedux: true,
});
expect(screen.getByText('metric_a')).toBeVisible();
@@ -341,6 +353,7 @@ test('cannot drop a duplicated item', () => {
</>,
{
useDnd: true,
useRedux: true,
},
);
@@ -374,6 +387,7 @@ test('can drop a saved metric when disallow_adhoc_metrics', () => {
</>,
{
useDnd: true,
useRedux: true,
},
);
@@ -415,6 +429,7 @@ test('cannot drop non-saved metrics when disallow_adhoc_metrics', () => {
</>,
{
useDnd: true,
useRedux: true,
},
);
@@ -463,6 +478,7 @@ test('title changes on custom SQL text change', async () => {
/>,
{
useDnd: true,
useRedux: true,
},
);

View File

@@ -41,6 +41,7 @@ import { DndItemType } from 'src/explore/components/DndItemType';
import DndSelectLabel from 'src/explore/components/controls/DndColumnSelectControl/DndSelectLabel';
import { savedMetricType } from 'src/explore/components/controls/MetricControl/types';
import { AGGREGATES } from 'src/explore/constants';
import { datasetLabelLower } from 'src/features/semanticLayers/label';
const EMPTY_OBJECT = {};
const DND_ACCEPTED_TYPES = [DndItemType.Column, DndItemType.Metric];
@@ -77,7 +78,10 @@ const coerceMetrics = (
) {
return {
metric_name: metric,
error_text: t('This metric might be incompatible with current dataset'),
error_text: t(
'This metric might be incompatible with current %s',
datasetLabelLower(),
),
uuid: nanoid(),
};
}
@@ -128,6 +132,26 @@ const DndMetricSelect = (props: any) => {
return extra;
}, [datasource?.extra]);
// Semantic views do not support arbitrary SQL expressions as metrics.
const disallowAdhocMetrics =
extra.disallow_adhoc_metrics || datasource?.type === 'semantic_view';
// AdhocMetricEditPopover reads `datasource.extra.disallow_adhoc_metrics`
// directly, so we need to inject the flag there too — not just in canDrop.
const datasourceForPopover = useMemo(() => {
if (!disallowAdhocMetrics || !datasource) return datasource;
let parsedExtra: Record<string, unknown> = {};
if (datasource.extra) {
try {
parsedExtra = JSON.parse(datasource.extra as string);
} catch {} // eslint-disable-line no-empty
}
return {
...datasource,
extra: JSON.stringify({ ...parsedExtra, disallow_adhoc_metrics: true }),
};
}, [disallowAdhocMetrics, datasource]);
const savedMetricSet = useMemo(
() =>
new Set(
@@ -184,7 +208,7 @@ const DndMetricSelect = (props: any) => {
const canDrop = useCallback(
(item: DatasourcePanelDndItem) => {
if (
extra.disallow_adhoc_metrics &&
disallowAdhocMetrics &&
(item.type !== DndItemType.Metric ||
!savedMetricSet.has(item.value.metric_name))
) {
@@ -293,14 +317,17 @@ const DndMetricSelect = (props: any) => {
columns={props.columns}
savedMetrics={props.savedMetrics}
savedMetricsOptions={getSavedMetricOptionsForMetric(index)}
datasource={props.datasource}
datasource={datasourceForPopover}
onMoveLabel={moveLabel}
onDropLabel={handleDropLabel}
type={`${DndItemType.AdhocMetricOption}_${props.name}_${props.label}`}
multi={multi}
datasourceWarningMessage={
option instanceof AdhocMetric && option.datasourceWarning
? t('This metric might be incompatible with current dataset')
? t(
'This metric might be incompatible with current %s',
datasetLabelLower(),
)
: undefined
}
/>
@@ -399,7 +426,7 @@ const DndMetricSelect = (props: any) => {
columns={props.columns}
savedMetricsOptions={newSavedMetricOptions}
savedMetric={EMPTY_OBJECT as savedMetricType}
datasource={props.datasource}
datasource={datasourceForPopover}
isControlledComponent
visible={newMetricPopoverVisible}
togglePopover={togglePopover}

View File

@@ -415,21 +415,25 @@ export default class AdhocFilterEditPopover extends Component<
</ErrorBoundary>
),
},
{
key: ExpressionTypes.Sql,
label: t('Custom SQL'),
children: (
<ErrorBoundary>
<AdhocFilterEditPopoverSqlTabContent
adhocFilter={this.state.adhocFilter}
onChange={this.onAdhocFilterChange}
options={this.props.options}
height={this.state.height}
datasource={datasource}
/>
</ErrorBoundary>
),
},
...(datasource?.type === 'semantic_view'
? []
: [
{
key: ExpressionTypes.Sql,
label: t('Custom SQL'),
children: (
<ErrorBoundary>
<AdhocFilterEditPopoverSqlTabContent
adhocFilter={this.state.adhocFilter}
onChange={this.onAdhocFilterChange}
options={this.props.options}
height={this.state.height}
datasource={datasource}
/>
</ErrorBoundary>
),
},
]),
]}
/>
{hasDeckSlices && (

View File

@@ -67,13 +67,19 @@ const createProps = () => ({
test('Should render', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(screen.getByTestId('metrics-edit-popover')).toBeVisible();
});
test('Should render correct elements', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(screen.getByRole('tablist')).toBeVisible();
expect(screen.getByRole('button', { name: 'Resize' })).toBeVisible();
expect(screen.getByRole('button', { name: 'Save' })).toBeVisible();
@@ -82,7 +88,10 @@ test('Should render correct elements', () => {
test('Should render correct elements for SQL', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(screen.getByRole('tab', { name: 'Custom SQL' })).toBeVisible();
expect(screen.getByRole('tab', { name: 'Simple' })).toBeVisible();
expect(screen.getByRole('tab', { name: 'Saved' })).toBeVisible();
@@ -94,7 +103,10 @@ test('Should render correct elements for allow ad-hoc metrics', () => {
...createProps(),
datasource: { extra: '{"disallow_adhoc_metrics": false}' },
};
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(screen.getByRole('tab', { name: 'Custom SQL' })).toBeEnabled();
expect(screen.getByRole('tab', { name: 'Simple' })).toBeEnabled();
expect(screen.getByRole('tab', { name: 'Saved' })).toBeEnabled();
@@ -106,7 +118,10 @@ test('Should render correct elements for disallow ad-hoc metrics', () => {
...createProps(),
datasource: { extra: '{"disallow_adhoc_metrics": true}' },
};
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(screen.getByRole('tab', { name: 'Custom SQL' })).toHaveAttribute(
'aria-disabled',
'true',
@@ -121,7 +136,10 @@ test('Should render correct elements for disallow ad-hoc metrics', () => {
test('Clicking on "Close" should call onClose', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Close' }));
expect(props.onClose).toHaveBeenCalledTimes(1);
@@ -129,7 +147,10 @@ test('Clicking on "Close" should call onClose', () => {
test('Clicking on "Save" should call onChange and onClose', async () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(
@@ -145,7 +166,10 @@ test('Clicking on "Save" should call onChange and onClose', async () => {
test('Clicking on "Save" should not call onChange and onClose', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Save' }));
@@ -155,7 +179,10 @@ test('Clicking on "Save" should not call onChange and onClose', () => {
test('Clicking on "Save" should call onChange and onClose for new metric', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} isNewMetric />);
render(<AdhocMetricEditPopover {...props} isNewMetric />, {
useRedux: true,
initialState: { explore: {} },
});
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Save' }));
@@ -165,7 +192,10 @@ test('Clicking on "Save" should call onChange and onClose for new metric', () =>
test('Clicking on "Save" should call onChange and onClose for new title', () => {
const props = createProps();
render(<AdhocMetricEditPopover {...props} isLabelModified />);
render(<AdhocMetricEditPopover {...props} isLabelModified />, {
useRedux: true,
initialState: { explore: {} },
});
expect(props.onChange).toHaveBeenCalledTimes(0);
expect(props.onClose).toHaveBeenCalledTimes(0);
userEvent.click(screen.getByRole('button', { name: 'Save' }));
@@ -178,7 +208,10 @@ test('Should switch to tab:Simple', () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(screen.getByRole('tabpanel', { name: 'Saved' })).toBeVisible();
expect(
@@ -202,7 +235,10 @@ test('Should render "Simple" tab correctly', () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
const tab = screen.getByRole('tab', { name: 'Simple' }).parentElement!;
userEvent.click(tab);
@@ -216,7 +252,10 @@ test('Should switch to tab:Custom SQL', () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
expect(screen.getByRole('tabpanel', { name: 'Saved' })).toBeVisible();
expect(
@@ -242,7 +281,10 @@ test('Should render "Custom SQL" tab correctly', async () => {
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
const tab = screen.getByRole('tab', { name: 'Custom SQL' }).parentElement!;
userEvent.click(tab);
@@ -286,7 +328,10 @@ test('Should filter saved metrics by metric_name and verbose_name', async () =>
},
],
};
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
const combobox = screen.getByRole('combobox', {
name: 'Select saved metrics',
@@ -362,7 +407,10 @@ test('Should filter columns by column_name and verbose_name in Simple tab', asyn
props.getCurrentTab.mockImplementation(tab => {
props.adhocMetric.expressionType = tab;
});
render(<AdhocMetricEditPopover {...props} />);
render(<AdhocMetricEditPopover {...props} />, {
useRedux: true,
initialState: { explore: {} },
});
const tab = screen.getByRole('tab', { name: 'Simple' }).parentElement!;
userEvent.click(tab);

View File

@@ -18,6 +18,7 @@
*/
/* eslint-disable camelcase */
import { PureComponent, createRef } from 'react';
import { useSelector } from 'react-redux';
import { isDefined, ensureIsArray, DatasourceType } from '@superset-ui/core';
import { t } from '@apache-superset/core/translation';
import type { editors } from '@apache-superset/core';
@@ -94,6 +95,8 @@ interface AdhocMetricEditPopoverProps {
datasource?: DatasourceInfo;
isNewMetric?: boolean;
isLabelModified?: boolean;
/** Names of metrics the user may select; null means no filtering. */
compatibleMetrics?: string[] | null;
}
interface AdhocMetricEditPopoverState {
@@ -123,7 +126,7 @@ const StyledSelect = styled(Select)`
export const SAVED_TAB_KEY = 'SAVED';
export default class AdhocMetricEditPopover extends PureComponent<
class AdhocMetricEditPopover extends PureComponent<
AdhocMetricEditPopoverProps,
AdhocMetricEditPopoverState
> {
@@ -438,15 +441,24 @@ export default class AdhocMetricEditPopover extends PureComponent<
ensureIsArray(savedMetricsOptions).length > 0 ? (
<FormItem label={t('Saved metric')}>
<StyledSelect
options={ensureIsArray(savedMetricsOptions).map(
savedMetric => ({
options={[...ensureIsArray(savedMetricsOptions)]
.sort((a, b) =>
(a.metric_name ?? '').localeCompare(
b.metric_name ?? '',
),
)
.map(savedMetric => ({
value: savedMetric.metric_name,
label: this.renderMetricOption(savedMetric),
key: savedMetric.id,
metric_name: savedMetric.metric_name,
verbose_name: savedMetric.verbose_name ?? '',
}),
)}
disabled:
this.props.compatibleMetrics != null &&
!this.props.compatibleMetrics.includes(
savedMetric.metric_name,
),
}))}
optionFilterProps={['metric_name', 'verbose_name']}
{...savedSelectProps}
/>
@@ -596,3 +608,20 @@ export default class AdhocMetricEditPopover extends PureComponent<
}
// @ts-expect-error - defaultProps for backward compatibility
AdhocMetricEditPopover.defaultProps = defaultProps;
// ---------------------------------------------------------------------------
// Thin functional wrapper that injects compatibility data from Redux.
// AdhocMetricEditPopover is a class component and cannot use hooks directly.
// ---------------------------------------------------------------------------
function AdhocMetricEditPopoverWithRedux(props: AdhocMetricEditPopoverProps) {
const compatibleMetrics = useSelector(
(state: any) =>
state.explore?.compatibleMetrics as string[] | null | undefined,
);
return (
<AdhocMetricEditPopover {...props} compatibleMetrics={compatibleMetrics} />
);
}
export { AdhocMetricEditPopover };
export default AdhocMetricEditPopoverWithRedux;

View File

@@ -61,7 +61,11 @@ function setup(overrides: Record<string, unknown> = {}) {
...overrides,
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
return render(<AdhocMetricOption {...(props as any)} />, { useDnd: true });
return render(<AdhocMetricOption {...(props as any)} />, {
useDnd: true,
useRedux: true,
initialState: { explore: {} },
});
}
test('renders an overlay trigger wrapper for the label', () => {

View File

@@ -62,7 +62,10 @@ function setup(overrides: Record<string, unknown> = {}) {
...defaultProps,
...overrides,
};
const result = render(<MetricsControl {...props} />, { useDnd: true });
const result = render(<MetricsControl {...props} />, {
useDnd: true,
useRedux: true,
});
return { onChange, ...result };
}
@@ -166,7 +169,7 @@ test('does not remove custom SQL metric if savedMetrics changes', async () => {
]}
datasource={undefined}
/>,
{ useDnd: true },
{ useDnd: true, useRedux: true },
);
expect(screen.getByText('old label')).toBeInTheDocument();

View File

@@ -64,6 +64,7 @@ import {
validateNonEmpty,
} from '@superset-ui/core';
import { t } from '@apache-superset/core/translation';
import { datasetLabel } from 'src/features/semanticLayers/label';
import { formatSelectOptions } from 'src/explore/exploreUtils';
import { TIME_FILTER_LABELS } from './constants';
import { StyledColumnOption } from './components/optionRenderers';
@@ -214,7 +215,7 @@ export const controls = {
datasource: {
type: 'DatasourceControl',
label: t('Dataset'),
label: datasetLabel(),
default: null,
description: null,
mapStateToProps: ({ datasource }: ControlState) => ({

View File

@@ -70,6 +70,9 @@ export interface ExploreState {
metadata?: {
owners?: string[] | null;
};
compatibleMetrics?: string[] | null;
compatibleDimensions?: string[] | null;
compatibilityLoading?: boolean;
saveAction?: SaveActionType | null;
chartStates?: Record<number, JsonObject>;
}
@@ -178,6 +181,13 @@ interface UpdateExploreChartStateAction {
lastModified: number;
}
interface SetCompatibilityAction {
type: typeof actions.SET_COMPATIBILITY;
compatibleMetrics: string[] | null;
compatibleDimensions: string[] | null;
compatibilityLoading: boolean;
}
type ExploreAction =
| DynamicPluginControlsReadyAction
| ToggleFaveStarAction
@@ -197,6 +207,7 @@ type ExploreAction =
| SliceUpdatedAction
| SetForceQueryAction
| UpdateExploreChartStateAction
| SetCompatibilityAction
| HydrateExplore;
// Extended control state for dynamic form controls - uses Record for flexibility
@@ -635,6 +646,15 @@ export default function exploreReducer(
force: typedAction.force,
};
},
[actions.SET_COMPATIBILITY]() {
const typedAction = action as SetCompatibilityAction;
return {
...state,
compatibleMetrics: typedAction.compatibleMetrics,
compatibleDimensions: typedAction.compatibleDimensions,
compatibilityLoading: typedAction.compatibilityLoading,
};
},
[actions.UPDATE_EXPLORE_CHART_STATE]() {
const typedAction = action as UpdateExploreChartStateAction;
return {

View File

@@ -71,6 +71,8 @@ export type OptionSortType = Partial<
export type Datasource = Dataset & {
database?: DatabaseObject;
/** The parent resource that owns this datasource (database or semantic layer). */
parent?: { name: string };
datasource?: string;
catalog?: string | null;
schema?: string;
@@ -131,6 +133,9 @@ export interface ExplorePageState {
standalone: boolean;
force: boolean;
common: JsonObject;
compatibleMetrics?: string[] | null;
compatibleDimensions?: string[] | null;
compatibilityLoading?: boolean;
};
sliceEntities?: JsonObject; // propagated from Dashboard view
}

View File

@@ -35,6 +35,7 @@ import {
MenuObjectProps,
MenuData,
} from 'src/types/bootstrapTypes';
import { datasetsLabel } from 'src/features/semanticLayers/label';
import RightMenu from './RightMenu';
import { NAVBAR_MENU_POPUP_OFFSET } from './commonMenuData';
@@ -223,7 +224,7 @@ export function Menu({
setActiveTabs(['Charts']);
break;
case path.startsWith(Paths.Datasets):
setActiveTabs(['Datasets']);
setActiveTabs([datasetsLabel()]);
break;
case path.startsWith(Paths.SqlLab) || path.startsWith(Paths.SavedQueries):
setActiveTabs(['SQL']);
@@ -408,6 +409,12 @@ export default function MenuWrapper({ data, ...rest }: MenuProps) {
Manage: true,
};
// Remap labels that depend on feature flags so they stay in sync with
// the active-tab key used in the Menu component above.
const labelOverrides: Record<string, () => string> = {
Datasets: datasetsLabel,
};
// Cycle through menu.menu to build out cleanedMenu and settings
const cleanedMenu: MenuObjectProps[] = [];
const settings: MenuObjectProps[] = [];
@@ -419,6 +426,10 @@ export default function MenuWrapper({ data, ...rest }: MenuProps) {
const children: (MenuObjectProps | string)[] = [];
const newItem = {
...item,
// Apply any label override for this item (keyed by FAB internal name).
...(item.name && labelOverrides[item.name]
? { label: labelOverrides[item.name]() }
: {}),
};
// Filter childs

View File

@@ -149,6 +149,7 @@ export interface ButtonProps {
buttonStyle: 'primary' | 'secondary' | 'dashed' | 'link' | 'tertiary';
loading?: boolean;
icon?: ReactNode;
component?: ReactNode;
}
export interface SubMenuProps {
@@ -312,18 +313,22 @@ const SubMenuComponent: FunctionComponent<SubMenuProps> = props => {
),
}))}
/>
{props.buttons?.map((btn, i) => (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
))}
{props.buttons?.map((btn, i) =>
btn.component ? (
<span key={i}>{btn.component}</span>
) : (
<Button
key={i}
buttonStyle={btn.buttonStyle}
icon={btn.icon}
onClick={btn.onClick}
data-test={btn['data-test']}
loading={btn.loading ?? false}
>
{btn.name}
</Button>
),
)}
</div>
</Row>
{props.children}

View File

@@ -0,0 +1,130 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { SupersetClient } from '@superset-ui/core';
import { render, waitFor } from 'spec/helpers/testing-library';
import SemanticLayerModal from './SemanticLayerModal';
// Tracks whether the mocked JsonForms has already fired its onChange, so the
// simulated user edit happens exactly once per test (reset in beforeEach).
let mockJsonFormsChangeTriggered = false;
jest.mock('@jsonforms/react', () => ({
  ...jest.requireActual('@jsonforms/react'),
  // Stub JsonForms: on first render it emits a single onChange carrying an
  // edited configuration ({ warehouse: 'wh1' }) to simulate user input,
  // then renders nothing.
  JsonForms: ({ onChange }: { onChange: (value: unknown) => void }) => {
    // eslint-disable-next-line react-hooks/rules-of-hooks
    if (!mockJsonFormsChangeTriggered) {
      mockJsonFormsChangeTriggered = true;
      onChange({
        data: { warehouse: 'wh1' },
        errors: [],
      });
    }
    return null;
  },
}));
jest.mock('@superset-ui/core', () => ({
  ...jest.requireActual('@superset-ui/core'),
  // Replace only the HTTP verbs used by the modal; everything else on
  // SupersetClient keeps its real implementation.
  SupersetClient: {
    ...jest.requireActual('@superset-ui/core').SupersetClient,
    get: jest.fn(),
    post: jest.fn(),
    put: jest.fn(),
  },
  getClientErrorObject: jest.fn(() => Promise.resolve({ error: '' })),
}));
const mockedGet = SupersetClient.get as jest.Mock;
const mockedPost = SupersetClient.post as jest.Mock;
// The UUID puts the modal into edit mode, which drives the initial
// schema fetch asserted in the test below.
const props = {
  show: true,
  onHide: jest.fn(),
  addDangerToast: jest.fn(),
  addSuccessToast: jest.fn(),
  semanticLayerUuid: '11111111-1111-1111-1111-111111111111',
};
beforeEach(() => {
  mockJsonFormsChangeTriggered = false;
  // Fake timers so the debounced schema refresh can be advanced manually.
  jest.useFakeTimers();
  mockedGet.mockReset();
  mockedPost.mockReset();
  // First GET: the semantic-layer type registry. Second GET: the existing
  // layer being edited (name, type, and its saved configuration).
  mockedGet
    .mockResolvedValueOnce({
      json: {
        result: [{ id: 'snowflake', name: 'Snowflake', description: '' }],
      },
    })
    .mockResolvedValueOnce({
      json: {
        result: {
          name: 'Layer 1',
          type: 'snowflake',
          configuration: { warehouse: 'wh0' },
        },
      },
    });
  // Every POST returns a schema whose `warehouse` field is dynamic and
  // depends on itself, so edits to it schedule a debounced schema refresh.
  mockedPost.mockResolvedValue({
    json: {
      result: {
        type: 'object',
        properties: {
          warehouse: {
            type: 'string',
            'x-dynamic': true,
            'x-dependsOn': ['warehouse'],
          },
        },
      },
    },
  });
});
afterEach(() => {
  // Flush any still-pending debounce timers before restoring real timers.
  jest.runOnlyPendingTimers();
  jest.useRealTimers();
});
test('posts configuration schema refresh after debounce', async () => {
  render(<SemanticLayerModal {...props} />);
  // First POST happens during the edit-mode load, sending the saved
  // configuration so dynamic dropdowns can be enriched up-front.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenNthCalledWith(1, {
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: {
        type: 'snowflake',
        configuration: { warehouse: 'wh0' },
      },
    });
  });
  // The mocked JsonForms fired one onChange with warehouse 'wh1'; advancing
  // just past the 500ms debounce window should trigger a second refresh
  // carrying the edited configuration.
  jest.advanceTimersByTime(501);
  await waitFor(() => {
    expect(mockedPost).toHaveBeenNthCalledWith(2, {
      endpoint: '/api/v1/semantic_layer/schema/configuration',
      jsonPayload: {
        type: 'snowflake',
        configuration: { warehouse: 'wh1' },
      },
    });
  });
});

View File

@@ -0,0 +1,408 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useState, useEffect, useCallback, useRef } from 'react';
import { t } from '@apache-superset/core/translation';
import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
import { Input, Select, Button } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import { JsonForms } from '@jsonforms/react';
import type { JsonSchema, UISchemaElement } from '@jsonforms/core';
import { cellRegistryEntries } from '@great-expectations/jsonforms-antd-renderers';
import type { ErrorObject } from 'ajv';
import {
StandardModal,
ModalFormField,
MODAL_STANDARD_WIDTH,
MODAL_MEDIUM_WIDTH,
} from 'src/components/Modal';
import { styled } from '@apache-superset/core/theme';
import {
renderers,
sanitizeSchema,
buildUiSchema,
getDynamicDependencies,
areDependenciesSatisfied,
serializeDependencyValues,
SCHEMA_REFRESH_DEBOUNCE_MS,
} from './jsonFormsHelpers';
// Padded wrapper around the modal body content.
const ModalContent = styled.div`
  padding: ${({ theme }) => theme.sizeUnit * 4}px;
`;
// Wizard steps: pick a layer type first, then fill its configuration form.
type Step = 'type' | 'config';
// JsonForms validation display modes: errors tracked but hidden vs. shown.
type ValidationMode = 'ValidateAndHide' | 'ValidateAndShow';
// One entry from the /api/v1/semantic_layer/types registry.
interface SemanticLayerType {
  id: string;
  name: string;
  description: string;
}
interface SemanticLayerModalProps {
  show: boolean;
  onHide: () => void;
  addDangerToast: (msg: string) => void;
  addSuccessToast: (msg: string) => void;
  // When provided, the modal opens in edit mode for this layer.
  semanticLayerUuid?: string;
}
/**
 * Two-step modal for creating or editing a semantic layer connection.
 *
 * Step "type": pick a semantic layer type from the backend registry.
 * Step "config": name the layer and fill a JsonForms form generated from a
 * backend-provided JSON Schema. Fields marked dynamic in the schema trigger
 * a debounced schema re-fetch whenever their dependency values change, so
 * dependent dropdowns (e.g. warehouse lists) stay up to date.
 *
 * Create posts to /api/v1/semantic_layer/; edit PUTs to
 * /api/v1/semantic_layer/<uuid>. All failures surface via addDangerToast.
 */
export default function SemanticLayerModal({
  show,
  onHide,
  addDangerToast,
  addSuccessToast,
  semanticLayerUuid,
}: SemanticLayerModalProps) {
  const isEditMode = !!semanticLayerUuid;
  const [step, setStep] = useState<Step>('type');
  const [name, setName] = useState('');
  const [selectedType, setSelectedType] = useState<string | null>(null);
  const [types, setTypes] = useState<SemanticLayerType[]>([]);
  const [loading, setLoading] = useState(false);
  const [configSchema, setConfigSchema] = useState<JsonSchema | null>(null);
  const [uiSchema, setUiSchema] = useState<UISchemaElement | undefined>(
    undefined,
  );
  const [formData, setFormData] = useState<Record<string, unknown>>({});
  const [saving, setSaving] = useState(false);
  const [hasErrors, setHasErrors] = useState(true);
  const [refreshingSchema, setRefreshingSchema] = useState(false);
  const [validationMode, setValidationMode] =
    useState<ValidationMode>('ValidateAndHide');
  // Latest AJV errors; kept in a ref so handleSave can read them without
  // forcing a re-render on every keystroke.
  const errorsRef = useRef<ErrorObject[]>([]);
  // Pending debounce timer for dynamic schema refreshes.
  // NOTE(review): the timer is cleared when `show` flips to false and on
  // "Back", but there is no effect cleanup on unmount — a refresh scheduled
  // right before unmounting can still fire. Confirm whether that matters.
  const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  // Serialized snapshot of the dependency values used for the last refresh,
  // so unrelated edits don't re-fetch the schema.
  const lastDepSnapshotRef = useRef<string>('');
  // Map of dynamic field name -> names of the fields it depends on,
  // extracted from the raw (pre-sanitization) schema.
  const dynamicDepsRef = useRef<Record<string, string[]>>({});
  // Load the semantic-layer type registry for the type-selection step.
  const fetchTypes = useCallback(async () => {
    setLoading(true);
    try {
      const { json } = await SupersetClient.get({
        endpoint: '/api/v1/semantic_layer/types',
      });
      setTypes(json.result ?? []);
    } catch (error) {
      const clientError = await getClientErrorObject(error);
      addDangerToast(
        clientError.error ||
          t('An error occurred while fetching semantic layer types'),
      );
    } finally {
      setLoading(false);
    }
  }, [addDangerToast]);
  // Install a freshly fetched schema: sanitize it for rendering, derive the
  // UI schema, and record the dynamic-dependency map from the raw schema.
  const applySchema = useCallback((rawSchema: JsonSchema) => {
    const schema = sanitizeSchema(rawSchema);
    setConfigSchema(schema);
    setUiSchema(buildUiSchema(schema));
    dynamicDepsRef.current = getDynamicDependencies(rawSchema);
  }, []);
  // Fetch the configuration schema for a type. Without `configuration` this
  // is the initial fetch (advances to the config step on success); with it,
  // it is a background refresh driven by dynamic-field edits.
  const fetchConfigSchema = useCallback(
    async (type: string, configuration?: Record<string, unknown>) => {
      const isInitialFetch = !configuration;
      if (isInitialFetch) setLoading(true);
      else setRefreshingSchema(true);
      try {
        const { json } = await SupersetClient.post({
          endpoint: '/api/v1/semantic_layer/schema/configuration',
          jsonPayload: { type, configuration },
        });
        applySchema(json.result);
        if (json.warning) {
          addDangerToast(String(json.warning));
        }
        if (isInitialFetch) setStep('config');
      } catch (error) {
        const clientError = await getClientErrorObject(error);
        if (isInitialFetch) {
          addDangerToast(
            clientError.error ||
              t('An error occurred while fetching the configuration schema'),
          );
        } else {
          addDangerToast(
            clientError.error ||
              t('An error occurred while refreshing the configuration schema'),
          );
        }
      } finally {
        if (isInitialFetch) setLoading(false);
        else setRefreshingSchema(false);
      }
    },
    [addDangerToast, applySchema],
  );
  // Edit mode: load the layer, then fetch its schema enriched with the
  // saved configuration.
  const fetchExistingLayer = useCallback(
    async (uuid: string) => {
      setLoading(true);
      try {
        const { json } = await SupersetClient.get({
          endpoint: `/api/v1/semantic_layer/${uuid}`,
        });
        const layer = json.result;
        setName(layer.name ?? '');
        setSelectedType(layer.type);
        setFormData(layer.configuration ?? {});
        setHasErrors(false);
        // In edit mode, fetch the enriched schema using the full saved
        // configuration so that dynamic dropdowns (account, project,
        // environment) show their human-readable labels immediately rather
        // than flashing raw IDs while the background refresh completes.
        const { json: schemaJson } = await SupersetClient.post({
          endpoint: '/api/v1/semantic_layer/schema/configuration',
          jsonPayload: { type: layer.type, configuration: layer.configuration },
        });
        applySchema(schemaJson.result);
        setStep('config');
      } catch (error) {
        const clientError = await getClientErrorObject(error);
        addDangerToast(
          clientError.error ||
            t('An error occurred while fetching the semantic layer'),
        );
      } finally {
        setLoading(false);
      }
    },
    [addDangerToast, applySchema],
  );
  // Load data when the modal opens; reset every piece of state (including
  // refs and any pending debounce timer) when it closes.
  useEffect(() => {
    if (show) {
      if (isEditMode && semanticLayerUuid) {
        fetchTypes();
        fetchExistingLayer(semanticLayerUuid);
      } else {
        fetchTypes();
      }
    } else {
      setStep('type');
      setName('');
      setSelectedType(null);
      setTypes([]);
      setConfigSchema(null);
      setUiSchema(undefined);
      setFormData({});
      setHasErrors(true);
      setRefreshingSchema(false);
      setValidationMode('ValidateAndHide');
      errorsRef.current = [];
      lastDepSnapshotRef.current = '';
      dynamicDepsRef.current = {};
      if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
    }
  }, [show, fetchTypes, isEditMode, semanticLayerUuid, fetchExistingLayer]);
  // Move from the type step to the config step by fetching the schema.
  const handleStepAdvance = () => {
    if (selectedType) {
      fetchConfigSchema(selectedType);
    }
  };
  // Return to the type step, discarding all config-step state.
  const handleBack = () => {
    setStep('type');
    setConfigSchema(null);
    setUiSchema(undefined);
    setFormData({});
    setValidationMode('ValidateAndHide');
    errorsRef.current = [];
    lastDepSnapshotRef.current = '';
    dynamicDepsRef.current = {};
    if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
  };
  // Persist the layer: PUT in edit mode, POST otherwise; close on success.
  const handleCreate = async () => {
    setSaving(true);
    try {
      if (isEditMode && semanticLayerUuid) {
        await SupersetClient.put({
          endpoint: `/api/v1/semantic_layer/${semanticLayerUuid}`,
          jsonPayload: { name, configuration: formData },
        });
        addSuccessToast(t('Semantic layer updated'));
      } else {
        await SupersetClient.post({
          endpoint: '/api/v1/semantic_layer/',
          jsonPayload: { name, type: selectedType, configuration: formData },
        });
        addSuccessToast(t('Semantic layer created'));
      }
      onHide();
    } catch (error) {
      const clientError = await getClientErrorObject(error);
      addDangerToast(
        clientError.error ||
          (isEditMode
            ? t('An error occurred while updating the semantic layer')
            : t('An error occurred while creating the semantic layer')),
      );
    } finally {
      setSaving(false);
    }
  };
  const handleSave = () => {
    if (step === 'type') {
      handleStepAdvance();
    } else {
      // Trigger validation UI and submit only from explicit save action.
      setValidationMode('ValidateAndShow');
      if (errorsRef.current.length === 0) {
        handleCreate();
      }
    }
  };
  // Schedule a debounced schema refresh when the values that dynamic fields
  // depend on have actually changed (compared via serialized snapshot).
  const maybeRefreshSchema = useCallback(
    (data: Record<string, unknown>) => {
      if (!selectedType) return;
      const dynamicDeps = dynamicDepsRef.current;
      if (Object.keys(dynamicDeps).length === 0) return;
      // Check if any dynamic field has all dependencies satisfied
      const hasSatisfiedDeps = Object.values(dynamicDeps).some(deps =>
        areDependenciesSatisfied(deps, data, configSchema ?? undefined),
      );
      if (!hasSatisfiedDeps) return;
      // Only re-fetch if dependency values actually changed
      const snapshot = serializeDependencyValues(dynamicDeps, data);
      if (snapshot === lastDepSnapshotRef.current) return;
      lastDepSnapshotRef.current = snapshot;
      if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current);
      debounceTimerRef.current = setTimeout(() => {
        fetchConfigSchema(selectedType, data);
      }, SCHEMA_REFRESH_DEBOUNCE_MS);
    },
    [selectedType, fetchConfigSchema, configSchema],
  );
  // JsonForms onChange: store the data, capture errors, and possibly refresh.
  const handleFormChange = useCallback(
    ({
      data,
      errors,
    }: {
      data: Record<string, unknown>;
      errors?: ErrorObject[];
    }) => {
      setFormData(data);
      errorsRef.current = errors ?? [];
      setHasErrors(errorsRef.current.length > 0);
      maybeRefreshSchema(data);
    },
    [maybeRefreshSchema],
  );
  const selectedTypeName =
    types.find(type => type.id === selectedType)?.name ?? '';
  const title = isEditMode
    ? t('Edit %s', selectedTypeName || t('Semantic Layer'))
    : step === 'type'
      ? t('New Semantic Layer')
      : t('Configure %s', selectedTypeName);
  return (
    <StandardModal
      show={show}
      onHide={onHide}
      onSave={handleSave}
      title={title}
      icon={isEditMode ? <Icons.EditOutlined /> : <Icons.PlusOutlined />}
      width={step === 'type' ? MODAL_STANDARD_WIDTH : MODAL_MEDIUM_WIDTH}
      saveDisabled={
        step === 'type' ? !selectedType : saving || !name.trim() || hasErrors
      }
      saveText={
        step === 'type' ? undefined : isEditMode ? t('Save') : t('Create')
      }
      saveLoading={saving}
      contentLoading={loading}
    >
      <ModalContent>
        {step === 'type' ? (
          <ModalFormField label={t('Type')}>
            <Select
              ariaLabel={t('Semantic layer type')}
              placeholder={t('Select a semantic layer type')}
              value={selectedType}
              onChange={value => setSelectedType(value as string)}
              options={types.map(type => ({
                value: type.id,
                label: type.name,
              }))}
              getPopupContainer={() => document.body}
              dropdownAlign={{
                points: ['tl', 'bl'],
                offset: [0, 4],
                overflow: { adjustX: 0, adjustY: 1 },
              }}
            />
          </ModalFormField>
        ) : (
          <>
            {!isEditMode && (
              <Button
                buttonStyle="link"
                icon={<Icons.CaretLeftOutlined iconSize="s" />}
                onClick={handleBack}
              >
                {t('Back')}
              </Button>
            )}
            <ModalFormField label={t('Name')} required>
              <Input
                value={name}
                onChange={e => setName(e.target.value)}
                placeholder={t('Name of the semantic layer')}
              />
            </ModalFormField>
            {configSchema && (
              // Wrap in a form with autocomplete="off" so browsers do not
              // autofill credential fields (service token, account, etc.).
              // eslint-disable-next-line jsx-a11y/no-redundant-roles
              <form
                role="presentation"
                autoComplete="off"
                onSubmit={e => e.preventDefault()}
              >
                <JsonForms
                  schema={configSchema}
                  uischema={uiSchema}
                  data={formData}
                  renderers={renderers}
                  cells={cellRegistryEntries}
                  config={{ refreshingSchema, formData }}
                  validationMode={validationMode}
                  onChange={handleFormChange}
                />
              </form>
            )}
          </>
        )}
      </ModalContent>
    </StandardModal>
  );
}

View File

@@ -0,0 +1,150 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import type { JsonSchema } from '@jsonforms/core';
import {
areDependenciesSatisfied,
sanitizeSchema,
buildUiSchema,
getDynamicDependencies,
serializeDependencyValues,
} from './jsonFormsHelpers';
test('areDependenciesSatisfied returns true for present dependency values', () => {
  // Both named dependencies have non-empty values in the form data.
  expect(
    areDependenciesSatisfied(['database', 'schema'], {
      database: 'examples',
      schema: 'public',
    }),
  ).toBe(true);
});
test('areDependenciesSatisfied treats empty object dependencies as unsatisfied', () => {
  // An empty object value counts as "not yet filled in".
  expect(
    areDependenciesSatisfied(['auth'], {
      auth: {},
    }),
  ).toBe(false);
});
test('areDependenciesSatisfied uses schema defaults for untouched fields', () => {
  // The form data is empty, but the schema supplies a default for
  // `database`, so the dependency is considered satisfied.
  const schema: JsonSchema = {
    type: 'object',
    properties: {
      database: {
        type: 'string',
        default: 'analytics',
      },
    },
  };
  expect(areDependenciesSatisfied(['database'], {}, schema)).toBe(true);
});
test('sanitizeSchema removes empty enums and preserves other properties', () => {
  const schema: JsonSchema = {
    type: 'object',
    properties: {
      environment: {
        type: 'string',
        enum: [],
      },
      warehouse: {
        type: 'string',
        enum: ['xsmall', 'small'],
      },
    },
  };
  const sanitized = sanitizeSchema(schema);
  const sanitizedProperties =
    (sanitized.properties as Record<string, JsonSchema>) ?? {};
  // Empty enum is dropped (would render an unusable dropdown); populated
  // enum survives untouched.
  expect(sanitizedProperties.environment?.enum).toBeUndefined();
  expect(sanitizedProperties.warehouse?.enum).toEqual(['xsmall', 'small']);
});
test('buildUiSchema respects x-propertyOrder and includes placeholders/tooltips', () => {
  const schema = {
    type: 'object',
    properties: {
      database: {
        type: 'string',
        description: 'Target database',
        examples: ['examples'],
      },
      schema: {
        type: 'string',
      },
    },
    'x-propertyOrder': ['schema', 'database'],
  } as JsonSchema;
  const uiSchema = buildUiSchema(schema) as {
    type: string;
    elements: Array<Record<string, unknown>>;
  };
  expect(uiSchema.type).toBe('VerticalLayout');
  // Elements follow x-propertyOrder, not declaration order.
  expect(uiSchema.elements[0].scope).toBe('#/properties/schema');
  expect(uiSchema.elements[1].scope).toBe('#/properties/database');
  // First `examples` entry becomes the placeholder; `description` the tooltip.
  expect(uiSchema.elements[1].options).toEqual({
    placeholderText: 'examples',
    tooltip: 'Target database',
  });
});
test('getDynamicDependencies extracts x-dynamic dependency mapping', () => {
  const schema = {
    type: 'object',
    properties: {
      schema: {
        type: 'string',
        'x-dynamic': true,
        'x-dependsOn': ['database'],
      },
      database: {
        type: 'string',
      },
      warehouse: {
        type: 'string',
        'x-dynamic': true,
      },
    },
  } as JsonSchema;
  // Only dynamic fields WITH an x-dependsOn list appear in the mapping;
  // `warehouse` is dynamic but dependency-free, so it is omitted.
  expect(getDynamicDependencies(schema)).toEqual({ schema: ['database'] });
});
test('serializeDependencyValues is stable and sorted by key', () => {
  const dynamicDeps = {
    schema: ['database'],
    role: ['warehouse', 'database'],
  };
  const data = {
    warehouse: 'compute_wh',
    database: 'analytics',
    ignored: 'x',
  };
  // Snapshot contains only dependency keys, deduplicated and key-sorted,
  // so it can be compared across renders regardless of insertion order.
  expect(serializeDependencyValues(dynamicDeps, data)).toBe(
    JSON.stringify({ database: 'analytics', warehouse: 'compute_wh' }),
  );
});

View File

@@ -0,0 +1,386 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useEffect } from 'react';
import { t } from '@apache-superset/core/translation';
import { Spin, Select, Form } from 'antd';
import { withJsonFormsControlProps } from '@jsonforms/react';
import type {
JsonSchema,
UISchemaElement,
ControlProps,
} from '@jsonforms/core';
import {
rankWith,
and,
isStringControl,
formatIs,
schemaMatches,
} from '@jsonforms/core';
import {
rendererRegistryEntries,
TextControl,
} from '@great-expectations/jsonforms-antd-renderers';
// Debounce window (ms) applied before schema refreshes / view fetches
// triggered by form changes, so rapid keystrokes don't spam the backend.
export const SCHEMA_REFRESH_DEBOUNCE_MS = 500;
/**
 * Custom renderer that masks input for fields whose JSON Schema declares
 * `format: "password"` (e.g. Pydantic `SecretStr`).
 */
function PasswordControl(props: ControlProps) {
  const baseOptions = props.uischema.options;
  const existingInputProps =
    (baseOptions?.inputProps as Record<string, unknown>) ?? {};
  const uischema = {
    ...props.uischema,
    options: {
      ...baseOptions,
      type: 'password',
      inputProps: {
        ...existingInputProps,
        // 'new-password' keeps browsers from autofilling saved login
        // passwords into service-token fields; it is respected even where
        // 'off' is ignored (Chrome ≥ 34).
        autoComplete: 'new-password',
      },
    },
  };
  return TextControl({ ...props, uischema });
}
// Wrap PasswordControl so it receives JSON Forms control props
// (data, handleChange, path, schema, uischema, ...).
const PasswordRenderer = withJsonFormsControlProps(PasswordControl);
// Registry entry: fires for string controls with `format: "password"`.
const passwordEntry = {
  tester: rankWith(3, and(isStringControl, formatIs('password'))),
  renderer: PasswordRenderer,
};
/**
 * Invisible renderer for `const` properties (e.g. Pydantic discriminator
 * fields): renders nothing, but writes the const value into form data so
 * discriminated unions resolve correctly on the backend.
 */
function ConstControl({ data, handleChange, path, schema }: ControlProps) {
  const constValue = (schema as Record<string, unknown>).const;
  useEffect(() => {
    const outOfSync = constValue !== undefined && data !== constValue;
    if (outOfSync) {
      handleChange(path, constValue);
    }
  }, [constValue, data, handleChange, path]);
  return null;
}
// Wrap ConstControl with JSON Forms control props.
const ConstRenderer = withJsonFormsControlProps(ConstControl);
// Registry entry: fires for any schema carrying `const` that is NOT
// read-only (read-only const fields are rendered visibly by readOnlyEntry,
// whose tester outranks this one).
const constEntry = {
  tester: rankWith(
    10,
    schemaMatches(
      s =>
        s !== undefined &&
        'const' in s &&
        !(s as Record<string, unknown>).readOnly,
    ),
  ),
  renderer: ConstRenderer,
};
/**
 * Renderer for read-only fields (e.g. a fixed database the admin locked).
 * Shows a disabled input with the current value and, like ConstControl,
 * injects the schema `const`/`default` into form data when missing.
 */
function ReadOnlyControl(props: ControlProps) {
  const { data, handleChange, path, schema } = props;
  const raw = schema as Record<string, unknown>;
  const defaultValue = raw.const ?? raw.default;
  useEffect(() => {
    if (defaultValue !== undefined && data !== defaultValue) {
      handleChange(path, defaultValue);
    }
  }, [defaultValue, data, handleChange, path]);
  // Same props, but rendered as a disabled text input.
  return TextControl({ ...props, enabled: false });
}
// Wrap ReadOnlyControl with JSON Forms control props.
const ReadOnlyRenderer = withJsonFormsControlProps(ReadOnlyControl);
// Registry entry: rank 11 so `readOnly: true` wins over constEntry (10) —
// a read-only const renders a visible disabled input instead of nothing.
const readOnlyEntry = {
  tester: rankWith(
    11,
    schemaMatches(
      s => s !== undefined && (s as Record<string, unknown>).readOnly === true,
    ),
  ),
  renderer: ReadOnlyRenderer,
};
/**
 * Returns true when every listed dependency is "filled".
 *
 * A value counts as filled when it is non-null, non-undefined, a non-empty
 * string, and — for objects (like nested auth) — has at least one key.
 * When a field has no value in `data` at all, its schema `default` counts
 * as a fallback: JsonForms only writes defaults into `data` once a field
 * is interacted with, so a field shipping a sensible default (e.g.
 * `admin_host`) must not permanently block the refresh.
 */
export function areDependenciesSatisfied(
  dependencies: string[],
  data: Record<string, unknown>,
  schema?: JsonSchema,
): boolean {
  const isFilled = (value: unknown): boolean => {
    if (value === null || value === undefined || value === '') return false;
    if (typeof value === 'object') {
      return Object.keys(value as object).length > 0;
    }
    return true;
  };
  return dependencies.every(dep => {
    if (isFilled(data[dep])) return true;
    // Untouched field: fall back to the schema default, if any.
    const fallback = schema?.properties?.[dep]?.default;
    return fallback !== null && fallback !== undefined && fallback !== '';
  });
}
/**
 * Renderer for fields marked `x-dynamic` in the JSON Schema.
 * While a schema refresh is in flight AND the field's dependencies are
 * filled, the input is disabled and shows a spinner suffix; otherwise it
 * renders as a plain text control.
 */
function DynamicFieldControl(props: ControlProps) {
  const config = props.config ?? {};
  const { refreshingSchema, formData: cfgData } = config;
  const deps = (props.schema as Record<string, unknown>)?.['x-dependsOn'];
  const showSpinner =
    refreshingSchema &&
    Array.isArray(deps) &&
    areDependenciesSatisfied(
      deps as string[],
      (cfgData as Record<string, unknown>) ?? {},
      props.rootSchema,
    );
  if (!showSpinner) {
    return TextControl(props);
  }
  const uischema = {
    ...props.uischema,
    options: {
      ...props.uischema.options,
      placeholderText: t('Loading...'),
      inputProps: { suffix: <Spin size="small" /> },
    },
  };
  return TextControl({ ...props, uischema, enabled: false });
}
// Wrap DynamicFieldControl with JSON Forms control props.
const DynamicFieldRenderer = withJsonFormsControlProps(DynamicFieldControl);
// Registry entry: fires for string controls explicitly flagged
// `'x-dynamic': true` in the schema.
const dynamicFieldEntry = {
  tester: rankWith(
    3,
    and(
      isStringControl,
      schemaMatches(
        s => (s as Record<string, unknown>)?.['x-dynamic'] === true,
      ),
    ),
  ),
  renderer: DynamicFieldRenderer,
};
/**
* Renderer for fields that carry an ``x-enumNames`` array alongside their
* ``enum`` values. Renders as an Antd Select showing human-readable labels
* (from ``x-enumNames``) while storing the underlying enum values in form
* data. Used for MetricFlow's integer-ID fields (account, project,
* environment) where the backend provides both IDs and display names.
*/
function EnumNamesControl(props: ControlProps) {
const { refreshingSchema } = props.config ?? {};
const schema = props.schema as Record<string, unknown>;
const enumValues = (schema.enum as unknown[]) ?? [];
const enumNames =
(schema['x-enumNames'] as string[]) ?? enumValues.map(String);
const options = enumValues.map((value, index) => ({
value,
label: enumNames[index] ?? String(value),
}));
const tooltip = (props.uischema?.options as Record<string, unknown>)
?.tooltip as string | undefined;
return (
<Form.Item label={props.label} tooltip={tooltip}>
<Select
value={props.data ?? null}
onChange={value => props.handleChange(props.path, value)}
options={options}
style={{ width: '100%' }}
disabled={!props.enabled}
allowClear
loading={!!refreshingSchema}
placeholder={
(props.uischema?.options as Record<string, unknown>)
?.placeholderText as string | undefined
}
/>
</Form.Item>
);
}
// Wrap EnumNamesControl with JSON Forms control props.
const EnumNamesRenderer = withJsonFormsControlProps(EnumNamesControl);
const enumNamesEntry = {
  // Rank 5: intended to outrank the library's default string renderer so
  // this fires whenever a non-empty x-enumNames is present, regardless of
  // the underlying value type. NOTE(review): the original comment claimed
  // the default string renderer ranks at 23, which would outrank 5 —
  // confirm the actual ranks in @great-expectations/jsonforms-antd-renderers.
  tester: rankWith(
    5,
    schemaMatches(s => {
      const names = (s as Record<string, unknown>)['x-enumNames'];
      return Array.isArray(names) && (names as unknown[]).length > 0;
    }),
  ),
  renderer: EnumNamesRenderer,
};
// Full renderer set used by the JSON Forms instances in this feature: the
// library defaults plus our custom entries. Selection is presumably driven
// by tester rank rather than array order — confirm against the JSON Forms
// renderer-resolution docs if reordering.
export const renderers = [
  ...rendererRegistryEntries,
  passwordEntry,
  constEntry,
  readOnlyEntry,
  enumNamesEntry,
  dynamicFieldEntry,
];
/**
 * Removes empty `enum` arrays from schema properties. The JSON Schema spec
 * requires `enum` to have at least one item and AJV rejects empty arrays,
 * so such fields are left to render as plain text inputs instead.
 */
export function sanitizeSchema(schema: JsonSchema): JsonSchema {
  if (!schema.properties) return schema;
  const entries = Object.entries(schema.properties).map(([key, prop]) => {
    const hasEmptyEnum =
      typeof prop === 'object' &&
      prop !== null &&
      'enum' in prop &&
      Array.isArray(prop.enum) &&
      prop.enum.length === 0;
    if (!hasEmptyEnum) {
      return [key, prop as JsonSchema] as const;
    }
    // Drop only the offending `enum`; keep every other keyword.
    const { enum: _empty, ...withoutEnum } = prop;
    return [key, withoutEnum] as const;
  });
  return {
    ...schema,
    properties: Object.fromEntries(entries),
  } as JsonSchema;
}
/**
 * Builds a JSON Forms UI schema from a JSON Schema.
 *
 * Property order comes from the backend's `x-propertyOrder` when present,
 * otherwise from the JSON object key order. For each property, the first
 * `examples` entry becomes the placeholder text and `description` becomes
 * the tooltip. Returns undefined when the schema has no properties.
 */
export function buildUiSchema(schema: JsonSchema): UISchemaElement | undefined {
  const { properties } = schema;
  if (!properties) return undefined;
  const order =
    ((schema as Record<string, unknown>)['x-propertyOrder'] as string[]) ??
    Object.keys(properties);
  const elements: Record<string, unknown>[] = [];
  for (const key of order) {
    // Ignore ordering entries that don't correspond to a real property.
    if (!(key in properties)) continue;
    const prop = properties[key];
    const control: Record<string, unknown> = {
      type: 'Control',
      scope: `#/properties/${key}`,
    };
    if (typeof prop === 'object' && prop !== null) {
      const options: Record<string, unknown> = {};
      if (
        'examples' in prop &&
        Array.isArray(prop.examples) &&
        prop.examples.length > 0
      ) {
        options.placeholderText = String(prop.examples[0]);
      }
      if ('description' in prop && typeof prop.description === 'string') {
        options.tooltip = prop.description;
      }
      if (Object.keys(options).length > 0) {
        control.options = options;
      }
    }
    elements.push(control);
  }
  return { type: 'VerticalLayout', elements } as UISchemaElement;
}
/**
 * Extracts dynamic-field dependency mappings from the schema.
 *
 * A property participates only when it is explicitly flagged
 * `'x-dynamic': true` AND carries an `x-dependsOn` array. Requiring the
 * literal `true` matches the dynamicFieldEntry renderer tester; previously
 * a property with `'x-dynamic': false` (but with an `x-dependsOn` array)
 * would still have been treated as dynamic because only key presence was
 * checked.
 *
 * @returns map of field name -> list of dependency field names.
 */
export function getDynamicDependencies(
  schema: JsonSchema,
): Record<string, string[]> {
  const deps: Record<string, string[]> = {};
  if (!schema.properties) return deps;
  for (const [key, prop] of Object.entries(schema.properties)) {
    if (typeof prop !== 'object' || prop === null) continue;
    const raw = prop as Record<string, unknown>;
    if (raw['x-dynamic'] === true && Array.isArray(raw['x-dependsOn'])) {
      deps[key] = raw['x-dependsOn'] as string[];
    }
  }
  return deps;
}
/**
 * Serializes the current values of all dependency fields into a stable,
 * key-sorted JSON string, so callers can cheaply detect whether dependency
 * values actually changed before re-fetching.
 */
export function serializeDependencyValues(
  dynamicDeps: Record<string, string[]>,
  data: Record<string, unknown>,
): string {
  // Collect every dependency key across all dynamic fields, de-duplicated.
  const depKeys = Object.values(dynamicDeps).flat();
  const snapshot: Record<string, unknown> = {};
  for (const key of Array.from(new Set(depKeys)).sort()) {
    snapshot[key] = data[key];
  }
  return JSON.stringify(snapshot);
}

View File

@@ -0,0 +1,65 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { isFeatureEnabled, FeatureFlag } from '@superset-ui/core';
import { t } from '@apache-superset/core/translation';
const SEMANTIC_LAYERS_FLAG = 'SEMANTIC_LAYERS' as FeatureFlag;
/**
 * Picks between legacy and semantic-layer wording. With the
 * SEMANTIC_LAYERS feature flag on, the UI broadens "dataset" →
 * "datasource" and "database" → "data connection" so semantic views and
 * semantic layers feel first-class alongside datasets and databases.
 */
function sl<T>(legacy: T, semantic: T): T {
  if (isFeatureEnabled(SEMANTIC_LAYERS_FLAG)) {
    return semantic;
  }
  return legacy;
}
// ---------------------------------------------------------------------------
// "dataset" family
// ---------------------------------------------------------------------------
/** Capitalized singular: "Dataset" / "Datasource" */
export function datasetLabel() {
  return sl(t('Dataset'), t('Datasource'));
}
/** Lower-case singular: "dataset" / "datasource" */
export function datasetLabelLower() {
  return sl(t('dataset'), t('datasource'));
}
/** Capitalized plural: "Datasets" / "Datasources" */
export function datasetsLabel() {
  return sl(t('Datasets'), t('Datasources'));
}
/** Lower-case plural: "datasets" / "datasources" */
export function datasetsLabelLower() {
  return sl(t('datasets'), t('datasources'));
}
// ---------------------------------------------------------------------------
// "database" family
// ---------------------------------------------------------------------------
/** Capitalized singular: "Database" / "Data connection" */
export function databaseLabel() {
  return sl(t('Database'), t('Data connection'));
}
/** Lower-case singular: "database" / "data connection" */
export function databaseLabelLower() {
  return sl(t('database'), t('data connection'));
}
/** Capitalized plural: "Databases" / "Data connections" */
export function databasesLabel() {
  return sl(t('Databases'), t('Data connections'));
}
/** Lower-case plural: "databases" / "data connections" */
export function databasesLabelLower() {
  return sl(t('databases'), t('data connections'));
}

View File

@@ -0,0 +1,264 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
render,
screen,
waitFor,
userEvent,
} from 'spec/helpers/testing-library';
import { SupersetClient } from '@superset-ui/core';
import AddSemanticViewModal from './AddSemanticViewModal';
// Replace only SupersetClient.get/post with jest mocks while keeping the
// rest of @superset-ui/core intact. The factory calls jest.requireActual
// because jest.mock is hoisted above the imports by the jest transform.
jest.mock('@superset-ui/core', () => ({
  ...jest.requireActual('@superset-ui/core'),
  SupersetClient: {
    ...jest.requireActual('@superset-ui/core').SupersetClient,
    get: jest.fn(),
    post: jest.fn(),
  },
}));
// Typed handles to the mocked client methods, configured per test.
const mockedGet = SupersetClient.get as jest.Mock;
const mockedPost = SupersetClient.post as jest.Mock;
// Fresh modal props (with fresh spies) for each test.
const createProps = () => {
  const props = {
    show: true,
    onHide: jest.fn(),
    onSuccess: jest.fn(),
    addDangerToast: jest.fn(),
    addSuccessToast: jest.fn(),
  };
  return props;
};
// Open the combobox identified by its accessible name, then click the
// option with the given label.
const selectOption = async (name: string, optionLabel: string) => {
  const combo = await screen.findByRole('combobox', { name });
  await userEvent.click(combo);
  const option = await screen.findByText(optionLabel);
  await userEvent.click(option);
};
// Clear call history and implementations between tests.
beforeEach(() => {
  mockedPost.mockReset();
  mockedGet.mockReset();
});
// Happy path: open the modal, pick a layer (empty runtime schema), pick a
// view, save, and verify the exact payloads sent to each endpoint.
test('loads layers on open and adds selected semantic views', async () => {
  mockedGet.mockResolvedValue({
    json: {
      result: [{ uuid: 'layer-1', name: 'Snowflake SL' }],
    },
  });
  // Route mocked POSTs by endpoint: runtime schema → views → create.
  mockedPost.mockImplementation(({ endpoint }: { endpoint: string }) => {
    if (endpoint === '/api/v1/semantic_layer/layer-1/schema/runtime') {
      return Promise.resolve({ json: { result: { properties: {} } } });
    }
    if (endpoint === '/api/v1/semantic_layer/layer-1/views') {
      return Promise.resolve({
        json: {
          result: [
            { name: 'orders', already_added: false },
            { name: 'customers', already_added: true },
          ],
        },
      });
    }
    if (endpoint === '/api/v1/semantic_view/') {
      return Promise.resolve({
        json: {
          result: {
            created: [{ uuid: 'view-1', name: 'orders' }],
          },
        },
      });
    }
    return Promise.reject(new Error(`Unexpected endpoint: ${endpoint}`));
  });
  const props = createProps();
  render(<AddSemanticViewModal {...props} />);
  // Layers are fetched as soon as the modal opens.
  await waitFor(() => {
    expect(mockedGet).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/',
    });
  });
  await selectOption('Semantic layer', 'Snowflake SL');
  // Selecting a layer triggers the runtime-schema fetch...
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/layer-1/schema/runtime',
      jsonPayload: {},
    });
  });
  // ...and, since the schema has no properties, the views fetch right away.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_layer/layer-1/views',
      jsonPayload: { runtime_data: {} },
    });
  });
  await selectOption('Semantic views', 'orders');
  await userEvent.click(
    screen.getByRole('button', { name: /add 1 view\(s\)/i }),
  );
  // Save sends only the newly-selected (not already_added) view.
  await waitFor(() => {
    expect(mockedPost).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_view/',
      jsonPayload: {
        views: [
          {
            name: 'orders',
            semantic_layer_uuid: 'layer-1',
            configuration: {},
          },
        ],
      },
    });
  });
  expect(props.addSuccessToast).toHaveBeenCalledWith(
    '1 semantic view(s) added',
  );
  expect(props.onSuccess).toHaveBeenCalled();
  expect(props.onHide).toHaveBeenCalled();
});
// Partial success: one view created, one errored — both toasts fire and
// the modal stays open (onSuccess/onHide not called).
test('shows partial success feedback when only some semantic views are created', async () => {
  mockedGet.mockResolvedValue({
    json: {
      result: [{ uuid: 'layer-1', name: 'Snowflake SL' }],
    },
  });
  mockedPost.mockImplementation(({ endpoint }: { endpoint: string }) => {
    if (endpoint === '/api/v1/semantic_layer/layer-1/schema/runtime') {
      return Promise.resolve({ json: { result: { properties: {} } } });
    }
    if (endpoint === '/api/v1/semantic_layer/layer-1/views') {
      return Promise.resolve({
        json: {
          result: [
            { name: 'orders', already_added: false },
            { name: 'customers', already_added: false },
          ],
        },
      });
    }
    if (endpoint === '/api/v1/semantic_view/') {
      // Backend reports a mixed outcome: one created, one failed.
      return Promise.resolve({
        json: {
          result: {
            created: [{ uuid: 'view-1', name: 'orders' }],
            errors: [{ name: 'customers', error: 'create failed' }],
          },
        },
      });
    }
    return Promise.reject(new Error(`Unexpected endpoint: ${endpoint}`));
  });
  const props = createProps();
  render(<AddSemanticViewModal {...props} />);
  await selectOption('Semantic layer', 'Snowflake SL');
  await waitFor(() => {
    expect(
      screen.getByRole('combobox', { name: 'Semantic views' }),
    ).toBeInTheDocument();
  });
  await selectOption('Semantic views', 'orders');
  await selectOption('Semantic views', 'customers');
  await userEvent.click(
    screen.getByRole('button', { name: /add 2 view\(s\)/i }),
  );
  await waitFor(() => {
    expect(props.addSuccessToast).toHaveBeenCalledWith(
      '1 semantic view(s) added',
    );
    expect(props.addDangerToast).toHaveBeenCalledWith(
      '1 semantic view(s) failed to add: customers',
    );
  });
  // Modal must stay open so the user can retry the failed view.
  expect(props.onSuccess).not.toHaveBeenCalled();
  expect(props.onHide).not.toHaveBeenCalled();
});
// A failed layers fetch surfaces a danger toast.
test('shows toast when loading semantic layers fails', async () => {
  mockedGet.mockRejectedValue(new Error('boom'));
  const modalProps = createProps();
  render(<AddSemanticViewModal {...modalProps} />);
  await waitFor(() =>
    expect(modalProps.addDangerToast).toHaveBeenCalledWith(
      'An error occurred while fetching semantic layers',
    ),
  );
});
// Rejected create request → generic danger toast.
test('shows toast when add semantic views fails', async () => {
  mockedGet.mockResolvedValue({
    json: {
      result: [{ uuid: 'layer-1', name: 'Snowflake SL' }],
    },
  });
  mockedPost.mockImplementation(({ endpoint }: { endpoint: string }) => {
    if (endpoint === '/api/v1/semantic_layer/layer-1/schema/runtime') {
      return Promise.resolve({ json: { result: { properties: {} } } });
    }
    if (endpoint === '/api/v1/semantic_layer/layer-1/views') {
      return Promise.resolve({
        json: {
          result: [{ name: 'orders', already_added: false }],
        },
      });
    }
    if (endpoint === '/api/v1/semantic_view/') {
      // Simulate the create call failing outright.
      return Promise.reject(new Error('save failed'));
    }
    return Promise.reject(new Error(`Unexpected endpoint: ${endpoint}`));
  });
  const props = createProps();
  render(<AddSemanticViewModal {...props} />);
  await selectOption('Semantic layer', 'Snowflake SL');
  await waitFor(() => {
    expect(
      screen.getByRole('combobox', { name: 'Semantic views' }),
    ).toBeInTheDocument();
  });
  await selectOption('Semantic views', 'orders');
  await userEvent.click(
    screen.getByRole('button', { name: /add 1 view\(s\)/i }),
  );
  await waitFor(() => {
    expect(props.addDangerToast).toHaveBeenCalledWith(
      'An error occurred while adding semantic views',
    );
  });
});

View File

@@ -0,0 +1,541 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useState, useEffect, useCallback, useRef } from 'react';
import { t } from '@apache-superset/core/translation';
import { styled } from '@apache-superset/core/theme';
import { SupersetClient } from '@superset-ui/core';
import { Spin } from 'antd';
import { Select } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import { JsonForms } from '@jsonforms/react';
import type { JsonSchema, UISchemaElement } from '@jsonforms/core';
import { cellRegistryEntries } from '@great-expectations/jsonforms-antd-renderers';
import type { ErrorObject } from 'ajv';
import {
StandardModal,
ModalFormField,
MODAL_STANDARD_WIDTH,
} from 'src/components/Modal';
import {
renderers,
sanitizeSchema,
buildUiSchema,
getDynamicDependencies,
areDependenciesSatisfied,
serializeDependencyValues,
SCHEMA_REFRESH_DEBOUNCE_MS,
} from 'src/features/semanticLayers/jsonFormsHelpers';
// A semantic layer as returned by GET /api/v1/semantic_layer/.
interface SemanticLayerOption {
  uuid: string;
  name: string;
}
// A view reported by the layer's /views endpoint; `already_added` marks
// views that already exist in Superset and are filtered out on save.
interface AvailableView {
  name: string;
  already_added: boolean;
}
// Padded wrapper for the modal body.
const ModalContent = styled.div`
  padding: ${({ theme }) => theme.sizeUnit * 4}px;
`;
// Centers the spinner shown while the runtime schema loads.
const LoadingContainer = styled.div`
  display: flex;
  justify-content: center;
  padding: ${({ theme }) => theme.sizeUnit * 4}px;
`;
// Section heading above the runtime-config fields.
const SectionLabel = styled.div`
  color: ${({ theme }) => theme.colorText};
  font-size: ${({ theme }) => theme.fontSize}px;
  margin-top: ${({ theme }) => theme.sizeUnit}px;
  margin-bottom: ${({ theme }) => theme.sizeUnit * 2}px;
`;
// Container that restyles the JsonForms/antd output into a vertical,
// gap-spaced form (labels stacked above controls).
const VerticalFormFields = styled.div`
  margin-bottom: ${({ theme }) => theme.sizeUnit * 4}px;
  /* The antd renderer's VerticalLayout creates its own <Form> —
     force flex-column so gap controls spacing between fields */
  && form {
    display: flex;
    flex-direction: column;
    gap: ${({ theme }) => theme.sizeUnit * 4}px;
  }
  /* Reset antd default margins so gap controls all spacing */
  && .ant-form-item {
    margin-bottom: 0;
  }
  /* Override ant-form-item-horizontal: stack label above control */
  && .ant-form-item-row {
    flex-direction: column;
    align-items: stretch;
  }
  && .ant-form-item-label {
    text-align: left;
    max-width: 100%;
    flex: none;
    padding-bottom: ${({ theme }) => theme.sizeUnit}px;
  }
  && .ant-form-item-control {
    max-width: 100%;
    flex: auto;
  }
  && .ant-form-item-label > label {
    color: ${({ theme }) => theme.colorText};
    font-size: ${({ theme }) => theme.fontSize}px;
  }
`;
interface AddSemanticViewModalProps {
  // Whether the modal is visible; toggling to false resets internal state.
  show: boolean;
  // Called to close the modal.
  onHide: () => void;
  // Called after all selected views were created successfully.
  onSuccess: () => void;
  // Toast callbacks supplied by the toast HOC/context.
  addDangerToast: (msg: string) => void;
  addSuccessToast: (msg: string) => void;
}
export default function AddSemanticViewModal({
show,
onHide,
onSuccess,
addDangerToast,
addSuccessToast,
}: AddSemanticViewModalProps) {
// --- Layer ---
const [layers, setLayers] = useState<SemanticLayerOption[]>([]);
const [selectedLayerUuid, setSelectedLayerUuid] = useState<string | null>(
null,
);
const [loadingLayers, setLoadingLayers] = useState(false);
// --- Runtime config ---
const [runtimeSchema, setRuntimeSchema] = useState<JsonSchema | null>(null);
const [runtimeUiSchema, setRuntimeUiSchema] = useState<
UISchemaElement | undefined
>();
const [runtimeData, setRuntimeData] = useState<Record<string, unknown>>({});
const [loadingRuntime, setLoadingRuntime] = useState(false);
const [refreshingSchema, setRefreshingSchema] = useState(false);
const errorsRef = useRef<ErrorObject[]>([]);
const dynamicDepsRef = useRef<Record<string, string[]>>({});
const lastDepSnapshotRef = useRef('');
const schemaTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
// --- Views ---
const [availableViews, setAvailableViews] = useState<AvailableView[]>([]);
const [selectedViewNames, setSelectedViewNames] = useState<string[]>([]);
const [loadingViews, setLoadingViews] = useState(false);
const viewsTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const lastViewsKeyRef = useRef('');
// --- Misc ---
const [saving, setSaving] = useState(false);
const fetchGenRef = useRef(0);
// =========================================================================
// Fetch helpers
// =========================================================================
const fetchLayers = async () => {
setLoadingLayers(true);
try {
const { json } = await SupersetClient.get({
endpoint: '/api/v1/semantic_layer/',
});
setLayers(
(json.result ?? []).map((l: { uuid: string; name: string }) => ({
uuid: l.uuid,
name: l.name,
})),
);
} catch {
addDangerToast(t('An error occurred while fetching semantic layers'));
} finally {
setLoadingLayers(false);
}
};
const fetchViews = useCallback(
async (uuid: string, rData: Record<string, unknown>, gen: number) => {
setLoadingViews(true);
setAvailableViews([]);
setSelectedViewNames([]);
try {
const { json } = await SupersetClient.post({
endpoint: `/api/v1/semantic_layer/${uuid}/views`,
jsonPayload: { runtime_data: rData },
});
if (gen !== fetchGenRef.current) return;
setAvailableViews(json.result ?? []);
} catch {
if (gen !== fetchGenRef.current) return;
addDangerToast(t('An error occurred while fetching available views'));
} finally {
if (gen === fetchGenRef.current) setLoadingViews(false);
}
},
[addDangerToast],
);
const applyRuntimeSchema = useCallback((rawSchema: JsonSchema) => {
const schema = sanitizeSchema(rawSchema);
setRuntimeSchema(schema);
setRuntimeUiSchema(buildUiSchema(schema));
dynamicDepsRef.current = getDynamicDependencies(rawSchema);
}, []);
const scheduleFetchViews = useCallback(
(uuid: string, data: Record<string, unknown>) => {
const key = JSON.stringify(data);
if (key === lastViewsKeyRef.current) return;
lastViewsKeyRef.current = key;
if (viewsTimerRef.current) clearTimeout(viewsTimerRef.current);
viewsTimerRef.current = setTimeout(() => {
fetchViews(uuid, data, fetchGenRef.current);
}, SCHEMA_REFRESH_DEBOUNCE_MS);
},
[fetchViews],
);
// =========================================================================
// Layer change — fetch runtime schema, clear downstream state
// =========================================================================
const handleLayerChange = useCallback(
async (uuid: string) => {
fetchGenRef.current += 1;
const gen = fetchGenRef.current;
setSelectedLayerUuid(uuid);
if (schemaTimerRef.current) clearTimeout(schemaTimerRef.current);
if (viewsTimerRef.current) clearTimeout(viewsTimerRef.current);
setRuntimeSchema(null);
setRuntimeUiSchema(undefined);
setRuntimeData({});
errorsRef.current = [];
dynamicDepsRef.current = {};
lastDepSnapshotRef.current = '';
setAvailableViews([]);
setSelectedViewNames([]);
lastViewsKeyRef.current = '';
setLoadingRuntime(true);
try {
const { json } = await SupersetClient.post({
endpoint: `/api/v1/semantic_layer/${uuid}/schema/runtime`,
jsonPayload: {},
});
if (gen !== fetchGenRef.current) return;
const schema = json.result;
if (
!schema?.properties ||
Object.keys(schema.properties).length === 0
) {
// No runtime config needed — fetch views right away
fetchViews(uuid, {}, gen);
} else {
applyRuntimeSchema(schema);
}
} catch {
if (gen !== fetchGenRef.current) return;
addDangerToast(
t('An error occurred while fetching the runtime schema'),
);
} finally {
if (gen === fetchGenRef.current) setLoadingRuntime(false);
}
},
[applyRuntimeSchema, fetchViews, addDangerToast],
);
// =========================================================================
// Runtime form change — refresh dynamic fields or auto-fetch views
// =========================================================================
const handleRuntimeFormChange = useCallback(
({
data,
errors,
}: {
data: Record<string, unknown>;
errors?: ErrorObject[];
}) => {
setRuntimeData(data);
errorsRef.current = errors ?? [];
if (!selectedLayerUuid) return;
const gen = fetchGenRef.current;
// Dynamic deps changed → refresh schema (e.g. database → schema)
const dynamicDeps = dynamicDepsRef.current;
if (Object.keys(dynamicDeps).length > 0) {
const hasSatisfiedDeps = Object.values(dynamicDeps).some(deps =>
areDependenciesSatisfied(deps, data),
);
if (hasSatisfiedDeps) {
const snapshot = serializeDependencyValues(dynamicDeps, data);
if (snapshot !== lastDepSnapshotRef.current) {
lastDepSnapshotRef.current = snapshot;
// Config is changing — clear views
setAvailableViews([]);
setSelectedViewNames([]);
lastViewsKeyRef.current = '';
if (schemaTimerRef.current) clearTimeout(schemaTimerRef.current);
const uuid = selectedLayerUuid;
schemaTimerRef.current = setTimeout(async () => {
setRefreshingSchema(true);
try {
const { json } = await SupersetClient.post({
endpoint: `/api/v1/semantic_layer/${uuid}/schema/runtime`,
jsonPayload: { runtime_data: data },
});
if (gen !== fetchGenRef.current) return;
applyRuntimeSchema(json.result);
} catch {
// Silent fail on refresh — form still works
} finally {
if (gen === fetchGenRef.current) setRefreshingSchema(false);
}
}, SCHEMA_REFRESH_DEBOUNCE_MS);
return;
}
}
}
// No schema refresh needed — fetch views if form is valid
if (errorsRef.current.length === 0) {
scheduleFetchViews(selectedLayerUuid, data);
}
},
[selectedLayerUuid, applyRuntimeSchema, scheduleFetchViews],
);
// After a schema refresh settles, JSON Forms re-validates and fires
// onChange → handleRuntimeFormChange handles view fetching. As a fallback
// (in case onChange doesn't fire), try once refreshingSchema flips false.
const prevRefreshingRef = useRef(false);
useEffect(() => {
if (prevRefreshingRef.current && !refreshingSchema && selectedLayerUuid) {
const timer = setTimeout(() => {
if (errorsRef.current.length === 0) {
scheduleFetchViews(selectedLayerUuid, runtimeData);
}
}, 100);
prevRefreshingRef.current = false;
return () => clearTimeout(timer);
}
prevRefreshingRef.current = refreshingSchema;
return undefined;
}, [refreshingSchema, selectedLayerUuid, runtimeData, scheduleFetchViews]);
// =========================================================================
// Modal open / close
// =========================================================================
useEffect(() => {
if (show) {
fetchLayers();
} else {
fetchGenRef.current += 1;
if (schemaTimerRef.current) clearTimeout(schemaTimerRef.current);
if (viewsTimerRef.current) clearTimeout(viewsTimerRef.current);
setLayers([]);
setSelectedLayerUuid(null);
setLoadingLayers(false);
setRuntimeSchema(null);
setRuntimeUiSchema(undefined);
setRuntimeData({});
setLoadingRuntime(false);
setRefreshingSchema(false);
errorsRef.current = [];
dynamicDepsRef.current = {};
lastDepSnapshotRef.current = '';
setAvailableViews([]);
setSelectedViewNames([]);
setLoadingViews(false);
setSaving(false);
lastViewsKeyRef.current = '';
}
}, [show]); // eslint-disable-line react-hooks/exhaustive-deps
// =========================================================================
// Save
// =========================================================================
  // Number of currently selected views that have not already been added;
  // only these are sent to the API, and save is disabled when it is zero.
  const newViewCount = availableViews.filter(
    v => selectedViewNames.includes(v.name) && !v.already_added,
  ).length;
  // Batch-create the selected semantic views. Per-view results come back in
  // json.result.created / json.result.errors; the modal only closes when at
  // least one view was created and nothing failed.
  const handleSave = async () => {
    if (!selectedLayerUuid || newViewCount === 0) return;
    setSaving(true);
    try {
      // Each created view shares the same layer and runtime configuration.
      const viewsToCreate = availableViews
        .filter(v => selectedViewNames.includes(v.name) && !v.already_added)
        .map(v => ({
          name: v.name,
          semantic_layer_uuid: selectedLayerUuid,
          configuration: runtimeData,
        }));
      const { json } = await SupersetClient.post({
        endpoint: '/api/v1/semantic_view/',
        jsonPayload: { views: viewsToCreate },
      });
      const created = Array.isArray(json?.result?.created)
        ? json.result.created
        : [];
      const errors = Array.isArray(json?.result?.errors)
        ? json.result.errors
        : [];
      if (created.length > 0) {
        addSuccessToast(t('%s semantic view(s) added', created.length));
      }
      if (errors.length > 0) {
        // Name the failed views when the backend provides names.
        const failedNames = errors
          .map((error: { name?: string }) => error?.name)
          .filter((name: string | undefined): name is string => !!name);
        addDangerToast(
          failedNames.length > 0
            ? t(
                '%s semantic view(s) failed to add: %s',
                errors.length,
                failedNames.join(', '),
              )
            : t('%s semantic view(s) failed to add', errors.length),
        );
      }
      if (created.length > 0 && errors.length === 0) {
        // Full success: refresh the parent list and close.
        onSuccess();
        onHide();
      } else if (errors.length === 0) {
        // Nothing created and no per-view errors reported: generic failure.
        addDangerToast(t('An error occurred while adding semantic views'));
      }
      // Partial failure (created > 0 && errors > 0): toasts already shown,
      // modal stays open so the user can retry the failed views.
    } catch {
      addDangerToast(t('An error occurred while adding semantic views'));
    } finally {
      setSaving(false);
    }
  };
// =========================================================================
// Render
// =========================================================================
const hasRuntimeFields =
runtimeSchema?.properties &&
Object.keys(runtimeSchema.properties).length > 0;
const viewsDisabled =
loadingViews || (!loadingViews && availableViews.length === 0);
return (
<StandardModal
show={show}
onHide={onHide}
onSave={handleSave}
title={t('Add Semantic View')}
icon={<Icons.PlusOutlined />}
width={MODAL_STANDARD_WIDTH}
saveDisabled={newViewCount === 0 || saving}
saveText={newViewCount > 0 ? t('Add %s view(s)', newViewCount) : t('Add')}
saveLoading={saving}
>
<ModalContent>
{/* Semantic Layer */}
<ModalFormField label={t('Semantic Layer')}>
<Select
ariaLabel={t('Semantic layer')}
placeholder={t('Select a semantic layer')}
loading={loadingLayers}
value={selectedLayerUuid}
onChange={value => handleLayerChange(value as string)}
options={layers.map(l => ({
value: l.uuid,
label: l.name,
}))}
getPopupContainer={() => document.body}
/>
</ModalFormField>
{/* Loading runtime schema */}
{loadingRuntime && (
<LoadingContainer>
<Spin size="small" />
</LoadingContainer>
)}
{/* Source location (runtime config fields) */}
{hasRuntimeFields && !loadingRuntime && (
<>
<SectionLabel>{t('Source location')}</SectionLabel>
<VerticalFormFields>
<JsonForms
schema={runtimeSchema!}
uischema={runtimeUiSchema}
data={runtimeData}
renderers={renderers}
cells={cellRegistryEntries}
config={{ refreshingSchema, formData: runtimeData }}
validationMode="ValidateAndHide"
onChange={handleRuntimeFormChange}
/>
</VerticalFormFields>
</>
)}
{/* Semantic Views — always visible once a layer is selected */}
{selectedLayerUuid && !loadingRuntime && (
<ModalFormField label={t('Semantic Views')}>
<Select
ariaLabel={t('Semantic views')}
placeholder={t('Select semantic views')}
mode="multiple"
loading={loadingViews}
disabled={viewsDisabled}
value={selectedViewNames}
onChange={values => setSelectedViewNames(values as string[])}
options={availableViews
.sort((a, b) => a.name.localeCompare(b.name))
.map(v => ({
value: v.name,
label: v.already_added
? `${v.name} (${t('already added')})`
: v.name,
disabled: v.already_added,
}))}
getPopupContainer={() => document.body}
/>
</ModalFormField>
)}
</ModalContent>
</StandardModal>
);
}

View File

@@ -0,0 +1,267 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import userEvent from '@testing-library/user-event';
import { render, screen, waitFor } from 'spec/helpers/testing-library';
import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
import SemanticViewEditModal from './SemanticViewEditModal';
// Mock only the network-facing pieces of @superset-ui/core; everything else
// keeps its real implementation so rendering behaves normally.
jest.mock('@superset-ui/core', () => ({
  ...jest.requireActual('@superset-ui/core'),
  SupersetClient: {
    ...jest.requireActual('@superset-ui/core').SupersetClient,
    put: jest.fn(),
    get: jest.fn(),
  },
  getClientErrorObject: jest.fn(() => Promise.resolve({ error: '' })),
}));
const mockedPut = SupersetClient.put as jest.Mock;
const mockedGet = SupersetClient.get as jest.Mock;
const mockedGetClientErrorObject = getClientErrorObject as jest.Mock;
// Canned structure payload: two dimensions, one metric — counts are asserted
// against the tab badges below.
const MOCK_STRUCTURE = {
  result: {
    dimensions: [
      {
        name: 'order_date',
        type: 'timestamp[us]',
        definition: 'ordered_at',
        description: 'Date of the order',
        grain: 'Day',
      },
      {
        name: 'customer_id',
        type: 'int64',
        definition: null,
        description: null,
        grain: null,
      },
    ],
    metrics: [
      {
        name: 'orders',
        type: 'double',
        definition: 'SIMPLE',
        description: 'Order count',
      },
    ],
  },
};
// Fresh props (and fresh jest.fn spies) per test; semantic view id 7 is
// baked into the endpoint assertions.
const createProps = () => ({
  show: true,
  onHide: jest.fn(),
  onSave: jest.fn(),
  addDangerToast: jest.fn(),
  addSuccessToast: jest.fn(),
  semanticView: {
    id: 7,
    table_name: 'orders_semantic_view',
    description: 'old description',
    cache_timeout: 60,
  },
});
// Default happy path: the structure fetch resolves with MOCK_STRUCTURE.
beforeEach(() => {
  mockedPut.mockReset();
  mockedGet.mockReset();
  mockedGetClientErrorObject.mockReset();
  mockedGetClientErrorObject.mockResolvedValue({ error: '' });
  mockedGet.mockResolvedValue({ json: MOCK_STRUCTURE });
});
test('saves semantic view and refreshes list', async () => {
  mockedPut.mockResolvedValue({});
  const props = createProps();
  render(<SemanticViewEditModal {...props} />);
  // The structure fetch must finish before the save button is enabled.
  await waitFor(() => expect(mockedGet).toHaveBeenCalled());
  // findByRole waits for the tab content to render (structure loaded).
  await screen.findByRole('tab', { name: /details/i });
  await userEvent.click(screen.getByRole('button', { name: /save/i }));
  await waitFor(() =>
    expect(mockedPut).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_view/7',
      jsonPayload: {
        description: 'old description',
        cache_timeout: 60,
      },
    }),
  );
  expect(props.addSuccessToast).toHaveBeenCalledWith('Semantic view updated');
  expect(props.onSave).toHaveBeenCalled();
  expect(props.onHide).toHaveBeenCalled();
});
test('shows backend error toast when save fails', async () => {
  mockedPut.mockRejectedValue(new Error('save failed'));
  // The structure fetch succeeds (mockedGet resolves in beforeEach), so
  // getClientErrorObject is only consulted for the failed save. Configuring
  // it once here is enough — the original duplicated this setup with a
  // misleading "reset" comment.
  mockedGetClientErrorObject.mockResolvedValue({
    error: 'Semantic view failed to save',
  });
  const props = createProps();
  render(<SemanticViewEditModal {...props} />);
  // Wait for structure fetch to complete so the save button is enabled.
  await waitFor(() => {
    expect(screen.getByRole('tab', { name: /details/i })).toBeInTheDocument();
  });
  await userEvent.click(screen.getByRole('button', { name: /save/i }));
  await waitFor(() => {
    expect(props.addDangerToast).toHaveBeenCalledWith(
      'Semantic view failed to save',
    );
  });
});
// The modal should hit the structure endpoint for the given view id (7)
// as soon as it is shown.
test('fetches structure on mount', async () => {
  const props = createProps();
  render(<SemanticViewEditModal {...props} />);
  await waitFor(() => {
    expect(mockedGet).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_view/7/structure',
    });
  });
});
test('fetches and displays dimensions tab', async () => {
  render(<SemanticViewEditModal {...createProps()} />);
  await waitFor(() => expect(mockedGet).toHaveBeenCalled());
  // Tab badge shows the dimension count from MOCK_STRUCTURE.
  const tab = screen.getByRole('tab', { name: /dimensions/i });
  expect(tab).toBeInTheDocument();
  expect(tab).toHaveTextContent('2');
  await userEvent.click(tab);
  // findByText waits for the table rows to appear.
  expect(await screen.findByText('order_date')).toBeInTheDocument();
  expect(screen.getByText('customer_id')).toBeInTheDocument();
  expect(screen.getByText('timestamp[us]')).toBeInTheDocument();
});
test('fetches and displays metrics tab', async () => {
  render(<SemanticViewEditModal {...createProps()} />);
  await waitFor(() => expect(mockedGet).toHaveBeenCalled());
  // Tab badge shows the metric count from MOCK_STRUCTURE.
  const tab = screen.getByRole('tab', { name: /metrics/i });
  expect(tab).toBeInTheDocument();
  expect(tab).toHaveTextContent('1');
  await userEvent.click(tab);
  // findByText waits for the table rows to appear.
  expect(await screen.findByText('orders')).toBeInTheDocument();
  expect(screen.getByText('SIMPLE')).toBeInTheDocument();
  expect(screen.getByText('Order count')).toBeInTheDocument();
});
test('shows info alert in structure tabs', async () => {
  render(<SemanticViewEditModal {...createProps()} />);
  await waitFor(() => expect(mockedGet).toHaveBeenCalled());
  await userEvent.click(screen.getByRole('tab', { name: /dimensions/i }));
  // findByText waits for the read-only info alert to render in the tab.
  expect(
    await screen.findByText(
      'Structure is managed by the upstream semantic layer and is read-only.',
    ),
  ).toBeInTheDocument();
});
// When the structure fetch rejects, the backend-provided error string should
// surface through addDangerToast.
test('handles structure fetch error', async () => {
  mockedGet.mockRejectedValue(new Error('fetch failed'));
  mockedGetClientErrorObject.mockResolvedValue({
    error: 'Failed to load structure',
  });
  const props = createProps();
  render(<SemanticViewEditModal {...props} />);
  await waitFor(() => {
    expect(props.addDangerToast).toHaveBeenCalledWith(
      'Failed to load structure',
    );
  });
});
test('details tab save still works after viewing structure tabs', async () => {
  mockedPut.mockResolvedValue({});
  const props = createProps();
  render(<SemanticViewEditModal {...props} />);
  await waitFor(() => expect(mockedGet).toHaveBeenCalled());
  // Round-trip through the dimensions tab and back before saving.
  await userEvent.click(screen.getByRole('tab', { name: /dimensions/i }));
  await userEvent.click(screen.getByRole('tab', { name: /details/i }));
  await userEvent.click(screen.getByRole('button', { name: /save/i }));
  await waitFor(() =>
    expect(mockedPut).toHaveBeenCalledWith({
      endpoint: '/api/v1/semantic_view/7',
      jsonPayload: {
        description: 'old description',
        cache_timeout: 60,
      },
    }),
  );
  expect(props.addSuccessToast).toHaveBeenCalledWith('Semantic view updated');
});

View File

@@ -0,0 +1,241 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useState, useEffect } from 'react';
import { t } from '@apache-superset/core/translation';
import { SupersetClient, getClientErrorObject } from '@superset-ui/core';
import { Input, InputNumber } from '@superset-ui/core/components';
import { Icons } from '@superset-ui/core/components/Icons';
import Tabs from '@superset-ui/core/components/Tabs';
import {
Table,
type ColumnsType,
TableSize,
} from '@superset-ui/core/components/Table';
import { Alert } from '@apache-superset/core/components';
import { styled } from '@apache-superset/core/theme';
import {
StandardModal,
ModalFormField,
MODAL_LARGE_WIDTH,
} from 'src/components/Modal';
// Wrapper that gives the modal body consistent theme-based padding.
const ModalContent = styled.div`
  padding: ${({ theme }) => theme.sizeUnit * 4}px;
`;
// Value type emitted by the InputNumber onChange handler.
type InputNumberValue = number | null;
// A dimension row as returned by /api/v1/semantic_view/<id>/structure.
interface SemanticDimension {
  name: string;
  type: string;
  definition: string | null;
  description: string | null;
  grain: string | null;
}
// A metric row as returned by /api/v1/semantic_view/<id>/structure.
interface SemanticMetric {
  name: string;
  type: string;
  definition: string;
  description: string | null;
}
// Shape of the structure endpoint's `result` payload.
interface SemanticViewStructure {
  dimensions: SemanticDimension[];
  metrics: SemanticMetric[];
}
interface SemanticViewEditModalProps {
  // Controls modal visibility.
  show: boolean;
  onHide: () => void;
  // Called after a successful save (e.g. to refresh the parent list).
  onSave: () => void;
  addDangerToast?: (msg: string) => void;
  addSuccessToast?: (msg: string) => void;
  // The view being edited; null renders an empty, non-saving modal.
  semanticView: {
    id: number;
    table_name: string;
    description?: string | null;
    cache_timeout?: number | null;
  } | null;
}
// Column definitions for the read-only dimensions table.
const DIMENSION_COLUMNS: ColumnsType<SemanticDimension> = [
  { title: t('Name'), dataIndex: 'name', key: 'name' },
  { title: t('Type'), dataIndex: 'type', key: 'type' },
  { title: t('Grain'), dataIndex: 'grain', key: 'grain' },
  { title: t('Description'), dataIndex: 'description', key: 'description' },
  { title: t('Expression'), dataIndex: 'definition', key: 'definition' },
];
// Column definitions for the read-only metrics table.
const METRIC_COLUMNS: ColumnsType<SemanticMetric> = [
  { title: t('Name'), dataIndex: 'name', key: 'name' },
  { title: t('Type'), dataIndex: 'type', key: 'type' },
  { title: t('Description'), dataIndex: 'description', key: 'description' },
  { title: t('Definition'), dataIndex: 'definition', key: 'definition' },
];
// Shared banner shown on both structure tabs.
const STRUCTURE_INFO_MESSAGE = t(
  'Structure is managed by the upstream semantic layer and is read-only.',
);
/**
 * Modal for editing a semantic view's mutable details (description and cache
 * timeout) and browsing its read-only structure (dimensions and metrics)
 * fetched from the upstream semantic layer.
 */
export default function SemanticViewEditModal({
  show,
  onHide,
  onSave,
  addDangerToast,
  addSuccessToast,
  semanticView,
}: SemanticViewEditModalProps) {
  // Editable fields, seeded from the view being edited.
  const [description, setDescription] = useState<string>('');
  const [cacheTimeout, setCacheTimeout] = useState<number | null>(null);
  const [saving, setSaving] = useState(false);
  // Read-only structure loaded from the structure endpoint.
  const [structure, setStructure] = useState<SemanticViewStructure | null>(
    null,
  );
  const [structureLoading, setStructureLoading] = useState(false);
  // Re-seed the form whenever a different semantic view is passed in.
  useEffect(() => {
    if (semanticView) {
      setDescription(semanticView.description || '');
      setCacheTimeout(semanticView.cache_timeout ?? null);
    }
  }, [semanticView]);
  // Fetch the structure when the modal opens; clear it when it closes.
  // The `ignore` flag guards against a stale response arriving after the
  // modal closed (or the target view changed), which would otherwise
  // re-populate state that was just reset — the original effect had no
  // such guard.
  useEffect(() => {
    let ignore = false;
    if (show && semanticView) {
      setStructureLoading(true);
      SupersetClient.get({
        endpoint: `/api/v1/semantic_view/${semanticView.id}/structure`,
      })
        .then(({ json }) => {
          if (!ignore) {
            setStructure(json.result);
          }
        })
        .catch(async error => {
          const clientError = await getClientErrorObject(error);
          if (!ignore) {
            addDangerToast?.(
              clientError.error ||
                t(
                  'An error occurred while fetching the semantic view structure',
                ),
            );
          }
        })
        .finally(() => {
          if (!ignore) {
            setStructureLoading(false);
          }
        });
    } else {
      setStructure(null);
    }
    return () => {
      ignore = true;
    };
  }, [show, semanticView]); // eslint-disable-line react-hooks/exhaustive-deps
  // PUT only the editable fields; structure is read-only and never sent.
  // On success: toast, notify the parent (onSave) and close the modal.
  const handleSave = async () => {
    if (!semanticView) return;
    setSaving(true);
    try {
      await SupersetClient.put({
        endpoint: `/api/v1/semantic_view/${semanticView.id}`,
        jsonPayload: {
          // Empty description is normalized to null.
          description: description || null,
          cache_timeout: cacheTimeout,
        },
      });
      addSuccessToast?.(t('Semantic view updated'));
      onSave();
      onHide();
    } catch (error) {
      const clientError = await getClientErrorObject(error);
      addDangerToast?.(
        clientError.error ||
          t('An error occurred while saving the semantic view'),
      );
    } finally {
      setSaving(false);
    }
  };
  const dimensions = structure?.dimensions ?? [];
  const metrics = structure?.metrics ?? [];
  return (
    <StandardModal
      show={show}
      onHide={onHide}
      onSave={handleSave}
      title={t('Edit %s', semanticView?.table_name || '')}
      icon={<Icons.EditOutlined />}
      isEditMode
      width={MODAL_LARGE_WIDTH}
      saveLoading={saving}
      contentLoading={structureLoading}
    >
      <ModalContent>
        <Tabs>
          {/* Editable details */}
          <Tabs.TabPane tab={t('Details')} key="details">
            <ModalFormField label={t('Description')}>
              <Input.TextArea
                value={description}
                onChange={e => setDescription(e.target.value)}
                rows={4}
              />
            </ModalFormField>
            <ModalFormField label={t('Cache timeout')}>
              <InputNumber
                value={cacheTimeout}
                onChange={value => setCacheTimeout(value as InputNumberValue)}
                min={0}
                placeholder={t('Duration in seconds')}
                style={{ width: '100%' }}
              />
            </ModalFormField>
          </Tabs.TabPane>
          {/* Read-only structure: dimensions */}
          <Tabs.TabPane
            tab={t('Dimensions (%s)', dimensions.length)}
            key="dimensions"
          >
            <Alert
              type="info"
              message={STRUCTURE_INFO_MESSAGE}
              showIcon
              css={{ marginBottom: 16 }}
            />
            <Table<SemanticDimension>
              data={dimensions}
              columns={DIMENSION_COLUMNS}
              size={TableSize.Small}
              rowKey="name"
              usePagination={false}
            />
          </Tabs.TabPane>
          {/* Read-only structure: metrics */}
          <Tabs.TabPane tab={t('Metrics (%s)', metrics.length)} key="metrics">
            <Alert
              type="info"
              message={STRUCTURE_INFO_MESSAGE}
              showIcon
              css={{ marginBottom: 16 }}
            />
            <Table<SemanticMetric>
              data={metrics}
              columns={METRIC_COLUMNS}
              size={TableSize.Small}
              rowKey="name"
              usePagination={false}
            />
          </Tabs.TabPane>
        </Tabs>
      </ModalContent>
    </StandardModal>
  );
}

View File

@@ -44,6 +44,10 @@ import {
DatasetSelectLabel,
} from 'src/features/datasets/DatasetSelectLabel';
import { Icons } from '@superset-ui/core/components/Icons';
import {
datasetLabel,
datasetLabelLower,
} from 'src/features/semanticLayers/label';
export interface ChartCreationProps extends RouteComponentProps {
user: UserWithPermissionsAndRoles;
@@ -332,18 +336,22 @@ export class ChartCreation extends PureComponent<
<h3>{t('Create a new chart')}</h3>
<Steps direction="vertical" size="small">
<Steps.Step
title={<StyledStepTitle>{t('Choose a dataset')}</StyledStepTitle>}
title={
<StyledStepTitle>
{t('Choose a %s', datasetLabelLower())}
</StyledStepTitle>
}
status={this.state.datasource?.value ? 'finish' : 'process'}
description={
<StyledStepDescription className="dataset">
<AsyncSelect
autoFocus
ariaLabel={t('Dataset')}
ariaLabel={datasetLabel()}
name="select-datasource"
onChange={this.changeDatasource}
options={this.loadDatasources}
optionFilterProps={['id', 'table_name']}
placeholder={t('Choose a dataset')}
placeholder={t('Choose a %s', datasetLabelLower())}
showSearch
value={this.state.datasource}
/>
@@ -370,7 +378,10 @@ export class ChartCreation extends PureComponent<
<div className="footer">
{isButtonDisabled && (
<span>
{t('Please select both a Dataset and a Chart type to proceed')}
{t(
'Please select both a %s and a Chart type to proceed',
datasetLabel(),
)}
</span>
)}
<Button

View File

@@ -83,6 +83,7 @@ import { findPermission } from 'src/utils/findPermission';
import { QueryObjectColumns } from 'src/views/CRUD/types';
import { WIDER_DROPDOWN_WIDTH } from 'src/components/ListView/utils';
import { Tag } from 'src/components/Tag';
import { datasetLabel } from 'src/features/semanticLayers/label';
const FlexRowContainer = styled.div`
align-items: center;
@@ -430,7 +431,7 @@ function ChartList(props: ChartListProps) {
</Tooltip>
);
},
Header: t('Dataset'),
Header: datasetLabel(),
accessor: 'datasource_id',
disableSortBy: true,
size: 'xl',
@@ -658,7 +659,7 @@ function ChartList(props: ChartListProps) {
}),
},
{
Header: t('Dataset'),
Header: datasetLabel(),
key: 'dataset',
id: 'datasource_id',
input: 'select',

View File

@@ -17,9 +17,15 @@
* under the License.
*/
import { t } from '@apache-superset/core/translation';
import { getExtensionsRegistry, SupersetClient } from '@superset-ui/core';
import { styled } from '@apache-superset/core/theme';
import {
getExtensionsRegistry,
SupersetClient,
isFeatureEnabled,
FeatureFlag,
} from '@superset-ui/core';
import { css, styled, useTheme } from '@apache-superset/core/theme';
import { useState, useMemo, useEffect, useCallback } from 'react';
import type { CellProps } from 'react-table';
import rison from 'rison';
import { useSelector } from 'react-redux';
import { useQueryParams, BooleanParam } from 'use-query-params';
@@ -33,7 +39,9 @@ import {
import withToasts from 'src/components/MessageToasts/withToasts';
import SubMenu, { SubMenuProps } from 'src/features/home/SubMenu';
import {
Button,
DeleteModal,
Dropdown,
Tooltip,
List,
Loading,
@@ -43,6 +51,7 @@ import {
ListView,
ListViewFilterOperator as FilterOperator,
ListViewFilters,
type ListViewFetchDataConfig,
} from 'src/components';
import { Typography } from '@superset-ui/core/components/Typography';
import { getUrlParam } from 'src/utils/urlUtils';
@@ -55,10 +64,17 @@ import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes';
import type { MenuObjectProps } from 'src/types/bootstrapTypes';
import DatabaseModal from 'src/features/databases/DatabaseModal';
import UploadDataModal from 'src/features/databases/UploadDataModel';
import SemanticLayerModal from 'src/features/semanticLayers/SemanticLayerModal';
import { DatabaseObject } from 'src/features/databases/types';
import { QueryObjectColumns } from 'src/views/CRUD/types';
import { WIDER_DROPDOWN_WIDTH } from 'src/components/ListView/utils';
import { ModalTitleWithIcon } from 'src/components/ModalTitleWithIcon';
import type Owner from 'src/types/Owner';
import {
databaseLabel,
databaseLabelLower,
databasesLabel,
} from 'src/features/semanticLayers/label';
const extensionsRegistry = getExtensionsRegistry();
const DatabaseDeleteRelatedExtension = extensionsRegistry.get(
@@ -69,6 +85,14 @@ const dbConfigExtraExtension = extensionsRegistry.get(
);
const PAGE_SIZE = 25;
const SEMANTIC_LAYERS_FLAG = 'SEMANTIC_LAYERS' as FeatureFlag;
type ConnectionItem = DatabaseObject & {
source_type?: 'database' | 'semantic_layer';
sl_type?: string;
changed_by?: Owner;
changed_on_delta_humanized?: string;
};
interface DatabaseDeleteObject extends DatabaseObject {
charts: any;
@@ -108,20 +132,106 @@ function DatabaseList({
addSuccessToast,
user,
}: DatabaseListProps) {
const theme = useTheme();
const showSemanticLayers = isFeatureEnabled(SEMANTIC_LAYERS_FLAG);
// Standard database list view resource (used when SL flag is OFF)
const {
state: {
loading,
resourceCount: databaseCount,
resourceCollection: databases,
loading: dbLoading,
resourceCount: dbCount,
resourceCollection: dbCollection,
},
hasPerm,
fetchData,
refreshData,
fetchData: dbFetchData,
refreshData: dbRefreshData,
} = useListViewResource<DatabaseObject>(
'database',
t('database'),
databaseLabelLower(),
addDangerToast,
);
// Combined endpoint state (used when SL flag is ON)
const [combinedItems, setCombinedItems] = useState<ConnectionItem[]>([]);
const [combinedCount, setCombinedCount] = useState(0);
const [combinedLoading, setCombinedLoading] = useState(true);
const [lastFetchConfig, setLastFetchConfig] =
useState<ListViewFetchDataConfig | null>(null);
const combinedFetchData = useCallback(
(config: ListViewFetchDataConfig) => {
setLastFetchConfig(config);
setCombinedLoading(true);
const { pageIndex, pageSize, sortBy, filters: filterValues } = config;
const sourceTypeFilter = filterValues.find(f => f.id === 'source_type');
const otherFilters = filterValues
.filter(f => f.id !== 'source_type')
.filter(
({ value }) => value !== '' && value !== null && value !== undefined,
)
.map(({ id, operator: opr, value }) => ({
col: id,
opr,
value:
value && typeof value === 'object' && 'value' in value
? value.value
: value,
}));
const sourceTypeValue =
sourceTypeFilter?.value && typeof sourceTypeFilter.value === 'object'
? (sourceTypeFilter.value as { value: string }).value
: (sourceTypeFilter?.value as string | undefined);
if (sourceTypeValue) {
otherFilters.push({
col: 'source_type',
opr: 'eq',
value: sourceTypeValue,
});
}
const queryParams = rison.encode_uri({
order_column: sortBy[0].id,
order_direction: sortBy[0].desc ? 'desc' : 'asc',
page: pageIndex,
page_size: pageSize,
...(otherFilters.length ? { filters: otherFilters } : {}),
});
return SupersetClient.get({
endpoint: `/api/v1/semantic_layer/connections/?q=${queryParams}`,
})
.then(({ json = {} }) => {
setCombinedItems(json.result);
setCombinedCount(json.count);
})
.catch(() => {
addDangerToast(t('An error occurred while fetching connections'));
})
.finally(() => {
setCombinedLoading(false);
});
},
[addDangerToast],
);
const combinedRefreshData = useCallback(() => {
if (lastFetchConfig) {
return combinedFetchData(lastFetchConfig);
}
return undefined;
}, [lastFetchConfig, combinedFetchData]);
// Select the right data source based on feature flag
const loading = showSemanticLayers ? combinedLoading : dbLoading;
const databaseCount = showSemanticLayers ? combinedCount : dbCount;
const databases: ConnectionItem[] = showSemanticLayers
? combinedItems
: dbCollection;
const fetchData = showSemanticLayers ? combinedFetchData : dbFetchData;
const refreshData = showSemanticLayers ? combinedRefreshData : dbRefreshData;
const fullUser = useSelector<any, UserWithPermissionsAndRoles>(
state => state.user,
);
@@ -148,6 +258,13 @@ function DatabaseList({
useState<boolean>(false);
const [columnarUploadDataModalOpen, setColumnarUploadDataModalOpen] =
useState<boolean>(false);
const [semanticLayerModalOpen, setSemanticLayerModalOpen] =
useState<boolean>(false);
const [slCurrentlyEditing, setSlCurrentlyEditing] = useState<string | null>(
null,
);
const [slCurrentlyDeleting, setSlCurrentlyDeleting] =
useState<ConnectionItem | null>(null);
const [allowUploads, setAllowUploads] = useState<boolean>(false);
const isAdmin = isUserAdmin(fullUser);
@@ -316,22 +433,67 @@ function DatabaseList({
const menuData: SubMenuProps = {
activeChild: 'Databases',
dropDownLinks: filteredDropDown,
name: t('Databases'),
name: databasesLabel(),
};
if (canCreate) {
menuData.buttons = [
{
'data-test': 'btn-create-database',
icon: <Icons.PlusOutlined iconSize="m" />,
name: t('Database'),
buttonStyle: 'primary',
onClick: () => {
// Ensure modal will be opened in add mode
handleDatabaseEditModal({ modalOpen: true });
const openDatabaseModal = () =>
handleDatabaseEditModal({ modalOpen: true });
if (isFeatureEnabled(SEMANTIC_LAYERS_FLAG)) {
menuData.buttons = [
{
name: t('New'),
buttonStyle: 'primary',
component: (
<Dropdown
menu={{
items: [
{
key: 'database',
label: t('Database'),
onClick: openDatabaseModal,
},
{
key: 'semantic-layer',
label: t('Semantic Layer'),
onClick: () => {
setSemanticLayerModalOpen(true);
},
},
],
}}
trigger={['click']}
>
<Button
data-test="btn-create-new"
buttonStyle="primary"
icon={<Icons.PlusOutlined iconSize="m" />}
>
{t('New')}
<Icons.DownOutlined
iconSize="s"
css={css`
margin-left: ${theme.sizeUnit * 1.5}px;
margin-right: -${theme.sizeUnit * 2}px;
`}
/>
</Button>
</Dropdown>
),
},
},
];
];
} else {
menuData.buttons = [
{
'data-test': 'btn-create-database',
icon: <Icons.PlusOutlined iconSize="m" />,
name: databaseLabel(),
buttonStyle: 'primary',
onClick: openDatabaseModal,
},
];
}
}
const handleDatabaseExport = useCallback(
@@ -345,9 +507,11 @@ function DatabaseList({
await handleResourceExport('database', [database.id], () => {
setPreparingExport(false);
});
} catch (error) {
} catch {
setPreparingExport(false);
addDangerToast(t('There was an issue exporting the database'));
addDangerToast(
t('There was an issue exporting the %s', databaseLabelLower()),
);
}
},
[addDangerToast, setPreparingExport],
@@ -401,6 +565,23 @@ function DatabaseList({
const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }];
function handleSemanticLayerDelete(item: ConnectionItem) {
SupersetClient.delete({
endpoint: `/api/v1/semantic_layer/${item.uuid}`,
}).then(
() => {
refreshData();
addSuccessToast(t('Deleted: %s', item.database_name));
setSlCurrentlyDeleting(null);
},
createErrorHandler(errMsg =>
addDangerToast(
t('There was an issue deleting %s: %s', item.database_name, errMsg),
),
),
);
}
const columns = useMemo(
() => [
{
@@ -413,7 +594,7 @@ function DatabaseList({
accessor: 'backend',
Header: t('Backend'),
size: 'xl',
disableSortBy: true, // TODO: api support for sorting by 'backend'
disableSortBy: true,
id: 'backend',
},
{
@@ -427,13 +608,12 @@ function DatabaseList({
<span>{t('AQE')}</span>
</Tooltip>
),
Cell: ({
row: {
original: { allow_run_async: allowRunAsync },
},
}: {
row: { original: { allow_run_async: boolean } };
}) => <BooleanDisplay value={allowRunAsync} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_run_async)} />
),
size: 'sm',
id: 'allow_run_async',
},
@@ -448,33 +628,36 @@ function DatabaseList({
<span>{t('DML')}</span>
</Tooltip>
),
Cell: ({
row: {
original: { allow_dml: allowDML },
},
}: any) => <BooleanDisplay value={allowDML} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_dml)} />
),
size: 'sm',
id: 'allow_dml',
},
{
accessor: 'allow_file_upload',
Header: t('File upload'),
Cell: ({
row: {
original: { allow_file_upload: allowFileUpload },
},
}: any) => <BooleanDisplay value={allowFileUpload} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.allow_file_upload)} />
),
size: 'md',
id: 'allow_file_upload',
},
{
accessor: 'expose_in_sqllab',
Header: t('Expose in SQL Lab'),
Cell: ({
row: {
original: { expose_in_sqllab: exposeInSqllab },
},
}: any) => <BooleanDisplay value={exposeInSqllab} />,
Cell: ({ row: { original } }: CellProps<ConnectionItem>) =>
original.source_type === 'semantic_layer' ? (
<span></span>
) : (
<BooleanDisplay value={Boolean(original.expose_in_sqllab)} />
),
size: 'md',
id: 'expose_in_sqllab',
},
@@ -486,14 +669,60 @@ function DatabaseList({
changed_on_delta_humanized: changedOn,
},
},
}: any) => <ModifiedInfo date={changedOn} user={changedBy} />,
}: CellProps<ConnectionItem>) => (
<ModifiedInfo date={changedOn || ''} user={changedBy} />
),
Header: t('Last modified'),
accessor: 'changed_on_delta_humanized',
size: 'xl',
id: 'changed_on_delta_humanized',
},
{
Cell: ({ row: { original } }: any) => {
Cell: ({ row: { original } }: CellProps<ConnectionItem>) => {
const isSemanticLayer = original.source_type === 'semantic_layer';
if (isSemanticLayer) {
if (!canEdit && !canDelete) return null;
return (
<Actions className="actions">
{canDelete && (
<Tooltip
id="delete-action-tooltip"
title={t('Delete')}
placement="bottom"
>
<span
role="button"
tabIndex={0}
className="action-button"
onClick={() => setSlCurrentlyDeleting(original)}
>
<Icons.DeleteOutlined iconSize="l" />
</span>
</Tooltip>
)}
{canEdit && (
<Tooltip
id="edit-action-tooltip"
title={t('Edit')}
placement="bottom"
>
<span
role="button"
tabIndex={0}
className="action-button"
onClick={() =>
setSlCurrentlyEditing(original.uuid ?? null)
}
>
<Icons.EditOutlined iconSize="l" />
</span>
</Tooltip>
)}
</Actions>
);
}
const handleEdit = () =>
handleDatabaseEditModal({ database: original, modalOpen: true });
const handleDelete = () => openDatabaseDeleteModal(original);
@@ -564,7 +793,7 @@ function DatabaseList({
>
<Tooltip
id="delete-action-tooltip"
title={t('Delete database')}
title={t('Delete %s', databaseLabelLower())}
placement="bottom"
>
<Icons.DeleteOutlined iconSize="l" />
@@ -579,6 +808,12 @@ function DatabaseList({
hidden: !canEdit && !canDelete,
disableSortBy: true,
},
{
accessor: 'source_type',
hidden: true,
disableSortBy: true,
id: 'source_type',
},
{
accessor: QueryObjectColumns.ChangedBy,
hidden: true,
@@ -596,8 +831,8 @@ function DatabaseList({
],
);
const filters: ListViewFilters = useMemo(
() => [
const filters: ListViewFilters = useMemo(() => {
const baseFilters: ListViewFilters = [
{
Header: t('Name'),
key: 'search',
@@ -605,62 +840,84 @@ function DatabaseList({
input: 'search',
operator: FilterOperator.Contains,
},
{
Header: t('Expose in SQL Lab'),
key: 'expose_in_sql_lab',
id: 'expose_in_sqllab',
];
if (showSemanticLayers) {
baseFilters.push({
Header: t('Source'),
key: 'source_type',
id: 'source_type',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
{ label: t('Database'), value: 'database' },
{ label: t('Semantic Layer'), value: 'semantic_layer' },
],
},
{
Header: (
<Tooltip
id="allow-run-async-filter-header-tooltip"
title={t('Asynchronous query execution')}
placement="top"
>
<span>{t('AQE')}</span>
</Tooltip>
),
key: 'allow_run_async',
id: 'allow_run_async',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: t('Modified by'),
key: 'changed_by',
id: 'changed_by',
input: 'select',
operator: FilterOperator.RelationOneMany,
unfilteredLabel: t('All'),
fetchSelects: createFetchRelated(
'database',
'changed_by',
createErrorHandler(errMsg =>
t(
'An error occurred while fetching dataset datasource values: %s',
errMsg,
),
});
}
if (!showSemanticLayers) {
baseFilters.push(
{
Header: t('Expose in SQL Lab'),
key: 'expose_in_sql_lab',
id: 'expose_in_sqllab',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: (
<Tooltip
id="allow-run-async-filter-header-tooltip"
title={t('Asynchronous query execution')}
placement="top"
>
<span>{t('AQE')}</span>
</Tooltip>
),
user,
),
paginate: true,
popupStyle: { minWidth: WIDER_DROPDOWN_WIDTH },
},
],
[user],
);
key: 'allow_run_async',
id: 'allow_run_async',
input: 'select',
operator: FilterOperator.Equals,
unfilteredLabel: t('All'),
selects: [
{ label: t('Yes'), value: true },
{ label: t('No'), value: false },
],
},
{
Header: t('Modified by'),
key: 'changed_by',
id: 'changed_by',
input: 'select',
operator: FilterOperator.RelationOneMany,
unfilteredLabel: t('All'),
fetchSelects: createFetchRelated(
'database',
'changed_by',
createErrorHandler(errMsg =>
t(
'An error occurred while fetching %s values: %s',
databaseLabelLower(),
errMsg,
),
),
user,
),
paginate: true,
popupStyle: { minWidth: WIDER_DROPDOWN_WIDTH },
},
);
}
return baseFilters;
}, [showSemanticLayers]);
return (
<>
@@ -703,12 +960,54 @@ function DatabaseList({
allowedExtensions={COLUMNAR_EXTENSIONS}
type="columnar"
/>
<SemanticLayerModal
show={semanticLayerModalOpen}
onHide={() => {
setSemanticLayerModalOpen(false);
refreshData();
}}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
/>
<SemanticLayerModal
show={!!slCurrentlyEditing}
onHide={() => {
setSlCurrentlyEditing(null);
refreshData();
}}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
semanticLayerUuid={slCurrentlyEditing ?? undefined}
/>
{slCurrentlyDeleting && (
<DeleteModal
description={
<p>
{t('Are you sure you want to delete')}{' '}
<b>{slCurrentlyDeleting.database_name}</b>?
</p>
}
onConfirm={() => {
if (slCurrentlyDeleting) {
handleSemanticLayerDelete(slCurrentlyDeleting);
}
}}
onHide={() => setSlCurrentlyDeleting(null)}
open
title={
<ModalTitleWithIcon
icon={<Icons.DeleteOutlined />}
title={t('Delete Semantic Layer?')}
/>
}
/>
)}
{databaseCurrentlyDeleting && (
<DeleteModal
description={
<>
<p>
{t('The database')}{' '}
{t('The %s', databaseLabelLower())}{' '}
<b>{databaseCurrentlyDeleting.database_name}</b>{' '}
{t(
'is linked to %s charts that appear on %s dashboards and users have %s SQL Lab tabs using this database open. Are you sure you want to continue? Deleting the database will break those objects.',
@@ -816,7 +1115,7 @@ function DatabaseList({
title={
<ModalTitleWithIcon
icon={<Icons.DeleteOutlined />}
title={t('Delete Database?')}
title={t('Delete %s?', databaseLabel())}
/>
}
/>

View File

@@ -31,6 +31,7 @@ import {
mockRelatedCharts,
mockRelatedDashboards,
mockHandleResourceExport,
mockDatasetListEndpoints,
API_ENDPOINTS,
} from './DatasetList.testHelpers';
@@ -98,7 +99,7 @@ test('typing in search triggers debounced API call with search filter', async ()
// Record initial API calls
const initialCallCount = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Type search query and submit with Enter to trigger the debounced fetch
@@ -107,14 +108,16 @@ test('typing in search triggers debounced API call with search filter', async ()
// Wait for debounced API call
await waitFor(
() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(initialCallCount);
},
{ timeout: 5000 },
);
// Verify the latest API call includes search filter in URL
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED);
const latestCall = calls[calls.length - 1];
const { url } = latestCall;
@@ -136,8 +139,7 @@ test('typing in search triggers debounced API call with search filter', async ()
test('500 error triggers danger toast with error message', async () => {
const addDangerToast = jest.fn();
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
status: 500,
body: { message: 'Internal Server Error' },
});
@@ -173,8 +175,7 @@ test('500 error triggers danger toast with error message', async () => {
test('network timeout triggers danger toast', async () => {
const addDangerToast = jest.fn();
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
throws: new Error('Network timeout'),
});
@@ -213,8 +214,7 @@ test('clicking delete opens modal with related objects count', async () => {
// Set up delete mocks
setupDeleteMocks(datasetToDelete.id);
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetToDelete],
count: 1,
});
@@ -254,8 +254,7 @@ test('clicking delete opens modal with related objects count', async () => {
test('clicking export calls handleResourceExport with dataset ID', async () => {
const datasetToExport = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetToExport],
count: 1,
});
@@ -288,8 +287,7 @@ test('clicking duplicate opens modal and submits duplicate request', async () =>
kind: 'virtual',
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetToDuplicate],
count: 1,
});
@@ -312,7 +310,7 @@ test('clicking duplicate opens modal and submits duplicate request', async () =>
// Track initial dataset list API calls BEFORE duplicate action
const initialDatasetCallCount = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
const row = screen.getByText(datasetToDuplicate.table_name).closest('tr');
@@ -355,7 +353,9 @@ test('clicking duplicate opens modal and submits duplicate request', async () =>
// Verify refreshData() is called (observable via new dataset list API call)
await waitFor(
() => {
const datasetCalls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const datasetCalls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(datasetCalls.length).toBeGreaterThan(initialDatasetCallCount);
},
{ timeout: 3000 },
@@ -376,8 +376,7 @@ test('certified dataset shows badge and tooltip with certification details', asy
}),
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [certifiedDataset],
count: 1,
});
@@ -417,8 +416,7 @@ test('dataset with warning shows icon and tooltip with markdown content', async
}),
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetWithWarning],
count: 1,
});
@@ -452,8 +450,7 @@ test('dataset with warning shows icon and tooltip with markdown content', async
test('dataset name links to Explore with correct URL and accessible label', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);

View File

@@ -27,6 +27,7 @@ import {
mockAdminUser,
mockDatasets,
setupBulkDeleteMocks,
mockDatasetListEndpoints,
API_ENDPOINTS,
} from './DatasetList.testHelpers';
@@ -72,8 +73,7 @@ test('ListView provider correctly merges filter + sort + pagination state on ref
// the ListView provider correctly merges them for the API call.
// Component tests verify individual pieces persist; this verifies they COMBINE correctly.
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: mockDatasets,
count: mockDatasets.length,
});
@@ -91,31 +91,33 @@ test('ListView provider correctly merges filter + sort + pagination state on ref
});
const callsBeforeSort = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
await userEvent.click(nameHeader);
// Wait for sort-triggered refetch to complete before applying filter
await waitFor(() => {
expect(
fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS).length,
fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED).length,
).toBeGreaterThan(callsBeforeSort);
});
// 2. Apply a filter using selectOption helper
const beforeFilterCallCount = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
await selectOption('Virtual', 'Type');
// Wait for filter API call to complete
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(beforeFilterCallCount);
});
// 3. Verify the final API call contains ALL three state pieces merged correctly
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED);
const latestCall = calls[calls.length - 1];
const { url } = latestCall;
@@ -151,8 +153,7 @@ test('bulk action orchestration: selection → action → cleanup cycle works co
setupBulkDeleteMocks();
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: mockDatasets,
count: mockDatasets.length,
});
@@ -218,7 +219,7 @@ test('bulk action orchestration: selection → action → cleanup cycle works co
// Capture datasets call count before confirming
const datasetsCallCountBeforeDelete = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
const confirmButton = within(modal)
@@ -242,7 +243,7 @@ test('bulk action orchestration: selection → action → cleanup cycle works co
// Wait for datasets refetch after delete
await waitFor(() => {
const datasetsCallCount = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
expect(datasetsCallCount).toBeGreaterThan(datasetsCallCountBeforeDelete);
});

View File

@@ -16,7 +16,13 @@
* specific language governing permissions and limitations
* under the License.
*/
import { act, screen, waitFor, within } from '@testing-library/react';
import {
act,
fireEvent,
screen,
waitFor,
within,
} from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import fetchMock from 'fetch-mock';
import rison from 'rison';
@@ -33,6 +39,7 @@ import {
mockHandleResourceExport,
assertOnlyExpectedCalls,
API_ENDPOINTS,
mockDatasetListEndpoints,
getDeleteRouteName,
} from './DatasetList.testHelpers';
@@ -113,8 +120,7 @@ const setupErrorTestScenario = ({
});
// Configure fetchMock to return single dataset
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
// Render component with toast mocks
renderDatasetList(mockAdminUser, {
@@ -157,7 +163,7 @@ test('required API endpoints are called and no unmocked calls on initial render'
// assertOnlyExpectedCalls checks: 1) no unmatched calls, 2) each expected endpoint was called
assertOnlyExpectedCalls([
API_ENDPOINTS.DATASETS_INFO, // Permission check
API_ENDPOINTS.DATASETS, // Main dataset list data
API_ENDPOINTS.DATASOURCE_COMBINED, // Main dataset list data
]);
});
@@ -197,8 +203,7 @@ test('renders all required column headers', async () => {
test('displays dataset name in Name column', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -211,8 +216,7 @@ test('displays dataset type as Physical or Virtual', async () => {
const physicalDataset = mockDatasets[0]; // kind: 'physical'
const virtualDataset = mockDatasets[1]; // kind: 'virtual'
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [physicalDataset, virtualDataset],
count: 2,
});
@@ -229,8 +233,7 @@ test('displays dataset type as Physical or Virtual', async () => {
test('displays database name in Database column', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -244,8 +247,7 @@ test('displays database name in Database column', async () => {
test('displays schema name in Schema column', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -257,8 +259,7 @@ test('displays schema name in Schema column', async () => {
test('displays last modified date in humanized format', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -283,7 +284,7 @@ test('sorting by Name column updates API call with sort parameter', async () =>
// Record initial calls
const initialCalls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Click Name header to sort
@@ -291,12 +292,14 @@ test('sorting by Name column updates API call with sort parameter', async () =>
// Wait for new API call
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(initialCalls);
});
// Verify latest call includes sort parameter
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED);
const latestCall = calls[calls.length - 1];
const { url } = latestCall;
@@ -317,17 +320,19 @@ test('sorting by Database column updates sort parameter', async () => {
});
const initialCalls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
await userEvent.click(databaseHeader);
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(initialCalls);
});
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED);
const { url } = calls[calls.length - 1];
expect(url).toMatch(/order_column|sort/);
});
@@ -345,17 +350,19 @@ test('sorting by Last modified column updates sort parameter', async () => {
});
const initialCalls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
await userEvent.click(modifiedHeader);
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(initialCalls);
});
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED);
const { url } = calls[calls.length - 1];
expect(url).toMatch(/order_column|sort/);
});
@@ -363,8 +370,7 @@ test('sorting by Last modified column updates sort parameter', async () => {
test('export button triggers handleResourceExport with dataset ID', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -392,8 +398,7 @@ test('delete button opens modal with dataset details', async () => {
setupDeleteMocks(dataset.id);
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -415,8 +420,7 @@ test('delete action successfully deletes dataset and refreshes list', async () =
const datasetToDelete = mockDatasets[0];
setupDeleteMocks(datasetToDelete.id);
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetToDelete],
count: 1,
});
@@ -442,7 +446,7 @@ test('delete action successfully deletes dataset and refreshes list', async () =
// Track API calls before confirm
const callsBefore = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Click confirm - find the danger button (last delete button in modal)
@@ -468,7 +472,7 @@ test('delete action successfully deletes dataset and refreshes list', async () =
// List refreshes
await waitFor(() => {
expect(
fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS).length,
fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED).length,
).toBeGreaterThan(callsBefore);
});
});
@@ -477,8 +481,7 @@ test('delete action cancel closes modal without deleting', async () => {
const dataset = mockDatasets[0];
setupDeleteMocks(dataset.id);
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -518,8 +521,7 @@ test('duplicate action successfully duplicates virtual dataset', async () => {
const virtualDataset = mockDatasets[1]; // Virtual dataset (kind: 'virtual')
setupDuplicateMocks();
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [virtualDataset], count: 1 });
mockDatasetListEndpoints({ result: [virtualDataset], count: 1 });
renderDatasetList(mockAdminUser, {
addSuccessToast: mockAddSuccessToast,
@@ -542,7 +544,7 @@ test('duplicate action successfully duplicates virtual dataset', async () => {
// Track API calls before submit
const callsBefore = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Submit
@@ -564,7 +566,7 @@ test('duplicate action successfully duplicates virtual dataset', async () => {
// List refreshes
await waitFor(() => {
expect(
fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS).length,
fetchMock.callHistory.calls(API_ENDPOINTS.DATASOURCE_COMBINED).length,
).toBeGreaterThan(callsBefore);
});
});
@@ -573,8 +575,7 @@ test('duplicate button visible only for virtual datasets', async () => {
const physicalDataset = mockDatasets[0]; // kind: 'physical'
const virtualDataset = mockDatasets[1]; // kind: 'virtual'
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [physicalDataset, virtualDataset],
count: 2,
});
@@ -633,8 +634,7 @@ test('bulk select enables checkboxes', async () => {
}, 30000);
test('selecting all datasets shows correct count in toolbar', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: mockDatasets,
count: mockDatasets.length,
});
@@ -673,8 +673,7 @@ test('selecting all datasets shows correct count in toolbar', async () => {
}, 30000);
test('bulk export triggers export with selected IDs', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [mockDatasets[0]],
count: 1,
});
@@ -690,7 +689,7 @@ test('bulk export triggers export with selected IDs', async () => {
await userEvent.click(bulkSelectButton);
// Wait for bulk select controls container to appear first (fast query)
await screen.findByTestId('bulk-select-controls');
const bulkSelectControls = await screen.findByTestId('bulk-select-controls');
// Wait for table checkboxes to render (findAllByRole is faster than waitFor with getAll)
const table = screen.getByTestId('listview-table');
@@ -704,20 +703,32 @@ test('bulk export triggers export with selected IDs', async () => {
expect(datasetRow).toBeInTheDocument();
await userEvent.click(within(datasetRow!).getByRole('checkbox'));
// Find and click bulk export button (fail-fast if not found)
const exportButton = await screen.findByRole('button', { name: /export/i });
// Wait for selection state to register before triggering export
await waitFor(() => {
expect(screen.getByTestId('bulk-select-copy')).toHaveTextContent(
/1 Selected/i,
);
});
// Scope to bulk toolbar to avoid matching row-level export actions
const exportButton = await within(bulkSelectControls).findByRole('button', {
name: /export/i,
});
await userEvent.click(exportButton);
await waitFor(() => {
expect(mockHandleResourceExport).toHaveBeenCalled();
expect(mockHandleResourceExport).toHaveBeenCalledWith(
'dataset',
[mockDatasets[0].id],
expect.any(Function),
);
});
});
}, 30000);
test('bulk delete opens confirmation modal', async () => {
setupBulkDeleteMocks();
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [mockDatasets[0]],
count: 1,
});
@@ -823,8 +834,7 @@ test('certified badge appears for certified datasets', async () => {
}),
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [certifiedDataset],
count: 1,
});
@@ -854,8 +864,7 @@ test('warning icon appears for datasets with warnings', async () => {
}),
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetWithWarning],
count: 1,
});
@@ -883,8 +892,7 @@ test('info tooltip appears for datasets with descriptions', async () => {
description: 'Sales data from Q4 2024',
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetWithDescription],
count: 1,
});
@@ -909,8 +917,7 @@ test('info tooltip appears for datasets with descriptions', async () => {
test('dataset name links to Explore page', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -930,8 +937,7 @@ test('dataset name links to Explore page', async () => {
test('physical dataset shows delete, export, and edit actions (no duplicate)', async () => {
const physicalDataset = mockDatasets[0]; // kind: 'physical'
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [physicalDataset],
count: 1,
});
@@ -962,8 +968,7 @@ test('physical dataset shows delete, export, and edit actions (no duplicate)', a
test('virtual dataset shows delete, export, edit, and duplicate actions', async () => {
const virtualDataset = mockDatasets[1]; // kind: 'virtual'
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [virtualDataset], count: 1 });
mockDatasetListEndpoints({ result: [virtualDataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -992,8 +997,7 @@ test('edit action is enabled for dataset owner', async () => {
owners: [{ id: mockAdminUser.userId, username: 'admin' }],
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -1016,8 +1020,7 @@ test('edit action is disabled for non-owner', async () => {
owners: [{ id: 999, username: 'other_user' }], // Different user
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
// Use a non-admin user to test ownership check
const regularUser = {
@@ -1046,8 +1049,7 @@ test('all action buttons are clickable and enabled for admin user', async () =>
owners: [{ id: mockAdminUser.userId, username: 'admin' }],
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [virtualDataset], count: 1 });
mockDatasetListEndpoints({ result: [virtualDataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -1082,8 +1084,7 @@ test('all action buttons are clickable and enabled for admin user', async () =>
});
test('displays error when initial dataset fetch fails with 500', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
status: 500,
body: { message: 'Internal Server Error' },
});
@@ -1104,8 +1105,7 @@ test('displays error when initial dataset fetch fails with 500', async () => {
});
test('displays error when initial dataset fetch fails with 403 permission denied', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
status: 403,
body: { message: 'Access Denied' },
});
@@ -1119,9 +1119,9 @@ test('displays error when initial dataset fetch fails with 403 permission denied
expect(mockAddDangerToast).toHaveBeenCalled();
});
// Verify toast message contains the 403-specific "Access Denied" text
// Verify toast message contains the generic error text
const toastMessage = String(mockAddDangerToast.mock.calls[0][0]);
expect(toastMessage).toContain('Access Denied');
expect(toastMessage).toContain('An error occurred while fetching datasets');
// No dataset names from mockDatasets should appear in the document
mockDatasets.forEach(dataset => {
@@ -1373,7 +1373,7 @@ test('sort order persists after deleting a dataset', async () => {
// Record initial API calls count
const initialCalls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Click Name header to sort
@@ -1381,12 +1381,16 @@ test('sort order persists after deleting a dataset', async () => {
// Wait for new API call with sort parameter
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(initialCalls);
});
// Record the sort parameter from the API call after sorting
const callsAfterSort = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const callsAfterSort = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
const sortedUrl = callsAfterSort[callsAfterSort.length - 1].url;
expect(sortedUrl).toMatch(/order_column|sort/);
@@ -1399,14 +1403,13 @@ test('sort order persists after deleting a dataset', async () => {
const modal = await screen.findByRole('dialog');
await within(modal).findByText(datasetToDelete.table_name);
// Enable the danger button by typing DELETE
// Enable the danger button quickly (avoids slow character-by-character typing)
const confirmInput = within(modal).getByTestId('delete-modal-input');
await userEvent.clear(confirmInput);
await userEvent.type(confirmInput, 'DELETE');
fireEvent.change(confirmInput, { target: { value: 'DELETE' } });
// Record call count before delete to track refetch
const callsBeforeDelete = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
const confirmButton = within(modal)
@@ -1427,7 +1430,7 @@ test('sort order persists after deleting a dataset', async () => {
// Wait for list refetch to complete (prevents async cleanup error)
await waitFor(() => {
const currentCalls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
expect(currentCalls).toBeGreaterThan(callsBeforeDelete);
});
@@ -1452,8 +1455,7 @@ test('sort order persists after deleting a dataset', async () => {
// test. Component tests here focus on individual behaviors.
test('bulk selection clears when filter changes', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: mockDatasets,
count: mockDatasets.length,
});
@@ -1505,7 +1507,7 @@ test('bulk selection clears when filter changes', async () => {
// Record API call count before filter
const beforeFilterCallCount = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Wait for filter combobox to be ready before applying filter
@@ -1516,13 +1518,15 @@ test('bulk selection clears when filter changes', async () => {
// Wait for filter API call to complete
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(beforeFilterCallCount);
});
// Verify filter was applied by decoding URL payload
const urlAfterFilter = fetchMock.callHistory
.calls(API_ENDPOINTS.DATASETS)
.calls(API_ENDPOINTS.DATASOURCE_COMBINED)
.at(-1)?.url;
const risonAfterFilter = new URL(
urlAfterFilter!,
@@ -1557,7 +1561,7 @@ test('type filter API call includes correct filter parameter', async () => {
// Snapshot call count before filter
const callsBeforeFilter = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Apply Type filter
@@ -1565,12 +1569,16 @@ test('type filter API call includes correct filter parameter', async () => {
// Wait for filter API call to complete
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(callsBeforeFilter);
});
// Verify the latest API call includes the Type filter
const url = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS).at(-1)?.url;
const url = fetchMock.callHistory
.calls(API_ENDPOINTS.DATASOURCE_COMBINED)
.at(-1)?.url;
expect(url).toContain('filters');
// searchParams.get() already URL-decodes, so pass directly to rison.decode
@@ -1603,7 +1611,7 @@ test('type filter persists after duplicating a dataset', async () => {
// Snapshot call count before filter
const callsBeforeFilter = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Apply Type filter
@@ -1611,13 +1619,15 @@ test('type filter persists after duplicating a dataset', async () => {
// Wait for filter API call to complete
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(callsBeforeFilter);
});
// Verify filter is present by checking the latest API call
const urlAfterFilter = fetchMock.callHistory
.calls(API_ENDPOINTS.DATASETS)
.calls(API_ENDPOINTS.DATASOURCE_COMBINED)
.at(-1)?.url;
const risonAfterFilter = new URL(
urlAfterFilter!,
@@ -1637,7 +1647,7 @@ test('type filter persists after duplicating a dataset', async () => {
// Capture datasets API call count BEFORE any duplicate operations
const datasetsCallCountBeforeDuplicate = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Now duplicate the dataset
@@ -1673,14 +1683,14 @@ test('type filter persists after duplicating a dataset', async () => {
// Wait for datasets refetch to occur (proves duplicate triggered a refresh)
await waitFor(() => {
const datasetsCallCount = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
expect(datasetsCallCount).toBeGreaterThan(datasetsCallCountBeforeDuplicate);
});
// Verify Type filter persisted in the NEW datasets API call after duplication
const urlAfterDuplicate = fetchMock.callHistory
.calls(API_ENDPOINTS.DATASETS)
.calls(API_ENDPOINTS.DATASOURCE_COMBINED)
.at(-1)?.url;
const risonAfterDuplicate = new URL(
urlAfterDuplicate!,
@@ -1715,8 +1725,7 @@ test('edit action shows error toast when dataset fetch fails', async () => {
],
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [ownedDataset], count: 1 });
mockDatasetListEndpoints({ result: [ownedDataset], count: 1 });
// Mock SupersetClient.get to fail for the specific dataset endpoint
jest.spyOn(SupersetClient, 'get').mockImplementation(async request => {
@@ -1759,8 +1768,7 @@ test('bulk export error shows toast and clears loading state', async () => {
// Mock handleResourceExport to throw an error
mockHandleResourceExport.mockRejectedValueOnce(new Error('Export failed'));
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [mockDatasets[0]],
count: 1,
});
@@ -1824,8 +1832,7 @@ test('bulk delete error shows toast without refreshing list', async () => {
body: { message: 'Bulk delete failed' },
});
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [mockDatasets[0]],
count: 1,
});
@@ -1901,8 +1908,7 @@ test('bulk select shows "N Selected (Virtual)" for virtual-only selection', asyn
// Use only virtual datasets
const virtualDatasets = mockDatasets.filter(d => d.kind === 'virtual');
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: virtualDatasets,
count: virtualDatasets.length,
});
@@ -1948,8 +1954,7 @@ test('bulk select shows "N Selected (Physical)" for physical-only selection', as
// Use only physical datasets
const physicalDatasets = mockDatasets.filter(d => d.kind === 'physical');
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: physicalDatasets,
count: physicalDatasets.length,
});
@@ -1999,8 +2004,7 @@ test('bulk select shows mixed count for virtual and physical selection', async (
mockDatasets.find(d => d.kind === 'virtual')!,
];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: mixedDatasets,
count: mixedDatasets.length,
});
@@ -2063,8 +2067,7 @@ test('delete modal shows affected dashboards with overflow for >10 items', async
title: `Dashboard ${i + 1}`,
}));
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
fetchMock.get(`glob:*/api/v1/dataset/${dataset.id}/related_objects*`, {
charts: { count: 0, result: [] },
@@ -2101,8 +2104,7 @@ test('delete modal shows affected dashboards with overflow for >10 items', async
test('delete modal hides affected dashboards section when count is zero', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
fetchMock.get(`glob:*/api/v1/dataset/${dataset.id}/related_objects*`, {
charts: { count: 2, result: [{ id: 1, slice_name: 'Chart 1' }] },
@@ -2140,8 +2142,7 @@ test('delete modal shows affected charts with overflow for >10 items', async ()
slice_name: `Chart ${i + 1}`,
}));
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
fetchMock.get(`glob:*/api/v1/dataset/${dataset.id}/related_objects*`, {
charts: { count: 12, result: manyCharts },

View File

@@ -27,7 +27,7 @@ import {
mockWriteUser,
mockExportOnlyUser,
mockDatasets,
API_ENDPOINTS,
mockDatasetListEndpoints,
} from './DatasetList.testHelpers';
// Increase default timeout for tests that involve multiple async operations
@@ -238,8 +238,7 @@ test('action buttons respect user permissions', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);
@@ -265,8 +264,7 @@ test('read-only user sees no delete or duplicate buttons in row', async () => {
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockReadOnlyUser);
@@ -301,8 +299,7 @@ test('write user sees edit, delete, and export actions', async () => {
owners: [{ id: mockWriteUser.userId, username: 'writeuser' }],
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockWriteUser);
@@ -337,8 +334,7 @@ test('export-only user has no Actions column (no write/duplicate permissions)',
const dataset = mockDatasets[0];
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockExportOnlyUser);
@@ -371,8 +367,7 @@ test('user with can_duplicate sees duplicate button only for virtual datasets',
const physicalDataset = mockDatasets[0]; // kind: 'physical'
const virtualDataset = mockDatasets[1]; // kind: 'virtual'
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [physicalDataset, virtualDataset],
count: 2,
});

View File

@@ -29,6 +29,7 @@ import {
mockExportOnlyUser,
mockDatasets,
mockApiError403,
mockDatasetListEndpoints,
API_ENDPOINTS,
RisonFilter,
} from './DatasetList.testHelpers';
@@ -68,13 +69,17 @@ test('shows loading state during initial data fetch', () => {
// Use fake timers to avoid leaving real timers running after test
jest.useFakeTimers();
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(
API_ENDPOINTS.DATASETS,
new Promise(resolve =>
setTimeout(() => resolve({ result: [], count: 0 }), 10000),
),
const delayedResponse = new Promise(resolve =>
setTimeout(() => resolve({ result: [], count: 0 }), 10000),
);
fetchMock.removeRoutes({
names: [
API_ENDPOINTS.DATASOURCE_COMBINED,
API_ENDPOINTS.DATASOURCE_COMBINED,
],
});
fetchMock.get(API_ENDPOINTS.DATASOURCE_COMBINED, delayedResponse);
fetchMock.get(API_ENDPOINTS.DATASOURCE_COMBINED, delayedResponse);
renderDatasetList(mockAdminUser);
@@ -87,13 +92,17 @@ test('maintains component structure during loading', () => {
// Use fake timers to avoid leaving real timers running after test
jest.useFakeTimers();
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(
API_ENDPOINTS.DATASETS,
new Promise(resolve =>
setTimeout(() => resolve({ result: [], count: 0 }), 10000),
),
const delayedResponse = new Promise(resolve =>
setTimeout(() => resolve({ result: [], count: 0 }), 10000),
);
fetchMock.removeRoutes({
names: [
API_ENDPOINTS.DATASOURCE_COMBINED,
API_ENDPOINTS.DATASOURCE_COMBINED,
],
});
fetchMock.get(API_ENDPOINTS.DATASOURCE_COMBINED, delayedResponse);
fetchMock.get(API_ENDPOINTS.DATASOURCE_COMBINED, delayedResponse);
renderDatasetList(mockAdminUser);
@@ -214,8 +223,7 @@ test('handles datasets with missing fields and renders gracefully', async () =>
sql: null,
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetWithMissingFields],
count: 1,
});
@@ -241,8 +249,7 @@ test('handles datasets with missing fields and renders gracefully', async () =>
});
test('handles empty results (shows empty state)', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [], count: 0 });
mockDatasetListEndpoints({ result: [], count: 0 });
renderDatasetList(mockAdminUser);
@@ -254,7 +261,9 @@ test('makes correct initial API call on load', async () => {
renderDatasetList(mockAdminUser);
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(0);
});
});
@@ -263,7 +272,9 @@ test('API call includes correct page size', async () => {
renderDatasetList(mockAdminUser);
await waitFor(() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(0);
const { url } = calls[0];
expect(url).toContain('page_size');
@@ -278,7 +289,7 @@ test('typing in name filter updates input value and triggers API with decoded se
// Record initial API calls
const initialCallCount = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASETS,
API_ENDPOINTS.DATASOURCE_COMBINED,
).length;
// Type in search box and press Enter to trigger search
@@ -292,7 +303,9 @@ test('typing in name filter updates input value and triggers API with decoded se
// Wait for API call after Enter key press
await waitFor(
() => {
const calls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const calls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
expect(calls.length).toBeGreaterThan(initialCallCount);
// Get latest API call
@@ -346,8 +359,7 @@ test('toggling bulk select mode shows checkboxes', async () => {
}, 30000);
test('handles 500 error on initial load without crashing', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
throws: new Error('Internal Server Error'),
});
@@ -385,8 +397,7 @@ test('handles 403 error on _info endpoint and disables create actions', async ()
});
test('handles network timeout without crashing', async () => {
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
throws: new Error('Network timeout'),
});
@@ -414,7 +425,9 @@ test('component requires explicit mocks for all API endpoints', async () => {
await waitForDatasetsPageReady();
// Verify that critical endpoints were called and had mocks available
const newDatasetsCalls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS);
const newDatasetsCalls = fetchMock.callHistory.calls(
API_ENDPOINTS.DATASOURCE_COMBINED,
);
const newInfoCalls = fetchMock.callHistory.calls(API_ENDPOINTS.DATASETS_INFO);
// These should have been called during render
@@ -446,8 +459,7 @@ test('renders datasets with certification data', async () => {
}),
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [certifiedDataset],
count: 1,
});
@@ -474,8 +486,7 @@ test('displays datasets with warning_markdown', async () => {
}),
};
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetWithWarning],
count: 1,
});
@@ -496,8 +507,7 @@ test('displays datasets with warning_markdown', async () => {
test('displays dataset with multiple owners', async () => {
const datasetWithOwners = mockDatasets[1]; // Has 2 owners: Jane Smith, Bob Jones
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetWithOwners],
count: 1,
});
@@ -518,8 +528,7 @@ test('displays dataset with multiple owners', async () => {
test('displays ModifiedInfo with humanized date', async () => {
const datasetWithModified = mockDatasets[0]; // changed_by_name: 'John Doe', changed_on: '1 day ago'
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, {
mockDatasetListEndpoints({
result: [datasetWithModified],
count: 1,
});
@@ -541,8 +550,7 @@ test('displays ModifiedInfo with humanized date', async () => {
test('dataset name links to Explore with correct explore_url', async () => {
const dataset = mockDatasets[0]; // explore_url: '/explore/?datasource=1__table'
fetchMock.removeRoutes({ names: [API_ENDPOINTS.DATASETS] });
fetchMock.get(API_ENDPOINTS.DATASETS, { result: [dataset], count: 1 });
mockDatasetListEndpoints({ result: [dataset], count: 1 });
renderDatasetList(mockAdminUser);

View File

@@ -318,6 +318,7 @@ export const mockApiError404 = {
export const API_ENDPOINTS = {
DATASETS_INFO: 'glob:*/api/v1/dataset/_info*',
DATASETS: 'glob:*/api/v1/dataset/?*',
DATASOURCE_COMBINED: 'glob:*/api/v1/datasource/?*',
DATASET_GET: 'glob:*/api/v1/dataset/[0-9]*',
DATASET_RELATED_OBJECTS: 'glob:*/api/v1/dataset/*/related_objects*',
DATASET_DELETE: 'glob:*/api/v1/dataset/[0-9]*',
@@ -499,6 +500,24 @@ export const assertOnlyExpectedCalls = (expectedEndpoints: string[]) => {
});
};
/**
* Helper to mock the dataset list endpoints.
* The component fetches from /api/v1/datasource/ (combined endpoint).
* Some tests also need the legacy /api/v1/dataset/ endpoint for
* other operations (delete, bulk delete) that still use it.
*/
export const mockDatasetListEndpoints = (response: Record<string, unknown>) => {
fetchMock.removeRoutes({
names: [API_ENDPOINTS.DATASETS, API_ENDPOINTS.DATASOURCE_COMBINED],
});
fetchMock.get(API_ENDPOINTS.DATASETS, response, {
name: API_ENDPOINTS.DATASETS,
});
fetchMock.get(API_ENDPOINTS.DATASOURCE_COMBINED, response, {
name: API_ENDPOINTS.DATASOURCE_COMBINED,
});
};
// MSW setup using fetch-mock (following ChartList pattern)
// Routes are named using the API_ENDPOINTS constant values so they can be
// removed by name using removeRoutes({ names: [API_ENDPOINTS.X] })
@@ -511,11 +530,10 @@ export const setupMocks = () => {
{ name: API_ENDPOINTS.DATASETS_INFO },
);
fetchMock.get(
API_ENDPOINTS.DATASETS,
{ result: mockDatasets, count: mockDatasets.length },
{ name: API_ENDPOINTS.DATASETS },
);
mockDatasetListEndpoints({
result: mockDatasets,
count: mockDatasets.length,
});
fetchMock.get(
API_ENDPOINTS.DATASET_FAVORITE_STATUS,

File diff suppressed because it is too large Load Diff

View File

@@ -30,6 +30,8 @@ const mockAddDangerToast = jest.fn();
const mockAddSuccessToast = jest.fn();
const mockHistoryPush = jest.fn();
jest.setTimeout(60000);
type ToastInjectedProps = {
addDangerToast: (msg: string) => void;
addSuccessToast: (msg: string) => void;
@@ -113,8 +115,6 @@ type LaunchQueue = {
) => void;
};
const pendingTimerIds = new Set<ReturnType<typeof setTimeout>>();
const setupLaunchQueue = (fileHandle: MockFileHandle | null = null) => {
let savedConsumer:
| ((params: { files?: MockFileHandle[] }) => void | Promise<void>)
@@ -123,18 +123,22 @@ const setupLaunchQueue = (fileHandle: MockFileHandle | null = null) => {
setConsumer: (consumer: (params: { files?: MockFileHandle[] }) => void) => {
savedConsumer = consumer;
if (fileHandle) {
const id = setTimeout(() => {
pendingTimerIds.delete(id);
consumer({
files: [fileHandle],
});
}, 0);
pendingTimerIds.add(id);
consumer({ files: [fileHandle] });
}
},
};
return {
triggerConsumer: async (params: { files?: MockFileHandle[] }) => {
// In slower CI runners, useEffect may not have registered the consumer yet.
// Wait briefly for it before triggering.
let attempts = 0;
while (!savedConsumer && attempts < 50) {
// eslint-disable-next-line no-await-in-loop
await new Promise(resolve => {
setTimeout(resolve, 0);
});
attempts += 1;
}
await savedConsumer?.(params);
},
};
@@ -146,8 +150,6 @@ beforeEach(() => {
});
afterEach(() => {
pendingTimerIds.forEach(id => clearTimeout(id));
pendingTimerIds.clear();
delete (window as any).launchQueue;
});
@@ -232,7 +234,7 @@ test('handles Excel (.xls) file correctly', async () => {
test('handles Excel (.xlsx) file correctly', async () => {
const fileHandle = createMockFileHandle('test.xlsx');
setupLaunchQueue(fileHandle);
const { triggerConsumer } = setupLaunchQueue();
render(
<MemoryRouter initialEntries={['/superset/file-handler']}>
@@ -243,11 +245,13 @@ test('handles Excel (.xlsx) file correctly', async () => {
{ useRedux: true },
);
await triggerConsumer({ files: [fileHandle] });
const modal = await screen.findByTestId('upload-modal');
expect(modal).toBeInTheDocument();
expect(screen.getByTestId('modal-type')).toHaveTextContent('excel');
expect(screen.getByTestId('modal-extensions')).toHaveTextContent('xls,xlsx');
});
}, 60000);
test('handles Parquet file correctly', async () => {
const fileHandle = createMockFileHandle('test.parquet');

View File

@@ -111,7 +111,7 @@ export class ThemeController {
defaultTheme = (supersetThemeObject.theme as AnyThemeConfig) ?? {},
onChange = undefined,
initialMode = undefined,
}: ThemeControllerOptions = {}) {
}: ThemeControllerOptions & { initialMode?: ThemeMode } = {}) {
this.storage = storage;
this.modeStorageKey = modeStorageKey;
this.initialMode = initialMode;

View File

@@ -25,11 +25,18 @@ export default interface Dataset {
database: {
id: string;
database_name: string;
};
} | null;
kind: string;
source_type?: 'database' | 'semantic_layer';
explore_url: string;
id: number;
owners: Array<Owner>;
schema: string;
schema: string | null;
catalog?: string | null;
table_name: string;
description?: string | null;
cache_timeout?: number | null;
default_endpoint?: string | null;
is_sqllab_view?: boolean;
is_managed_externally?: boolean;
}

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -0,0 +1,245 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Command for the combined dataset + semantic view list endpoint."""
from __future__ import annotations
import logging
from typing import Any, cast
from sqlalchemy import union_all
from superset.commands.base import BaseCommand
from superset.connectors.sqla.models import SqlaTable
from superset.daos.datasource import DatasourceDAO
from superset.datasource.schemas import DatasetListSchema, SemanticViewListSchema
from superset.semantic_layers.models import SemanticView
logger = logging.getLogger(__name__)
_dataset_schema = DatasetListSchema()
_semantic_view_schema = SemanticViewListSchema()
class GetCombinedDatasourceListCommand(BaseCommand):
    """
    Fetch and serialize a paginated, combined list of datasets and semantic views.

    Callers are responsible for checking access permissions before constructing
    this command and for passing the appropriate ``can_read_*`` flags.
    """

    def __init__(
        self,
        args: dict[str, Any],
        can_read_datasets: bool,
        can_read_semantic_views: bool,
    ) -> None:
        self._args = args
        self._can_read_datasets = can_read_datasets
        self._can_read_semantic_views = can_read_semantic_views

    def run(self) -> dict[str, Any]:
        """Execute the query and return ``{"count": int, "result": [...]}``."""
        self.validate()

        current_page = self._args.get("page", 0)
        page_size = self._args.get("page_size", 25)
        sort_column = self._args.get("order_column", "changed_on")
        sort_direction = self._args.get("order_direction", "desc")

        (
            source_type,
            name_filter,
            sql_filter,
            type_filter,
            database_id,
            semantic_layer_uuid,
        ) = self._parse_filters(self._args.get("filters", []))

        # Connection filters implicitly narrow the source type first; then the
        # access flags and sql/type filters may narrow it further.
        source_type = self._resolve_connection_source_type(
            source_type, database_id, semantic_layer_uuid
        )
        source_type = self._resolve_source_type(source_type, sql_filter, type_filter)
        if source_type == "empty":
            # Short-circuit: the caller asked for a source they cannot read.
            return {"count": 0, "result": []}

        combined = self._build_combined_query(
            source_type, name_filter, sql_filter, database_id, semantic_layer_uuid
        )
        total_count, rows = DatasourceDAO.paginate_combined_query(
            combined, sort_column, sort_direction, current_page, page_size
        )
        return {"count": total_count, "result": self._serialize_rows(rows)}

    @staticmethod
    def _resolve_connection_source_type(
        source_type: str,
        database_id: int | None,
        semantic_layer_uuid: str | None,
    ) -> str:
        """Narrow ``source_type`` based on any connection filter.

        A connection filter implicitly narrows the source type: selecting a
        database ID means "show only datasets", and selecting a semantic layer
        UUID means "show only semantic views". The implicit narrowing is only
        applied when the user hasn't already set an explicit source_type.
        """
        if source_type != "all":
            return source_type
        if database_id is not None:
            return "database"
        if semantic_layer_uuid is not None:
            return "semantic_layer"
        return source_type

    @staticmethod
    def _build_combined_query(
        source_type: str,
        name_filter: str | None,
        sql_filter: bool | None,
        database_id: int | None,
        semantic_layer_uuid: str | None,
    ) -> Any:
        """Build the (possibly unioned) subquery for the requested source type."""
        dataset_query = DatasourceDAO.build_dataset_query(
            name_filter, sql_filter, database_id
        )
        view_query = DatasourceDAO.build_semantic_view_query(
            name_filter, semantic_layer_uuid
        )
        if source_type == "database":
            return dataset_query.subquery()
        if source_type == "semantic_layer":
            return view_query.subquery()
        return union_all(dataset_query, view_query).subquery()

    @staticmethod
    def _serialize_rows(rows: list[Any]) -> list[dict[str, Any]]:
        """Serialize paginated rows, bulk-fetching full ORM objects per type."""
        datasets_by_id = DatasourceDAO.fetch_datasets_by_ids(
            [row.item_id for row in rows if row.source_type == "database"]
        )
        views_by_id = DatasourceDAO.fetch_semantic_views_by_ids(
            [row.item_id for row in rows if row.source_type == "semantic_layer"]
        )
        serialized: list[dict[str, Any]] = []
        for row in rows:
            if row.source_type == "database":
                dataset = cast(SqlaTable | None, datasets_by_id.get(row.item_id))
                if dataset:
                    serialized.append(_dataset_schema.dump(dataset))
            else:
                view = cast(SemanticView | None, views_by_id.get(row.item_id))
                if view:
                    serialized.append(_semantic_view_schema.dump(view))
        return serialized

    def validate(self) -> None:
        pass  # access checks are performed by the caller (API layer)

    def _resolve_source_type(
        self,
        source_type: str,
        sql_filter: bool | None,
        type_filter: str | None,
    ) -> str:
        """Narrow source_type based on access flags, sql filter, and type filter.

        Returns one of: "database", "semantic_layer", "all", or "empty".
        "empty" signals that the caller should short-circuit and return no results
        (used when the user explicitly requests semantic views but lacks access).
        """
        if not self._can_read_semantic_views:
            # If the user explicitly asked for semantic views but cannot read
            # them, return "empty" so the caller yields zero results rather
            # than silently falling back to the full dataset list.
            wants_semantic_views = (
                source_type == "semantic_layer" or type_filter == "semantic_view"
            )
            return "empty" if wants_semantic_views else "database"
        if not self._can_read_datasets:
            return "semantic_layer"
        # An explicit source_type selection ("database" or "semantic_layer")
        # always wins. This prevents e.g. Type="Semantic View" from overriding
        # an explicit Source="Database" filter and showing inconsistent results.
        if source_type in ("database", "semantic_layer"):
            return source_type
        # sql_filter (physical/virtual toggle) only applies to datasets.
        if sql_filter is not None:
            return "database"
        # Explicit semantic-view type filter (only reached when source_type="all").
        if type_filter == "semantic_view":
            return "semantic_layer"
        return source_type

    @staticmethod
    def _parse_filters(
        filters: list[dict[str, Any]],
    ) -> tuple[str, str | None, bool | None, str | None, int | None, str | None]:
        """
        Translate raw rison filter dicts into typed query parameters.

        Returns:
            source_type: "all" | "database" | "semantic_layer"
            name_filter: substring to match against name/table_name
            sql_filter: True → physical only, False → virtual only, None → both
            type_filter: "semantic_view" when caller wants only
                semantic views
            database_id: filter datasets to a specific database ID
            semantic_layer_uuid: filter semantic views to a specific semantic layer UUID
        """
        source_type = "all"
        name_filter: str | None = None
        sql_filter: bool | None = None
        type_filter: str | None = None
        database_id: int | None = None
        semantic_layer_uuid: str | None = None

        for raw_filter in filters:
            col = raw_filter.get("col")
            opr = raw_filter.get("opr")
            value = raw_filter.get("value")
            if col == "source_type":
                source_type = value or "all"
            elif col == "table_name":
                if opr == "ct":
                    name_filter = value
            elif col == "sql" and opr == "dataset_is_null_or_empty":
                # The same operator carries two meanings: a literal
                # "semantic_view" value selects the type filter, a boolean
                # toggles physical vs. virtual datasets.
                if value == "semantic_view":
                    type_filter = "semantic_view"
                elif isinstance(value, bool):
                    sql_filter = value
            elif col == "database" and value is not None:
                try:
                    database_id = int(value)
                except (TypeError, ValueError):
                    # Ignore malformed database IDs rather than failing the request.
                    pass
            elif col == "semantic_layer_uuid" and value is not None:
                semantic_layer_uuid = str(value)

        return (
            source_type,
            name_filter,
            sql_filter,
            type_filter,
            database_id,
            semantic_layer_uuid,
        )

View File

@@ -126,7 +126,11 @@ class GetExploreCommand(BaseCommand, ABC):
security_manager.raise_for_access(datasource=datasource)
viz_type = form_data.get("viz_type")
if not viz_type and datasource and datasource.default_endpoint:
if (
not viz_type
and datasource
and getattr(datasource, "default_endpoint", None)
):
raise WrongEndpointError(redirect=datasource.default_endpoint)
form_data["datasource"] = (

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -0,0 +1,104 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from functools import partial
from typing import Any
from flask_appbuilder.models.sqla import Model
from sqlalchemy.exc import SQLAlchemyError
from superset.commands.base import BaseCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerCreateFailedError,
SemanticLayerInvalidError,
SemanticLayerNotFoundError,
SemanticViewCreateFailedError,
)
from superset.daos.semantic_layer import SemanticLayerDAO, SemanticViewDAO
from superset.semantic_layers.registry import registry
from superset.utils import json
from superset.utils.decorators import on_error, transaction
logger = logging.getLogger(__name__)
class CreateSemanticLayerCommand(BaseCommand):
    """Create a new semantic layer from validated request data."""

    def __init__(self, data: dict[str, Any]):
        # Copy so later mutation (JSON-encoding the configuration) does not
        # alter the caller's dict.
        self._properties = data.copy()

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError, ValueError),
            reraise=SemanticLayerCreateFailedError,
        )
    )
    def run(self) -> Model:
        """Validate and persist the semantic layer, returning the new model."""
        self.validate()
        configuration = self._properties.get("configuration")
        # The configuration column stores a JSON string, not a dict.
        if isinstance(configuration, dict):
            self._properties["configuration"] = json.dumps(configuration)
        return SemanticLayerDAO.create(attributes=self._properties)

    def validate(self) -> None:
        """Raise ``SemanticLayerInvalidError`` on unknown type or duplicate name."""
        layer_type = self._properties.get("type")
        if layer_type not in registry:
            raise SemanticLayerInvalidError(f"Unknown type: {layer_type}")
        name: str = self._properties.get("name", "")
        if not SemanticLayerDAO.validate_uniqueness(name):
            raise SemanticLayerInvalidError(f"Name already exists: {name}")
        # Delegate configuration validation to the registered plugin class.
        registry[layer_type].from_configuration(self._properties["configuration"])
class CreateSemanticViewCommand(BaseCommand):
    """Create a new semantic view attached to an existing semantic layer."""

    def __init__(self, data: dict[str, Any]):
        # Copy so later mutation (JSON-encoding the configuration) does not
        # alter the caller's dict.
        self._properties = data.copy()

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError, ValueError),
            reraise=SemanticViewCreateFailedError,
        )
    )
    def run(self) -> Model:
        """Validate and persist the semantic view, returning the new model."""
        self.validate()
        configuration = self._properties.get("configuration")
        # The configuration column stores a JSON string, not a dict.
        if isinstance(configuration, dict):
            self._properties["configuration"] = json.dumps(configuration)
        return SemanticViewDAO.create(attributes=self._properties)

    def validate(self) -> None:
        """Ensure the parent layer exists and the view is unique within it."""
        layer_uuid: str = self._properties.get("semantic_layer_uuid", "")
        if not SemanticLayerDAO.find_by_uuid(layer_uuid):
            raise SemanticLayerNotFoundError()
        name: str = self._properties.get("name", "")
        configuration: dict[str, Any] = self._properties.get("configuration") or {}
        # Uniqueness is scoped to (name, layer, configuration) — the same name
        # may exist under a different layer or configuration.
        if not SemanticViewDAO.validate_uniqueness(name, layer_uuid, configuration):
            raise ValueError(
                f"Semantic view '{name}' already exists for this layer"
                " and configuration"
            )

View File

@@ -0,0 +1,115 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from functools import partial
from sqlalchemy.exc import SQLAlchemyError
from superset import security_manager
from superset.commands.base import BaseCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerDeleteFailedError,
SemanticLayerNotFoundError,
SemanticViewDeleteFailedError,
SemanticViewForbiddenError,
SemanticViewNotFoundError,
)
from superset.daos.semantic_layer import SemanticLayerDAO, SemanticViewDAO
from superset.exceptions import SupersetSecurityException
from superset.semantic_layers.models import SemanticLayer, SemanticView
from superset.utils.decorators import on_error, transaction
logger = logging.getLogger(__name__)
class DeleteSemanticLayerCommand(BaseCommand):
    """Delete a semantic layer identified by its UUID."""

    def __init__(self, uuid: str):
        self._uuid = uuid
        # Populated by validate().
        self._model: SemanticLayer | None = None

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError,),
            reraise=SemanticLayerDeleteFailedError,
        )
    )
    def run(self) -> None:
        """Validate that the layer exists, then delete it."""
        self.validate()
        assert self._model
        SemanticLayerDAO.delete([self._model])

    def validate(self) -> None:
        """Load the layer, raising if the UUID is unknown."""
        self._model = SemanticLayerDAO.find_by_uuid(self._uuid)
        if not self._model:
            raise SemanticLayerNotFoundError()
class DeleteSemanticViewCommand(BaseCommand):
    """Delete a single semantic view, enforcing ownership."""

    def __init__(self, pk: int):
        self._pk = pk
        # Populated by validate().
        self._model: SemanticView | None = None

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError,),
            reraise=SemanticViewDeleteFailedError,
        )
    )
    def run(self) -> None:
        """Validate existence and ownership, then delete the view."""
        self.validate()
        assert self._model
        SemanticViewDAO.delete([self._model])

    def validate(self) -> None:
        """Load the view and verify the current user may change it."""
        self._model = SemanticViewDAO.find_by_id(self._pk, id_column="id")
        if not self._model:
            raise SemanticViewNotFoundError()
        try:
            security_manager.raise_for_ownership(self._model)
        except SupersetSecurityException as ex:
            # Surface ownership failures as a command-level forbidden error.
            raise SemanticViewForbiddenError() from ex
class BulkDeleteSemanticViewCommand(BaseCommand):
    """Delete several semantic views at once, enforcing ownership on each."""

    def __init__(self, model_ids: list[int]):
        self._model_ids = model_ids
        # Populated by validate().
        self._models: list[SemanticView] = []

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError,),
            reraise=SemanticViewDeleteFailedError,
        )
    )
    def run(self) -> None:
        """Validate all views, then delete them in one operation."""
        self.validate()
        SemanticViewDAO.delete(self._models)

    def validate(self) -> None:
        """Load every requested view and verify ownership of each one."""
        self._models = SemanticViewDAO.find_by_ids(self._model_ids, id_column="id")
        # A count mismatch means at least one requested id does not exist.
        if len(self._models) != len(self._model_ids):
            raise SemanticViewNotFoundError()
        for view in self._models:
            try:
                security_manager.raise_for_ownership(view)
            except SupersetSecurityException as ex:
                raise SemanticViewForbiddenError() from ex

View File

@@ -0,0 +1,76 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask_babel import lazy_gettext as _
from superset.commands.exceptions import (
CommandException,
CommandInvalidError,
CreateFailedError,
DeleteFailedError,
ForbiddenError,
UpdateFailedError,
)
class SemanticViewNotFoundError(CommandException):
    """Raised when the requested semantic view does not exist (HTTP 404)."""

    status = 404
    message = _("Semantic view does not exist")
class SemanticViewForbiddenError(ForbiddenError):
    """Raised when the current user is not allowed to change the view."""

    message = _("Changing this semantic view is forbidden")
class SemanticViewInvalidError(CommandInvalidError):
    """Raised when semantic view parameters fail validation."""

    message = _("Semantic view parameters are invalid.")
class SemanticViewUpdateFailedError(UpdateFailedError):
    """Raised when updating a semantic view fails."""

    message = _("Semantic view could not be updated.")
class SemanticLayerNotFoundError(CommandException):
    """Raised when the requested semantic layer does not exist (HTTP 404)."""

    status = 404
    message = _("Semantic layer does not exist")
class SemanticLayerForbiddenError(ForbiddenError):
    """Raised when the current user is not allowed to change the layer."""

    message = _("Changing this semantic layer is forbidden")
class SemanticLayerInvalidError(CommandInvalidError):
    """Raised when semantic layer parameters fail validation."""

    message = _("Semantic layer parameters are invalid.")
class SemanticLayerCreateFailedError(CreateFailedError):
    """Raised when creating a semantic layer fails."""

    message = _("Semantic layer could not be created.")
class SemanticLayerUpdateFailedError(UpdateFailedError):
    """Raised when updating a semantic layer fails."""

    message = _("Semantic layer could not be updated.")
class SemanticLayerDeleteFailedError(DeleteFailedError):
    """Raised when deleting a semantic layer fails."""

    message = _("Semantic layer could not be deleted.")
class SemanticViewCreateFailedError(CreateFailedError):
    """Raised when creating a semantic view fails."""

    message = _("Semantic view could not be created.")
class SemanticViewDeleteFailedError(DeleteFailedError):
    """Raised when deleting one or more semantic views fails."""

    message = _("Semantic view could not be deleted.")

View File

@@ -0,0 +1,126 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from functools import partial
from typing import Any
from flask_appbuilder.models.sqla import Model
from sqlalchemy.exc import SQLAlchemyError
from superset import security_manager
from superset.commands.base import BaseCommand
from superset.commands.semantic_layer.exceptions import (
SemanticLayerInvalidError,
SemanticLayerNotFoundError,
SemanticLayerUpdateFailedError,
SemanticViewForbiddenError,
SemanticViewNotFoundError,
SemanticViewUpdateFailedError,
)
from superset.daos.semantic_layer import SemanticLayerDAO, SemanticViewDAO
from superset.exceptions import SupersetSecurityException
from superset.semantic_layers.models import SemanticLayer, SemanticView
from superset.semantic_layers.registry import registry
from superset.utils import json
from superset.utils.decorators import on_error, transaction
logger = logging.getLogger(__name__)
class UpdateSemanticViewCommand(BaseCommand):
    """Update an existing semantic view, enforcing ownership and uniqueness."""

    def __init__(self, model_id: int, data: dict[str, Any]):
        self._model_id = model_id
        # Copy so the caller's payload is never mutated.
        self._properties = data.copy()
        # Populated by validate().
        self._model: SemanticView | None = None

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError, ValueError),
            reraise=SemanticViewUpdateFailedError,
        )
    )
    def run(self) -> Model:
        """Validate and apply the update, returning the updated model."""
        self.validate()
        assert self._model
        return SemanticViewDAO.update(self._model, attributes=self._properties)

    def validate(self) -> None:
        """Check existence, ownership, and (name, configuration) uniqueness."""
        # NOTE(review): the delete commands call find_by_id(..., id_column="id");
        # confirm that relying on the default id column here is intentional.
        self._model = SemanticViewDAO.find_by_id(self._model_id)
        if not self._model:
            raise SemanticViewNotFoundError()

        try:
            security_manager.raise_for_ownership(self._model)
        except SupersetSecurityException as ex:
            raise SemanticViewForbiddenError() from ex

        # Fall back to the stored values for any field not being updated.
        name = self._properties.get("name", self._model.name)
        layer_uuid = str(self._model.semantic_layer_uuid)
        configuration = self._properties.get(
            "configuration",
            json.loads(self._model.configuration),
        )
        is_unique = SemanticViewDAO.validate_update_uniqueness(
            view_uuid=str(self._model.uuid),
            name=name,
            layer_uuid=layer_uuid,
            configuration=configuration,
        )
        if not is_unique:
            # ValueError is caught by the transaction decorator and re-raised
            # as SemanticViewUpdateFailedError.
            raise ValueError(
                f"A semantic view with name '{name}' and the same "
                "configuration already exists in this semantic layer."
            )
class UpdateSemanticLayerCommand(BaseCommand):
    """Update an existing semantic layer identified by its UUID."""

    def __init__(self, uuid: str, data: dict[str, Any]):
        self._uuid = uuid
        # Copy so the caller's payload is never mutated.
        self._properties = data.copy()
        # Populated by validate().
        self._model: SemanticLayer | None = None

    @transaction(
        on_error=partial(
            on_error,
            catches=(SQLAlchemyError, ValueError),
            reraise=SemanticLayerUpdateFailedError,
        )
    )
    def run(self) -> Model:
        """Validate and apply the update, returning the updated model."""
        self.validate()
        assert self._model
        configuration = self._properties.get("configuration")
        if isinstance(configuration, dict):
            # Configuration is persisted as a JSON string.
            self._properties["configuration"] = json.dumps(configuration)
        return SemanticLayerDAO.update(self._model, attributes=self._properties)

    def validate(self) -> None:
        """Check existence, name uniqueness, and plugin configuration."""
        self._model = SemanticLayerDAO.find_by_uuid(self._uuid)
        if not self._model:
            raise SemanticLayerNotFoundError()

        name = self._properties.get("name")
        if name and not SemanticLayerDAO.validate_update_uniqueness(self._uuid, name):
            raise SemanticLayerInvalidError(f"Name already exists: {name}")

        configuration = self._properties.get("configuration")
        if configuration:
            # Validate the new configuration against the layer's plugin.
            registry[self._model.type].from_configuration(configuration)

View File

@@ -55,7 +55,11 @@ from superset.constants import CHANGE_ME_SECRET_KEY
from superset.jinja_context import BaseTemplateProcessor
from superset.key_value.types import JsonKeyValueCodec
from superset.stats_logger import DummyStatsLogger
from superset.superset_typing import CacheConfig
from superset.superset_typing import (
CacheConfig,
DBConnectionMutator,
EngineContextManager,
)
from superset.tasks.types import ExecutorType
from superset.themes.types import Theme
from superset.utils import core as utils
@@ -573,6 +577,9 @@ DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
# can_copy_clipboard) instead of the single can_csv permission
# @lifecycle: development
"GRANULAR_EXPORT_CONTROLS": False,
# Enable semantic layers and show semantic views alongside datasets
# @lifecycle: development
"SEMANTIC_LAYERS": False,
# Enables advanced data type support
# @lifecycle: development
"ENABLE_ADVANCED_DATA_TYPES": False,
@@ -828,7 +835,6 @@ DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
# FIREWALL (only port 22 is open)
# ----------------------------------------------------------------------
SSH_TUNNEL_MANAGER_CLASS = "superset.extensions.ssh.SSHManager"
SSH_TUNNEL_LOCAL_BIND_ADDRESS = "127.0.0.1"
#: Timeout (seconds) for tunnel connection (open_channel timeout)
SSH_TUNNEL_TIMEOUT_SEC = 10.0
@@ -1717,7 +1723,7 @@ def engine_context_manager( # pylint: disable=unused-argument
yield None
ENGINE_CONTEXT_MANAGER = engine_context_manager
ENGINE_CONTEXT_MANAGER: EngineContextManager = engine_context_manager
# A callable that allows altering the database connection URL and params
# on the fly, at runtime. This allows for things like impersonation or
@@ -1734,7 +1740,7 @@ ENGINE_CONTEXT_MANAGER = engine_context_manager
#
# Note that the returned uri and params are passed directly to sqlalchemy's
# as such `create_engine(url, **params)`
DB_CONNECTION_MUTATOR = None
DB_CONNECTION_MUTATOR: DBConnectionMutator | None = None
# A callable that is invoked for every invocation of DB Engine Specs

View File

@@ -108,6 +108,8 @@ from superset.sql.parse import Table
from superset.superset_typing import (
AdhocColumn,
AdhocMetric,
DatasetColumnData,
DatasetMetricData,
ExplorableData,
Metric,
QueryObjectDict,
@@ -268,6 +270,26 @@ class BaseDatasource(
# Check if all requested columns are drillable
return set(column_names).issubset(drillable_columns)
def get_compatible_metrics(
    self,
    selected_metrics: list[str],
    selected_dimensions: list[str],
) -> list[str]:
    """
    Return every metric name; SQL datasets impose no compatibility
    constraints between metrics and dimensions.
    """
    return [metric.metric_name for metric in self.metrics]
def get_compatible_dimensions(
    self,
    selected_metrics: list[str],
    selected_dimensions: list[str],
) -> list[str]:
    """
    Return every column name; SQL datasets impose no compatibility
    constraints between metrics and dimensions.
    """
    return [column.column_name for column in self.columns]
def get_time_grains(self) -> list[TimeGrainDict]:
"""
Get available time granularities from the database.
@@ -448,6 +470,7 @@ class BaseDatasource(
"column_formats": self.column_formats,
"description": self.description,
"database": self.database.data, # pylint: disable=no-member
"parent": {"name": self.database.data["name"]}, # pylint: disable=no-member
"default_endpoint": self.default_endpoint,
"filter_select": self.filter_select_enabled, # TODO deprecate
"filter_select_enabled": self.filter_select_enabled,
@@ -465,8 +488,8 @@ class BaseDatasource(
# sqla-specific
"sql": self.sql,
# one to many
"columns": [o.data for o in self.columns],
"metrics": [o.data for o in self.metrics],
"columns": [cast(DatasetColumnData, o.data) for o in self.columns],
"metrics": [cast(DatasetMetricData, o.data) for o in self.metrics],
"folders": self.folders,
# TODO deprecate, move logic to JS
"order_by_choices": self.order_by_choices,

View File

@@ -229,6 +229,40 @@ def inject_model_session_implementation() -> None:
core_models_module.get_session = get_session
def inject_semantic_layer_implementations() -> None:
    """
    Replace the abstract semantic layer decorator in
    superset_core.semantic_layers.decorators with a concrete implementation
    that registers decorated classes in the contributions registry.
    """
    import superset_core.semantic_layers.decorators as core_sl_module

    import superset.extensions.context as context_module
    from superset.semantic_layers.registry import registry

    def semantic_layer_impl(
        id: str,
        name: str,
        description: str | None = None,
    ) -> Callable[[Any], Any]:
        def decorator(cls: Any) -> Any:
            context = context_module.get_current_extension_context()
            if context:
                # Namespace extension-provided ids by publisher and name.
                manifest = context.manifest
                prefixed_id = (
                    f"extensions.{manifest.publisher}.{manifest.name}.{id}"
                )
            else:
                prefixed_id = id

            cls.name = name
            cls.description = description
            cls._semantic_layer_id = prefixed_id
            registry[prefixed_id] = cls
            return cls

        return decorator

    core_sl_module.semantic_layer = semantic_layer_impl  # type: ignore[assignment]
def initialize_core_api_dependencies() -> None:
"""
Initialize all dependency injections for the superset-core API.
@@ -242,3 +276,4 @@ def initialize_core_api_dependencies() -> None:
inject_query_implementations()
inject_task_implementations()
inject_rest_api_implementations()
inject_semantic_layer_implementations()

Some files were not shown because too many files have changed in this diff Show More