fix(mcp): batch fix for execute_sql crashes, null timestamps, and deck.gl errors (#38977)
@@ -25,6 +25,7 @@ import difflib
 from datetime import datetime, timezone
 from typing import Annotated, Any, Dict, List, Literal, Protocol
 
+import humanize
 from pydantic import (
     AliasChoices,
     AliasPath,

@@ -272,6 +273,13 @@ class GetChartInfoRequest(BaseModel):
         return self
 
 
+def _humanize_timestamp(dt: datetime | None) -> str | None:
+    """Convert a datetime to a humanized string like '2 hours ago'."""
+    if dt is None:
+        return None
+    return humanize.naturaltime(datetime.now() - dt)
+
+
 def serialize_chart_object(chart: ChartLike | None) -> ChartInfo | None:
     if not chart:
         return None

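The same helper is duplicated into the chart, dashboard, and dataset schema modules below; the None guard is what turns a missing timestamp into a null field instead of a crash. A standalone sketch of its behavior (the printed results assume the humanize package's default English output):

from datetime import datetime, timedelta

import humanize


def _humanize_timestamp(dt: datetime | None) -> str | None:
    """Convert a datetime to a humanized string like '2 hours ago'."""
    if dt is None:
        return None
    return humanize.naturaltime(datetime.now() - dt)


# A missing timestamp serializes as None instead of raising:
print(_humanize_timestamp(None))  # None
# A real timestamp reads naturally:
print(_humanize_timestamp(datetime.now() - timedelta(hours=2)))  # "2 hours ago"
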
@@ -297,11 +305,11 @@ def serialize_chart_object(chart: ChartLike | None) -> ChartInfo | None:
         or (str(chart.changed_by) if getattr(chart, "changed_by", None) else None),
         changed_by_name=getattr(chart, "changed_by_name", None),
         changed_on=getattr(chart, "changed_on", None),
-        changed_on_humanized=getattr(chart, "changed_on_humanized", None),
+        changed_on_humanized=_humanize_timestamp(getattr(chart, "changed_on", None)),
         created_by=getattr(chart, "created_by_name", None)
         or (str(chart.created_by) if getattr(chart, "created_by", None) else None),
         created_on=getattr(chart, "created_on", None),
-        created_on_humanized=getattr(chart, "created_on_humanized", None),
+        created_on_humanized=_humanize_timestamp(getattr(chart, "created_on", None)),
         uuid=str(getattr(chart, "uuid", "")) if getattr(chart, "uuid", None) else None,
         tags=[
             TagInfo.model_validate(tag, from_attributes=True)

@@ -369,6 +369,29 @@ async def get_chart_data(  # noqa: C901
         # Bubble charts use x/y/size as separate metric fields.
         viz_type = chart.viz_type or ""
 
+        # Deck.gl chart types store spatial data (lat/lon)
+        # rather than traditional metrics/groupby. They
+        # require a saved query_context to retrieve data.
+        # Match by prefix to cover all current and future
+        # deck.gl viz types (deck_arc, deck_scatter, etc.).
+        if viz_type.startswith("deck_"):
+            await ctx.warning(
+                "Chart %s is a deck.gl visualization (%s) with no "
+                "saved query_context. Data retrieval requires "
+                "re-saving the chart in Superset." % (chart.id, viz_type)
+            )
+            return ChartError(
+                error=(
+                    f"Chart {chart.id} is a deck.gl visualization "
+                    f"(type: {viz_type}) with no saved query_context. "
+                    f"Deck.gl charts use spatial data (lat/lon) that "
+                    f"cannot be reconstructed from form_data alone. "
+                    f"Please open this chart in Superset and re-save "
+                    f"it to generate a query_context."
+                ),
+                error_type="MissingQueryContext",
+            )
+
         singular_metric_no_groupby = (
             "big_number",
             "big_number_total",

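For callers, the guard surfaces as a structured ChartError rather than a raised exception. A hypothetical consumer sketch: call_tool and structured_content mirror the test usage later in this commit, while the fetch_chart_data wrapper and the chart_id parameter name are illustrative assumptions.

async def fetch_chart_data(client, chart_id: int) -> dict | None:
    """Illustrative wrapper; not part of this commit."""
    # call_tool/structured_content follow the fastmcp Client usage in the tests below
    result = await client.call_tool("get_chart_data", {"chart_id": chart_id})
    data = result.structured_content
    if data.get("error_type") == "MissingQueryContext":
        # Deck.gl chart without a saved query_context: relay the re-save hint
        print(data["error"])
        return None
    return data
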
@@ -47,6 +47,7 @@ DEFAULT_CHART_COLUMNS = [
     "slice_name",
     "viz_type",
     "url",
+    "changed_on",
     "changed_on_humanized",
 ]
 
@@ -68,6 +68,7 @@ from __future__ import annotations
 from datetime import datetime
 from typing import Annotated, Any, Dict, List, Literal, TYPE_CHECKING
 
+import humanize
 from pydantic import (
     BaseModel,
     ConfigDict,

@@ -515,6 +516,13 @@ def dashboard_serializer(dashboard: "Dashboard") -> DashboardInfo:
     )
 
 
+def _humanize_timestamp(dt: datetime | None) -> str | None:
+    """Convert a datetime to a humanized string like '2 hours ago'."""
+    if dt is None:
+        return None
+    return humanize.naturaltime(datetime.now() - dt)
+
+
 def serialize_dashboard_object(dashboard: Any) -> DashboardInfo:
     """Simple dashboard serializer that safely handles object attributes."""
     from superset.mcp_service.utils.url_utils import get_superset_base_url

@@ -537,10 +545,14 @@ def serialize_dashboard_object(dashboard: Any) -> DashboardInfo:
         published=getattr(dashboard, "published", None),
         changed_by=getattr(dashboard, "changed_by_name", None),
         changed_on=getattr(dashboard, "changed_on", None),
-        changed_on_humanized=getattr(dashboard, "changed_on_humanized", None),
+        changed_on_humanized=_humanize_timestamp(
+            getattr(dashboard, "changed_on", None)
+        ),
         created_by=getattr(dashboard, "created_by_name", None),
         created_on=getattr(dashboard, "created_on", None),
-        created_on_humanized=getattr(dashboard, "created_on_humanized", None),
+        created_on_humanized=_humanize_timestamp(
+            getattr(dashboard, "created_on", None)
+        ),
         description=getattr(dashboard, "description", None),
         css=getattr(dashboard, "css", None),
         certified_by=getattr(dashboard, "certified_by", None),

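Note the pattern across all three serializers: the humanized value is now derived from the raw changed_on/created_on timestamp, reusing the _humanize_timestamp helper added above, instead of reading a precomputed *_humanized attribute that a plain object may not carry. A small illustration, where Row is a hypothetical stand-in for such an object:

from datetime import datetime


class Row:
    """Hypothetical object with a timestamp but no precomputed humanized field."""

    changed_on = datetime(2024, 1, 1)


row = Row()
# Old approach: trust an attribute that may simply be absent, yielding null.
print(getattr(row, "changed_on_humanized", None))  # None
# New approach: compute it from the raw timestamp.
print(_humanize_timestamp(getattr(row, "changed_on", None)))  # e.g. "1 year ago"
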
@@ -49,6 +49,7 @@ DEFAULT_DASHBOARD_COLUMNS = [
     "dashboard_title",
     "slug",
     "url",
+    "changed_on",
     "changed_on_humanized",
 ]
 
@@ -24,6 +24,7 @@ from __future__ import annotations
 from datetime import datetime
 from typing import Annotated, Any, Dict, List, Literal
 
+import humanize
 from pydantic import (
     BaseModel,
     ConfigDict,

@@ -307,6 +308,13 @@ def _parse_json_field(obj: Any, field_name: str) -> Dict[str, Any] | None:
     return value
 
 
+def _humanize_timestamp(dt: datetime | None) -> str | None:
+    """Convert a datetime to a humanized string like '2 hours ago'."""
+    if dt is None:
+        return None
+    return humanize.naturaltime(datetime.now() - dt)
+
+
 def serialize_dataset_object(dataset: Any) -> DatasetInfo | None:
     if not dataset:
         return None

@@ -349,11 +357,11 @@ def serialize_dataset_object(dataset: Any) -> DatasetInfo | None:
         changed_by=getattr(dataset, "changed_by_name", None)
         or (str(dataset.changed_by) if getattr(dataset, "changed_by", None) else None),
         changed_on=getattr(dataset, "changed_on", None),
-        changed_on_humanized=getattr(dataset, "changed_on_humanized", None),
+        changed_on_humanized=_humanize_timestamp(getattr(dataset, "changed_on", None)),
         created_by=getattr(dataset, "created_by_name", None)
         or (str(dataset.created_by) if getattr(dataset, "created_by", None) else None),
         created_on=getattr(dataset, "created_on", None),
-        created_on_humanized=getattr(dataset, "created_on_humanized", None),
+        created_on_humanized=_humanize_timestamp(getattr(dataset, "created_on", None)),
         tags=[
             TagInfo.model_validate(tag, from_attributes=True)
             for tag in getattr(dataset, "tags", [])

@@ -48,6 +48,7 @@ DEFAULT_DATASET_COLUMNS = [
     "id",
     "table_name",
     "schema",
+    "changed_on",
     "changed_on_humanized",
 ]
 
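With "changed_on" in the defaults, list responses expose the raw timestamp next to its humanized form. A sketch of the effect on a single item, where the dict-comprehension filter is an illustration and not the actual implementation:

DEFAULT_DATASET_COLUMNS = [
    "id",
    "table_name",
    "schema",
    "changed_on",
    "changed_on_humanized",
]

item = {
    "id": 1,
    "table_name": "sales",
    "schema": "public",
    "changed_on": "2024-01-01T00:00:00",
    "changed_on_humanized": "a year ago",
    "description": "dropped: not a default column",
}

# Illustrative filter; the real column selection happens in the list tool.
filtered = {k: v for k, v in item.items() if k in DEFAULT_DATASET_COLUMNS}
assert "description" not in filtered
assert {"changed_on", "changed_on_humanized"} <= set(filtered)
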
@@ -36,8 +36,7 @@ from superset_core.queries.types import (
     QueryStatus,
 )
 
-from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
-from superset.exceptions import SupersetErrorException, SupersetSecurityException
+from superset.errors import SupersetErrorType
 from superset.extensions import event_logger
 from superset.mcp_service.sql_lab.schemas import (
     ColumnInfo,

@@ -91,21 +90,23 @@ async def execute_sql(request: ExecuteSqlRequest, ctx: Context) -> ExecuteSqlResponse:
         db.session.query(Database).filter_by(id=request.database_id).first()
     )
     if not database:
-        raise SupersetErrorException(
-            SupersetError(
-                message=f"Database with ID {request.database_id} not found",
-                error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR,
-                level=ErrorLevel.ERROR,
-            )
+        await ctx.error(
+            "Database not found: database_id=%s" % request.database_id
+        )
+        return ExecuteSqlResponse(
+            success=False,
+            error=f"Database with ID {request.database_id} not found",
+            error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR.value,
         )
 
     if not security_manager.can_access_database(database):
-        raise SupersetSecurityException(
-            SupersetError(
-                message=(f"Access denied to database {database.database_name}"),
-                error_type=SupersetErrorType.DATABASE_SECURITY_ACCESS_ERROR,
-                level=ErrorLevel.ERROR,
-            )
+        await ctx.error(
+            "Access denied to database: %s" % database.database_name
+        )
+        return ExecuteSqlResponse(
+            success=False,
+            error=f"Access denied to database {database.database_name}",
+            error_type=SupersetErrorType.DATABASE_SECURITY_ACCESS_ERROR.value,
         )
 
     # 2. Build QueryOptions and execute query

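This raise-to-return conversion is the execute_sql crash fix from the commit title: an exception escaping the tool reaches MCP clients only as an opaque ToolError, while a returned model keeps success, error, and error_type machine-readable. A minimal sketch, assuming ExecuteSqlResponse is a plain Pydantic model with these three fields (the real definition lives in superset.mcp_service.sql_lab.schemas):

from pydantic import BaseModel


class ExecuteSqlResponse(BaseModel):
    """Assumed shape; see superset.mcp_service.sql_lab.schemas for the real model."""

    success: bool
    error: str | None = None
    error_type: str | None = None


resp = ExecuteSqlResponse(
    success=False,
    error="Database with ID 999 not found",
    # The enum's .value is assumed to mirror the member name.
    error_type="DATABASE_NOT_FOUND_ERROR",
)
# Clients branch on structured fields instead of parsing a traceback:
assert resp.success is False
assert resp.error_type == "DATABASE_NOT_FOUND_ERROR"
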
@@ -1236,6 +1236,7 @@ class TestDatasetDefaultColumnFiltering:
             "id",
             "table_name",
             "schema",
+            "changed_on",
             "changed_on_humanized",
         }
 
@@ -1319,7 +1320,13 @@ class TestDatasetDefaultColumnFiltering:
         dataset_item = data["datasets"][0]
 
         # Verify ONLY default columns are present in the response item
-        expected_keys = {"id", "table_name", "schema", "changed_on_humanized"}
+        expected_keys = {
+            "id",
+            "table_name",
+            "schema",
+            "changed_on",
+            "changed_on_humanized",
+        }
         actual_keys = set(dataset_item.keys())
 
         # The response should only contain the default columns, NOT all columns

@@ -1335,7 +1342,6 @@ class TestDatasetDefaultColumnFiltering:
             "description",
             "database_name",
             "changed_by",
-            "changed_on",
             "columns",
             "metrics",
         ]
 
@@ -237,7 +237,7 @@ class TestExecuteSql:
         mock_security_manager,  # noqa: PT019
         mcp_server,
     ):
-        """Test error when database is not found."""
+        """Test graceful error when database is not found."""
         # mock_security_manager is patched but not used (error happens first)
         del mock_security_manager  # Silence unused variable warning
         mock_db.session.query.return_value.filter_by.return_value.first.return_value = (

@@ -251,8 +251,10 @@ class TestExecuteSql:
         }
 
         async with Client(mcp_server) as client:
-            with pytest.raises(ToolError, match="Database with ID 999 not found"):
-                await client.call_tool("execute_sql", {"request": request})
+            result = await client.call_tool("execute_sql", {"request": request})
+            data = result.structured_content
+            assert data["success"] is False
+            assert "Database with ID 999 not found" in data["error"]
 
     @patch("superset.security_manager", new_callable=MagicMock)
     @patch("superset.db")

@@ -274,8 +276,10 @@ class TestExecuteSql:
         }
 
         async with Client(mcp_server) as client:
-            with pytest.raises(ToolError, match="Access denied to database"):
-                await client.call_tool("execute_sql", {"request": request})
+            result = await client.call_tool("execute_sql", {"request": request})
+            data = result.structured_content
+            assert data["success"] is False
+            assert "Access denied to database" in data["error"]
 
     @patch("superset.security_manager")
     @patch("superset.db")