fix: dataset update with invalid SQL query (#35543)

(cherry picked from commit 50a5854b25)
This commit is contained in:
Daniel Vaz Gaspar
2025-10-08 12:22:38 +01:00
committed by Michael S. Molina
parent 102df5f4d3
commit 2eb9f4ca08
2 changed files with 95 additions and 32 deletions

View File

@@ -33,6 +33,7 @@ import {
QueryResponse, QueryResponse,
QueryFormData, QueryFormData,
VizType, VizType,
getClientErrorObject,
} from '@superset-ui/core'; } from '@superset-ui/core';
import { useSelector, useDispatch } from 'react-redux'; import { useSelector, useDispatch } from 'react-redux';
import dayjs from 'dayjs'; import dayjs from 'dayjs';
@@ -204,39 +205,50 @@ export const SaveDatasetModal = ({
} }
setLoading(true); setLoading(true);
const [, key] = await Promise.all([ try {
updateDataset( const [, key] = await Promise.all([
datasource?.dbId, updateDataset(
datasetToOverwrite?.datasetid, datasource?.dbId,
datasource?.sql, datasetToOverwrite?.datasetid,
datasource?.columns?.map( datasource?.sql,
(d: { column_name: string; type: string; is_dttm: boolean }) => ({ datasource?.columns?.map(
column_name: d.column_name, (d: { column_name: string; type: string; is_dttm: boolean }) => ({
type: d.type, column_name: d.column_name,
is_dttm: d.is_dttm, type: d.type,
}), is_dttm: d.is_dttm,
}),
),
datasetToOverwrite?.owners?.map((o: DatasetOwner) => o.id),
true,
), ),
datasetToOverwrite?.owners?.map((o: DatasetOwner) => o.id), postFormData(datasetToOverwrite.datasetid, 'table', {
true, ...formDataWithDefaults,
), datasource: `${datasetToOverwrite.datasetid}__table`,
postFormData(datasetToOverwrite.datasetid, 'table', { ...(defaultVizType === VizType.Table && {
...formDataWithDefaults, all_columns: datasource?.columns?.map(column => column.column_name),
datasource: `${datasetToOverwrite.datasetid}__table`, }),
...(defaultVizType === VizType.Table && {
all_columns: datasource?.columns?.map(column => column.column_name),
}), }),
}), ]);
]); setLoading(false);
setLoading(false);
const url = mountExploreUrl(null, { const url = mountExploreUrl(null, {
[URL_PARAMS.formDataKey.name]: key, [URL_PARAMS.formDataKey.name]: key,
}); });
createWindow(url); createWindow(url);
setShouldOverwriteDataset(false); setShouldOverwriteDataset(false);
setDatasetName(getDefaultDatasetName()); setDatasetName(getDefaultDatasetName());
onHide(); onHide();
} catch (error) {
setLoading(false);
getClientErrorObject(error).then((e: { error: string }) => {
dispatch(
addDangerToast(
e.error || t('An error occurred while overwriting the dataset'),
),
);
});
}
}; };
const loadDatasetOverwriteOptions = useCallback( const loadDatasetOverwriteOptions = useCallback(

View File

@@ -17,7 +17,7 @@
import logging import logging
from collections import Counter from collections import Counter
from functools import partial from functools import partial
from typing import Any, Optional from typing import Any, cast, Optional
from flask_appbuilder.models.sqla import Model from flask_appbuilder.models.sqla import Model
from marshmallow import ValidationError from marshmallow import ValidationError
@@ -27,9 +27,11 @@ from superset import security_manager
from superset.commands.base import BaseCommand, UpdateMixin from superset.commands.base import BaseCommand, UpdateMixin
from superset.commands.dataset.exceptions import ( from superset.commands.dataset.exceptions import (
DatabaseChangeValidationError, DatabaseChangeValidationError,
DatabaseNotFoundValidationError,
DatasetColumnNotFoundValidationError, DatasetColumnNotFoundValidationError,
DatasetColumnsDuplicateValidationError, DatasetColumnsDuplicateValidationError,
DatasetColumnsExistsValidationError, DatasetColumnsExistsValidationError,
DatasetDataAccessIsNotAllowed,
DatasetExistsValidationError, DatasetExistsValidationError,
DatasetForbiddenError, DatasetForbiddenError,
DatasetInvalidError, DatasetInvalidError,
@@ -41,8 +43,9 @@ from superset.commands.dataset.exceptions import (
) )
from superset.connectors.sqla.models import SqlaTable from superset.connectors.sqla.models import SqlaTable
from superset.daos.dataset import DatasetDAO from superset.daos.dataset import DatasetDAO
from superset.exceptions import SupersetSecurityException from superset.exceptions import SupersetParseError, SupersetSecurityException
from superset.sql_parse import Table from superset.models.core import Database
from superset.sql.parse import Table
from superset.utils.decorators import on_error, transaction from superset.utils.decorators import on_error, transaction
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -99,12 +102,27 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
self._model.database.get_default_catalog() self._model.database.get_default_catalog()
) )
new_db_connection: Database | None = None
if database_id and database_id != self._model.database.id:
if new_db_connection := DatasetDAO.get_database_by_id(database_id):
self._properties["database"] = new_db_connection
else:
exceptions.append(DatabaseNotFoundValidationError())
db = new_db_connection or self._model.database
table = Table( table = Table(
self._properties.get("table_name"), # type: ignore self._properties.get("table_name"), # type: ignore
self._properties.get("schema"), self._properties.get("schema"),
catalog, catalog,
) )
schema = (
self._properties["schema"]
if "schema" in self._properties
else self._model.schema
)
# Validate uniqueness # Validate uniqueness
if not DatasetDAO.validate_update_uniqueness( if not DatasetDAO.validate_update_uniqueness(
self._model.database, self._model.database,
@@ -127,6 +145,9 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
except ValidationError as ex: except ValidationError as ex:
exceptions.append(ex) exceptions.append(ex)
# Validate SQL access
self._validate_sql_access(db, catalog, schema, exceptions)
# Validate columns # Validate columns
if columns := self._properties.get("columns"): if columns := self._properties.get("columns"):
self._validate_columns(columns, exceptions) self._validate_columns(columns, exceptions)
@@ -138,6 +159,36 @@ class UpdateDatasetCommand(UpdateMixin, BaseCommand):
if exceptions: if exceptions:
raise DatasetInvalidError(exceptions=exceptions) raise DatasetInvalidError(exceptions=exceptions)
    def _validate_sql_access(
        self,
        db: Database,
        catalog: str | None,
        schema: str | None,
        exceptions: list[ValidationError],
    ) -> None:
        """Validate SQL query access if SQL is being updated.

        Collects problems into ``exceptions`` instead of raising, so the
        caller can aggregate all validation failures before rejecting the
        update:

        - appends ``DatasetDataAccessIsNotAllowed`` when the security
          manager denies the current user access to the new query, and
        - appends a ``ValidationError`` on the ``sql`` field when the new
          query cannot be parsed (``SupersetParseError``).

        :param db: database connection to check the query against
                   (presumably the dataset's current database or a newly
                   selected one — confirm against the caller)
        :param catalog: optional catalog scope for the access check
        :param schema: optional schema scope for the access check
        :param exceptions: accumulator list the caller raises from later
        """
        # we know we have a valid model
        self._model = cast(SqlaTable, self._model)
        sql = self._properties.get("sql")
        # Only re-validate when the SQL is present AND actually differs
        # from what is already stored; an unchanged query needs no new
        # access/parse check.
        if sql and sql != self._model.sql:
            try:
                security_manager.raise_for_access(
                    database=db,
                    sql=sql,
                    catalog=catalog,
                    schema=schema,
                )
            except SupersetSecurityException as ex:
                # Access denied: surface the security manager's message
                # as a dataset-level validation error.
                exceptions.append(DatasetDataAccessIsNotAllowed(ex.error.message))
            except SupersetParseError as ex:
                # Unparseable SQL: report it as a field-level error on
                # "sql" so the API response points at the bad input.
                exceptions.append(
                    ValidationError(
                        f"Invalid SQL: {ex.error.message}",
                        field_name="sql",
                    )
                )
def _validate_columns( def _validate_columns(
self, columns: list[dict[str, Any]], exceptions: list[ValidationError] self, columns: list[dict[str, Any]], exceptions: list[ValidationError]
) -> None: ) -> None: