mirror of
https://github.com/apache/superset.git
synced 2026-04-19 16:14:52 +00:00
chore(command): Organize Commands according to SIP-92 (#25850)
This commit is contained in:
@@ -30,17 +30,10 @@ from flask_babel import ngettext
|
||||
from marshmallow import ValidationError
|
||||
|
||||
from superset import event_logger, is_feature_enabled
|
||||
from superset.commands.exceptions import CommandException
|
||||
from superset.commands.importers.exceptions import NoValidFilesFoundError
|
||||
from superset.commands.importers.v1.utils import get_contents_from_bundle
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.databases.filters import DatabaseFilter
|
||||
from superset.datasets.commands.create import CreateDatasetCommand
|
||||
from superset.datasets.commands.delete import DeleteDatasetCommand
|
||||
from superset.datasets.commands.duplicate import DuplicateDatasetCommand
|
||||
from superset.datasets.commands.exceptions import (
|
||||
from superset.commands.dataset.create import CreateDatasetCommand
|
||||
from superset.commands.dataset.delete import DeleteDatasetCommand
|
||||
from superset.commands.dataset.duplicate import DuplicateDatasetCommand
|
||||
from superset.commands.dataset.exceptions import (
|
||||
DatasetCreateFailedError,
|
||||
DatasetDeleteFailedError,
|
||||
DatasetForbiddenError,
|
||||
@@ -49,11 +42,18 @@ from superset.datasets.commands.exceptions import (
|
||||
DatasetRefreshFailedError,
|
||||
DatasetUpdateFailedError,
|
||||
)
|
||||
from superset.datasets.commands.export import ExportDatasetsCommand
|
||||
from superset.datasets.commands.importers.dispatcher import ImportDatasetsCommand
|
||||
from superset.datasets.commands.refresh import RefreshDatasetCommand
|
||||
from superset.datasets.commands.update import UpdateDatasetCommand
|
||||
from superset.datasets.commands.warm_up_cache import DatasetWarmUpCacheCommand
|
||||
from superset.commands.dataset.export import ExportDatasetsCommand
|
||||
from superset.commands.dataset.importers.dispatcher import ImportDatasetsCommand
|
||||
from superset.commands.dataset.refresh import RefreshDatasetCommand
|
||||
from superset.commands.dataset.update import UpdateDatasetCommand
|
||||
from superset.commands.dataset.warm_up_cache import DatasetWarmUpCacheCommand
|
||||
from superset.commands.exceptions import CommandException
|
||||
from superset.commands.importers.exceptions import NoValidFilesFoundError
|
||||
from superset.commands.importers.v1.utils import get_contents_from_bundle
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.databases.filters import DatabaseFilter
|
||||
from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter
|
||||
from superset.datasets.schemas import (
|
||||
DatasetCacheWarmUpRequestSchema,
|
||||
|
||||
@@ -20,14 +20,14 @@ from flask import Response
|
||||
from flask_appbuilder.api import expose, permission_name, protect, safe
|
||||
from flask_appbuilder.models.sqla.interface import SQLAInterface
|
||||
|
||||
from superset.connectors.sqla.models import TableColumn
|
||||
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
|
||||
from superset.datasets.columns.commands.delete import DeleteDatasetColumnCommand
|
||||
from superset.datasets.columns.commands.exceptions import (
|
||||
from superset.commands.dataset.columns.delete import DeleteDatasetColumnCommand
|
||||
from superset.commands.dataset.columns.exceptions import (
|
||||
DatasetColumnDeleteFailedError,
|
||||
DatasetColumnForbiddenError,
|
||||
DatasetColumnNotFoundError,
|
||||
)
|
||||
from superset.connectors.sqla.models import TableColumn
|
||||
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
|
||||
from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,60 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from superset import security_manager
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.connectors.sqla.models import TableColumn
|
||||
from superset.daos.dataset import DatasetColumnDAO, DatasetDAO
|
||||
from superset.daos.exceptions import DAODeleteFailedError
|
||||
from superset.datasets.columns.commands.exceptions import (
|
||||
DatasetColumnDeleteFailedError,
|
||||
DatasetColumnForbiddenError,
|
||||
DatasetColumnNotFoundError,
|
||||
)
|
||||
from superset.exceptions import SupersetSecurityException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DeleteDatasetColumnCommand(BaseCommand):
    """Delete a single column from a dataset, enforcing ownership."""

    def __init__(self, dataset_id: int, model_id: int):
        self._dataset_id = dataset_id
        self._model_id = model_id
        # Populated by validate(); None until then.
        self._model: Optional[TableColumn] = None

    def run(self) -> None:
        self.validate()
        assert self._model

        try:
            DatasetColumnDAO.delete([self._model])
        except DAODeleteFailedError as ex:
            # Surface the underlying DAO failure in the logs, then map it
            # to the command-level exception callers expect.
            logger.exception(ex.exception)
            raise DatasetColumnDeleteFailedError() from ex

    def validate(self) -> None:
        """Resolve the column and verify the current user may modify it."""
        self._model = DatasetDAO.find_dataset_column(self._dataset_id, self._model_id)
        if not self._model:
            raise DatasetColumnNotFoundError()
        try:
            security_manager.raise_for_ownership(self._model)
        except SupersetSecurityException as ex:
            raise DatasetColumnForbiddenError() from ex
|
||||
@@ -1,31 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from flask_babel import lazy_gettext as _
|
||||
|
||||
from superset.commands.exceptions import CommandException
|
||||
|
||||
|
||||
class DatasetColumnNotFoundError(CommandException):
    """Raised when the requested dataset column does not exist."""

    message = _("Dataset column not found.")
|
||||
|
||||
|
||||
class DatasetColumnDeleteFailedError(CommandException):
    """Raised when deleting a dataset column fails at the DAO layer."""

    message = _("Dataset column delete failed.")
|
||||
|
||||
|
||||
class DatasetColumnForbiddenError(CommandException):
    """Raised when the current user lacks ownership of the dataset."""

    message = _("Changing this dataset is forbidden.")
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,91 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from typing import Any, Optional
|
||||
|
||||
from flask_appbuilder.models.sqla import Model
|
||||
from marshmallow import ValidationError
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from superset.commands.base import BaseCommand, CreateMixin
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.daos.exceptions import DAOCreateFailedError
|
||||
from superset.datasets.commands.exceptions import (
|
||||
DatabaseNotFoundValidationError,
|
||||
DatasetCreateFailedError,
|
||||
DatasetExistsValidationError,
|
||||
DatasetInvalidError,
|
||||
TableNotFoundValidationError,
|
||||
)
|
||||
from superset.extensions import db
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CreateDatasetCommand(CreateMixin, BaseCommand):
    """Create a new dataset (SqlaTable) from a request payload."""

    def __init__(self, data: dict[str, Any]):
        # Copy so validate() can mutate entries without touching the caller's dict.
        self._properties = data.copy()

    def run(self) -> Model:
        self.validate()
        try:
            # Create the SqlaTable without committing, so the metadata fetch
            # below participates in the same transaction.
            dataset = DatasetDAO.create(attributes=self._properties, commit=False)
            # Populate columns and metrics from the underlying table.
            dataset.fetch_metadata(commit=False)
            db.session.commit()
        except (SQLAlchemyError, DAOCreateFailedError) as ex:
            logger.warning(ex, exc_info=True)
            db.session.rollback()
            raise DatasetCreateFailedError() from ex
        return dataset

    def validate(self) -> None:
        """Collect all validation failures, then raise once if any occurred."""
        errors: list[ValidationError] = []
        database_id = self._properties["database"]
        table_name = self._properties["table_name"]
        schema = self._properties.get("schema")
        sql = self._properties.get("sql")
        owner_ids: Optional[list[int]] = self._properties.get("owners")

        # The (database, schema, table_name) combination must be unique.
        if not DatasetDAO.validate_uniqueness(database_id, schema, table_name):
            errors.append(DatasetExistsValidationError(table_name))

        # Resolve the database id into a model instance for the DAO.
        database = DatasetDAO.get_database_by_id(database_id)
        if not database:
            errors.append(DatabaseNotFoundValidationError())
        self._properties["database"] = database

        # A physical dataset (no SQL provided) must reference a real table.
        if (
            database
            and not sql
            and not DatasetDAO.validate_table_exists(database, table_name, schema)
        ):
            errors.append(TableNotFoundValidationError(table_name))

        try:
            self._properties["owners"] = self.populate_owners(owner_ids)
        except ValidationError as ex:
            errors.append(ex)
        if errors:
            raise DatasetInvalidError(exceptions=errors)
|
||||
@@ -1,60 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from superset import security_manager
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.daos.exceptions import DAODeleteFailedError
|
||||
from superset.datasets.commands.exceptions import (
|
||||
DatasetDeleteFailedError,
|
||||
DatasetForbiddenError,
|
||||
DatasetNotFoundError,
|
||||
)
|
||||
from superset.exceptions import SupersetSecurityException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DeleteDatasetCommand(BaseCommand):
    """Bulk-delete datasets by id, enforcing existence and ownership."""

    def __init__(self, model_ids: list[int]):
        self._model_ids = model_ids
        # Resolved by validate(); None until then.
        self._models: Optional[list[SqlaTable]] = None

    def run(self) -> None:
        self.validate()
        assert self._models

        try:
            DatasetDAO.delete(self._models)
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            raise DatasetDeleteFailedError() from ex

    def validate(self) -> None:
        """Every requested id must resolve, and each dataset must be owned."""
        self._models = DatasetDAO.find_by_ids(self._model_ids)
        # A length mismatch means at least one id did not resolve.
        if not self._models or len(self._models) != len(self._model_ids):
            raise DatasetNotFoundError()
        for dataset in self._models:
            try:
                security_manager.raise_for_ownership(dataset)
            except SupersetSecurityException as ex:
                raise DatasetForbiddenError() from ex
|
||||
@@ -1,133 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from flask_appbuilder.models.sqla import Model
|
||||
from flask_babel import gettext as __
|
||||
from marshmallow import ValidationError
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from superset.commands.base import BaseCommand, CreateMixin
|
||||
from superset.commands.exceptions import DatasourceTypeInvalidError
|
||||
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.daos.exceptions import DAOCreateFailedError
|
||||
from superset.datasets.commands.exceptions import (
|
||||
DatasetDuplicateFailedError,
|
||||
DatasetExistsValidationError,
|
||||
DatasetInvalidError,
|
||||
DatasetNotFoundError,
|
||||
)
|
||||
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
|
||||
from superset.exceptions import SupersetErrorException
|
||||
from superset.extensions import db
|
||||
from superset.models.core import Database
|
||||
from superset.sql_parse import ParsedQuery
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DuplicateDatasetCommand(CreateMixin, BaseCommand):
    """Duplicate a virtual dataset under a new table name."""

    def __init__(self, data: dict[str, Any]) -> None:
        # Placeholder until validate() resolves the real base model.
        self._base_model: SqlaTable = SqlaTable()
        self._properties = data.copy()

    def run(self) -> Model:
        self.validate()
        try:
            database_id = self._base_model.database_id
            table_name = self._properties["table_name"]
            owners = self._properties["owners"]
            database = db.session.query(Database).get(database_id)
            if not database:
                raise SupersetErrorException(
                    SupersetError(
                        message=__("The database was not found."),
                        error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR,
                        level=ErrorLevel.ERROR,
                    ),
                    status=404,
                )

            # Build the duplicate, copying the base model's table-level settings.
            table = SqlaTable(table_name=table_name, owners=owners)
            table.database = database
            table.schema = self._base_model.schema
            table.template_params = self._base_model.template_params
            table.normalize_columns = self._base_model.normalize_columns
            table.always_filter_main_dttm = self._base_model.always_filter_main_dttm
            table.is_sqllab_view = True
            table.sql = ParsedQuery(self._base_model.sql).stripped()
            db.session.add(table)

            # Copy every column definition from the base model.
            table.columns = [
                TableColumn(
                    column_name=source.column_name,
                    verbose_name=source.verbose_name,
                    expression=source.expression,
                    filterable=True,
                    groupby=True,
                    is_dttm=source.is_dttm,
                    type=source.type,
                    description=source.description,
                )
                for source in self._base_model.columns
            ]

            # Copy every metric definition from the base model.
            table.metrics = [
                SqlMetric(
                    metric_name=source.metric_name,
                    verbose_name=source.verbose_name,
                    expression=source.expression,
                    metric_type=source.metric_type,
                    description=source.description,
                )
                for source in self._base_model.metrics
            ]
            db.session.commit()
        except (SQLAlchemyError, DAOCreateFailedError) as ex:
            logger.warning(ex, exc_info=True)
            db.session.rollback()
            raise DatasetDuplicateFailedError() from ex
        return table

    def validate(self) -> None:
        """Collect all validation failures, then raise once if any occurred."""
        errors: list[ValidationError] = []
        base_model_id = self._properties["base_model_id"]
        duplicate_name = self._properties["table_name"]

        base_model = DatasetDAO.find_by_id(base_model_id)
        if not base_model:
            errors.append(DatasetNotFoundError())
        else:
            self._base_model = base_model

        # Only virtual (SQL-backed) datasets may be duplicated.
        if self._base_model and self._base_model.kind != "virtual":
            errors.append(DatasourceTypeInvalidError())

        if DatasetDAO.find_one_or_none(table_name=duplicate_name):
            errors.append(DatasetExistsValidationError(table_name=duplicate_name))

        try:
            self._properties["owners"] = self.populate_owners()
        except ValidationError as ex:
            errors.append(ex)

        if errors:
            raise DatasetInvalidError(exceptions=errors)
|
||||
@@ -1,198 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from flask_babel import lazy_gettext as _
|
||||
from marshmallow.validate import ValidationError
|
||||
|
||||
from superset.commands.exceptions import (
|
||||
CommandException,
|
||||
CommandInvalidError,
|
||||
CreateFailedError,
|
||||
DeleteFailedError,
|
||||
ForbiddenError,
|
||||
ImportFailedError,
|
||||
UpdateFailedError,
|
||||
)
|
||||
|
||||
|
||||
def get_dataset_exist_error_msg(full_name: str) -> str:
    """Return the localized "already exists" message for a dataset name."""
    return _("Dataset %(name)s already exists", name=full_name)
|
||||
|
||||
|
||||
class DatabaseNotFoundValidationError(ValidationError):
    """Marshmallow error: the referenced database does not exist."""

    def __init__(self) -> None:
        super().__init__([_("Database does not exist")], field_name="database")
|
||||
|
||||
|
||||
class DatabaseChangeValidationError(ValidationError):
    """Marshmallow error: a dataset's database may not change on update."""

    def __init__(self) -> None:
        super().__init__([_("Database not allowed to change")], field_name="database")
|
||||
|
||||
|
||||
class DatasetExistsValidationError(ValidationError):
    """Marshmallow error: a dataset with this table name already exists."""

    def __init__(self, table_name: str) -> None:
        messages = [get_dataset_exist_error_msg(table_name)]
        super().__init__(messages, field_name="table_name")
|
||||
|
||||
|
||||
class DatasetColumnNotFoundValidationError(ValidationError):
    """Marshmallow error: a column targeted by an update does not exist."""

    def __init__(self) -> None:
        super().__init__([_("One or more columns do not exist")], field_name="columns")
|
||||
|
||||
|
||||
class DatasetColumnsDuplicateValidationError(ValidationError):
    """Marshmallow error: the submitted column list contains duplicates."""

    def __init__(self) -> None:
        messages = [_("One or more columns are duplicated")]
        super().__init__(messages, field_name="columns")
|
||||
|
||||
|
||||
class DatasetColumnsExistsValidationError(ValidationError):
    """Marshmallow error: one or more submitted columns already exist."""

    def __init__(self) -> None:
        super().__init__([_("One or more columns already exist")], field_name="columns")
|
||||
|
||||
|
||||
class DatasetMetricsNotFoundValidationError(ValidationError):
    """Marshmallow error: a metric targeted by an update does not exist."""

    def __init__(self) -> None:
        super().__init__([_("One or more metrics do not exist")], field_name="metrics")
|
||||
|
||||
|
||||
class DatasetMetricsDuplicateValidationError(ValidationError):
    """Marshmallow error: the submitted metric list contains duplicates."""

    def __init__(self) -> None:
        messages = [_("One or more metrics are duplicated")]
        super().__init__(messages, field_name="metrics")
|
||||
|
||||
|
||||
class DatasetMetricsExistsValidationError(ValidationError):
    """Marshmallow error: one or more submitted metrics already exist."""

    def __init__(self) -> None:
        super().__init__([_("One or more metrics already exist")], field_name="metrics")
|
||||
|
||||
|
||||
class TableNotFoundValidationError(ValidationError):
    """Marshmallow error: the physical table was not found in the database."""

    def __init__(self, table_name: str) -> None:
        # Single concatenated literal; runtime text is identical to the
        # original multi-part string.
        message = _(
            "Table [%(table_name)s] could not be found, "
            "please double check your "
            "database connection, schema, and "
            "table name",
            table_name=table_name,
        )
        super().__init__([message], field_name="table_name")
|
||||
|
||||
|
||||
class OwnersNotFoundValidationError(ValidationError):
    """Marshmallow error: one or more owner ids could not be resolved."""

    def __init__(self) -> None:
        super().__init__([_("Owners are invalid")], field_name="owners")
|
||||
|
||||
|
||||
class DatasetNotFoundError(CommandException):
    """Raised (HTTP 404) when the requested dataset does not exist."""

    status = 404
    message = _("Dataset does not exist")
|
||||
|
||||
|
||||
class DatasetInvalidError(CommandInvalidError):
    """Raised when dataset payload validation fails."""

    message = _("Dataset parameters are invalid.")
|
||||
|
||||
|
||||
class DatasetCreateFailedError(CreateFailedError):
    """Raised when dataset creation fails at the persistence layer."""

    message = _("Dataset could not be created.")
|
||||
|
||||
|
||||
class DatasetUpdateFailedError(UpdateFailedError):
    """Raised when a dataset update fails at the persistence layer."""

    message = _("Dataset could not be updated.")
|
||||
|
||||
|
||||
class DatasetDeleteFailedError(DeleteFailedError):
    """Raised when a dataset (bulk) delete fails at the persistence layer."""

    message = _("Datasets could not be deleted.")
|
||||
|
||||
|
||||
class DatasetRefreshFailedError(UpdateFailedError):
    """Raised when refreshing a dataset's metadata fails."""

    # NOTE(review): message text is identical to DatasetUpdateFailedError's —
    # possibly intentional upstream wording; confirm before reworidng changes.
    message = _("Dataset could not be updated.")
|
||||
|
||||
|
||||
class DatasetSamplesFailedError(CommandInvalidError):
    """Raised when sample rows for a dataset cannot be fetched."""

    message = _("Samples for dataset could not be retrieved.")
|
||||
|
||||
|
||||
class DatasetForbiddenError(ForbiddenError):
    """Raised when the current user may not modify the dataset."""

    message = _("Changing this dataset is forbidden")
|
||||
|
||||
|
||||
class DatasetImportError(ImportFailedError):
    """Raised when a dataset import fails without a more specific cause."""

    message = _("Import dataset failed for an unknown reason")
|
||||
|
||||
|
||||
class DatasetAccessDeniedError(ForbiddenError):
    """Raised when the current user may not read the dataset."""

    message = _("You don't have access to this dataset.")
|
||||
|
||||
|
||||
class DatasetDuplicateFailedError(CreateFailedError):
    """Raised when duplicating a dataset fails at the persistence layer."""

    message = _("Dataset could not be duplicated.")
|
||||
|
||||
|
||||
class DatasetForbiddenDataURI(ImportFailedError):
    """Raised when an import references a data URI that is not allowed."""

    message = _("Data URI is not allowed.")
|
||||
|
||||
|
||||
class WarmUpCacheTableNotFoundError(CommandException):
    """Raised (HTTP 404) when the warm-up target table is not found."""

    status = 404
    message = _("The provided table was not found in the provided database")
|
||||
@@ -1,113 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# isort:skip_file
|
||||
|
||||
import json
|
||||
import logging
|
||||
from collections.abc import Iterator
|
||||
|
||||
import yaml
|
||||
|
||||
from superset.commands.export.models import ExportModelsCommand
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.daos.database import DatabaseDAO
|
||||
from superset.datasets.commands.exceptions import DatasetNotFoundError
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.utils.dict_import_export import EXPORT_VERSION
|
||||
from superset.utils.file import get_filename
|
||||
from superset.utils.ssh_tunnel import mask_password_info
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
JSON_KEYS = {"params", "template_params", "extra"}
|
||||
|
||||
|
||||
class ExportDatasetsCommand(ExportModelsCommand):
    """Export datasets (and, optionally, their databases) as YAML files."""

    dao = DatasetDAO
    not_found = DatasetNotFoundError

    @staticmethod
    def _export(
        model: SqlaTable, export_related: bool = True
    ) -> Iterator[tuple[str, str]]:
        database_slug = get_filename(
            model.database.database_name, model.database.id, skip_id=True
        )
        dataset_slug = get_filename(model.table_name, model.id, skip_id=True)
        dataset_path = f"datasets/{database_slug}/{dataset_slug}.yaml"

        payload = model.export_to_dict(
            recursive=True,
            include_parent_ref=False,
            include_defaults=True,
            export_uuids=True,
        )
        # TODO (betodealmeida): move this logic to export_to_dict once this
        # becomes the default export endpoint
        # Fields stored as JSON strings are inlined as real objects; a decode
        # failure is logged and the raw string kept.
        for field in JSON_KEYS:
            if payload.get(field):
                try:
                    payload[field] = json.loads(payload[field])
                except json.decoder.JSONDecodeError:
                    logger.info("Unable to decode `%s` field: %s", field, payload[field])
        for field in ("metrics", "columns"):
            for attributes in payload.get(field, []):
                if attributes.get("extra"):
                    try:
                        attributes["extra"] = json.loads(attributes["extra"])
                    except json.decoder.JSONDecodeError:
                        logger.info(
                            "Unable to decode `extra` field: %s", attributes["extra"]
                        )

        payload["version"] = EXPORT_VERSION
        payload["database_uuid"] = str(model.database.uuid)

        yield dataset_path, yaml.safe_dump(payload, sort_keys=False)

        # Also emit the parent database definition when requested.
        if export_related:
            database_path = f"databases/{database_slug}.yaml"

            payload = model.database.export_to_dict(
                recursive=False,
                include_parent_ref=False,
                include_defaults=True,
                export_uuids=True,
            )
            # TODO (betodealmeida): move this logic to export_to_dict once this
            # becomes the default export endpoint
            if payload.get("extra"):
                try:
                    payload["extra"] = json.loads(payload["extra"])
                except json.decoder.JSONDecodeError:
                    logger.info("Unable to decode `extra` field: %s", payload["extra"])

            # SSH tunnel config is exported with its password masked.
            if ssh_tunnel := DatabaseDAO.get_ssh_tunnel(model.database.id):
                ssh_tunnel_payload = ssh_tunnel.export_to_dict(
                    recursive=False,
                    include_parent_ref=False,
                    include_defaults=True,
                    export_uuids=False,
                )
                payload["ssh_tunnel"] = mask_password_info(ssh_tunnel_payload)

            payload["version"] = EXPORT_VERSION

            yield database_path, yaml.safe_dump(payload, sort_keys=False)
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,73 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from marshmallow.exceptions import ValidationError
|
||||
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.commands.exceptions import CommandInvalidError
|
||||
from superset.commands.importers.exceptions import IncorrectVersionError
|
||||
from superset.datasets.commands.importers import v0, v1
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# List of the import-command implementations to try, in order. v0 must be
# last because its files carry no version marker, so it only makes sense
# as a fallback after the versioned formats have been ruled out.
command_versions = [
    v1.ImportDatasetsCommand,
    v0.ImportDatasetsCommand,
]
|
||||
|
||||
|
||||
class ImportDatasetsCommand(BaseCommand):
    """
    Import datasets.

    Dispatches the import to each versioned implementation in turn and
    stops at the first one that accepts the bundle.
    """

    def __init__(self, contents: dict[str, str], *args: Any, **kwargs: Any):
        self.contents = contents
        self.args = args
        self.kwargs = kwargs

    def run(self) -> None:
        # Try every known format; a command signals "not mine" by raising
        # IncorrectVersionError, in which case we move on to the next one.
        for command_class in command_versions:
            candidate = command_class(self.contents, *self.args, **self.kwargs)
            try:
                candidate.run()
            except IncorrectVersionError:
                logger.debug("File not handled by command, skipping")
                continue
            except (CommandInvalidError, ValidationError):
                # right version, but the file failed validation
                logger.info("Command failed validation")
                raise
            except Exception:
                # validation succeeded but the import itself failed
                logger.exception("Error running import command")
                raise
            return

        raise CommandInvalidError("Could not find a valid command to import file")

    def validate(self) -> None:
        pass
|
||||
@@ -1,296 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
import yaml
|
||||
from flask_appbuilder import Model
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.orm.session import make_transient
|
||||
|
||||
from superset import db
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.commands.importers.exceptions import IncorrectVersionError
|
||||
from superset.connectors.sqla.models import (
|
||||
BaseDatasource,
|
||||
SqlaTable,
|
||||
SqlMetric,
|
||||
TableColumn,
|
||||
)
|
||||
from superset.databases.commands.exceptions import DatabaseNotFoundError
|
||||
from superset.datasets.commands.exceptions import DatasetInvalidError
|
||||
from superset.models.core import Database
|
||||
from superset.utils.dict_import_export import DATABASES_KEY
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def lookup_sqla_table(table: SqlaTable) -> Optional[SqlaTable]:
    """Find an existing dataset with the same name/schema/database, if any."""
    query = (
        db.session.query(SqlaTable)
        .join(Database)
        .filter(SqlaTable.table_name == table.table_name)
        .filter(SqlaTable.schema == table.schema)
        .filter(Database.id == table.database_id)
    )
    return query.first()
|
||||
|
||||
|
||||
def lookup_sqla_database(table: SqlaTable) -> Optional[Database]:
    """Resolve the database named in *table*'s params, raising if absent."""
    database_name = table.params_dict["database_name"]
    match = (
        db.session.query(Database)
        .filter_by(database_name=database_name)
        .one_or_none()
    )
    if match is None:
        raise DatabaseNotFoundError
    return match
|
||||
|
||||
|
||||
def import_dataset(
    i_datasource: BaseDatasource,
    database_id: Optional[int] = None,
    import_time: Optional[int] = None,
) -> int:
    """Import *i_datasource* (with its metrics and columns) into the database.

    Existing metrics, columns and the datasource itself are overridden.
    This can be used to move dashboards between Superset instances; audit
    metadata is not copied over.
    """
    # only SQLA tables are supported by this (v0) importer
    if not isinstance(i_datasource, SqlaTable):
        raise DatasetInvalidError

    lookup_database: Callable[[BaseDatasource], Optional[Database]] = (
        lookup_sqla_database
    )
    lookup_datasource: Callable[[BaseDatasource], Optional[BaseDatasource]] = (
        lookup_sqla_table
    )
    return import_datasource(
        db.session,
        i_datasource,
        lookup_database,
        lookup_datasource,
        import_time,
        database_id,
    )
|
||||
|
||||
|
||||
def lookup_sqla_metric(session: Session, metric: SqlMetric) -> SqlMetric:
    """Return the stored metric with the same name on the same table, if any."""
    query = (
        session.query(SqlMetric)
        .filter(SqlMetric.table_id == metric.table_id)
        .filter(SqlMetric.metric_name == metric.metric_name)
    )
    return query.first()
|
||||
|
||||
|
||||
def import_metric(session: Session, metric: SqlMetric) -> SqlMetric:
    # Thin wrapper: upsert the metric via the generic simple-object importer.
    return import_simple_obj(session, metric, lookup_sqla_metric)
|
||||
|
||||
|
||||
def lookup_sqla_column(session: Session, column: TableColumn) -> TableColumn:
    """Return the stored column with the same name on the same table, if any."""
    query = (
        session.query(TableColumn)
        .filter(TableColumn.table_id == column.table_id)
        .filter(TableColumn.column_name == column.column_name)
    )
    return query.first()
|
||||
|
||||
|
||||
def import_column(session: Session, column: TableColumn) -> TableColumn:
    # Thin wrapper: upsert the column via the generic simple-object importer.
    return import_simple_obj(session, column, lookup_sqla_column)
|
||||
|
||||
|
||||
def import_datasource(  # pylint: disable=too-many-arguments
    session: Session,
    i_datasource: Model,
    lookup_database: Callable[[Model], Optional[Model]],
    lookup_datasource: Callable[[Model], Optional[Model]],
    import_time: Optional[int] = None,
    database_id: Optional[int] = None,
) -> int:
    """Imports the datasource from the object to the database.

    Metrics and columns and datasource will be overridden if exists.
    This function can be used to import/export datasources between multiple
    superset instances. Audit metadata isn't copied over.

    :param session: active SQLAlchemy session
    :param i_datasource: detached datasource object to import
    :param lookup_database: resolves the target database for the datasource
    :param lookup_datasource: finds a pre-existing datasource to override
    :param import_time: timestamp stored in the datasource params
    :param database_id: explicit target database; overrides the lookup
    :returns: the id of the imported (new or overridden) datasource
    """
    make_transient(i_datasource)
    logger.info("Started import of the datasource: %s", i_datasource.to_json())

    i_datasource.id = None
    # an explicit database_id wins; otherwise resolve via the lookup callable
    i_datasource.database_id = (
        database_id
        if database_id
        else getattr(lookup_database(i_datasource), "id", None)
    )
    i_datasource.alter_params(import_time=import_time)

    # override the datasource if it already exists, otherwise insert a copy
    datasource = lookup_datasource(i_datasource)

    if datasource:
        datasource.override(i_datasource)
        session.flush()
    else:
        datasource = i_datasource.copy()
        session.add(datasource)
        session.flush()

    # Track attached names in sets so the membership test is O(1); the
    # original rebuilt a list of names on every loop iteration (O(n^2)).
    existing_metric_names = {m.metric_name for m in datasource.metrics}
    for metric in i_datasource.metrics:
        new_m = metric.copy()
        new_m.table_id = datasource.id
        logger.info(
            "Importing metric %s from the datasource: %s",
            new_m.to_json(),
            i_datasource.full_name,
        )
        imported_m = import_metric(session, new_m)
        if imported_m.metric_name not in existing_metric_names:
            datasource.metrics.append(imported_m)
            existing_metric_names.add(imported_m.metric_name)

    existing_column_names = {c.column_name for c in datasource.columns}
    for column in i_datasource.columns:
        new_c = column.copy()
        new_c.table_id = datasource.id
        logger.info(
            "Importing column %s from the datasource: %s",
            new_c.to_json(),
            i_datasource.full_name,
        )
        imported_c = import_column(session, new_c)
        if imported_c.column_name not in existing_column_names:
            datasource.columns.append(imported_c)
            existing_column_names.add(imported_c.column_name)
    session.flush()
    return datasource.id
|
||||
|
||||
|
||||
def import_simple_obj(
    session: Session, i_obj: Model, lookup_obj: Callable[[Session, Model], Model]
) -> Model:
    """Upsert a metric/column-like object and return the persisted instance."""
    make_transient(i_obj)
    i_obj.id = None
    i_obj.table = None

    # was this object imported before?
    existing = lookup_obj(session, i_obj)
    i_obj.table = None

    if not existing:
        # first time we see it: insert as-is
        session.add(i_obj)
        session.flush()
        return i_obj

    # already present: overwrite the stored copy with the incoming one
    existing.override(i_obj)
    session.flush()
    return existing
|
||||
|
||||
|
||||
def import_from_dict(
    session: Session, data: dict[str, Any], sync: Optional[list[str]] = None
) -> None:
    """Imports databases (and their datasets) from a dictionary export."""
    sync = sync or []
    if not isinstance(data, dict):
        logger.info("Supplied object is not a dictionary.")
        return

    databases = data.get(DATABASES_KEY, [])
    logger.info("Importing %d %s", len(databases), DATABASES_KEY)
    for database in databases:
        Database.import_from_dict(session, database, sync=sync)
    session.commit()
|
||||
|
||||
|
||||
class ImportDatasetsCommand(BaseCommand):
    """
    Import datasources in YAML format.

    This is the original unversioned format used to export and import datasources
    in Superset.
    """

    # pylint: disable=unused-argument
    def __init__(
        self,
        contents: dict[str, str],
        *args: Any,
        **kwargs: Any,
    ):
        # raw file contents keyed by file name
        self.contents = contents
        # parsed YAML per file, populated by validate()
        self._configs: dict[str, Any] = {}

        # child collections to fully synchronize on import
        self.sync: list[str] = []
        if kwargs.get("sync_columns"):
            self.sync.append("columns")
        if kwargs.get("sync_metrics"):
            self.sync.append("metrics")

    def run(self) -> None:
        """Parse every file and import its datasets; raises on invalid input."""
        self.validate()

        # TODO (betodealmeida): add rollback in case of error
        for file_name, config in self._configs.items():
            logger.info("Importing dataset from file %s", file_name)
            if isinstance(config, dict):
                # CLI export: a mapping with a top-level databases key
                import_from_dict(db.session, config, sync=self.sync)
            else:  # list
                # UI export: a bare list of dataset definitions
                for dataset in config:
                    # UI exports don't have the database metadata, so we assume
                    # the DB exists and has the same name
                    params = json.loads(dataset["params"])
                    database = (
                        db.session.query(Database)
                        .filter_by(database_name=params["database_name"])
                        .one()
                    )
                    dataset["database_id"] = database.id
                    SqlaTable.import_from_dict(db.session, dataset, sync=self.sync)

    def validate(self) -> None:
        """Parse all files as YAML into ``self._configs``.

        Raises ``IncorrectVersionError`` when a file is not valid YAML or is
        neither a CLI export (dict) nor a UI export (list).
        """
        # ensure all files are YAML
        for file_name, content in self.contents.items():
            try:
                # NOTE(review): only ParserError is handled; yaml.safe_load can
                # also raise ScannerError for some malformed input — confirm
                # whether that should be caught here as well.
                config = yaml.safe_load(content)
            except yaml.parser.ParserError as ex:
                logger.exception("Invalid YAML file")
                raise IncorrectVersionError(
                    f"{file_name} is not a valid YAML file"
                ) from ex

            # CLI export
            if isinstance(config, dict):
                # TODO (betodealmeida): validate with Marshmallow
                if DATABASES_KEY not in config:
                    raise IncorrectVersionError(f"{file_name} has no valid keys")

            # UI export
            elif isinstance(config, list):
                # TODO (betodealmeida): validate with Marshmallow
                pass

            else:
                raise IncorrectVersionError(f"{file_name} is not a valid file")

            self._configs[file_name] = config
|
||||
@@ -1,69 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from typing import Any
|
||||
|
||||
from marshmallow import Schema
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from superset.commands.importers.v1 import ImportModelsCommand
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.databases.commands.importers.v1.utils import import_database
|
||||
from superset.databases.schemas import ImportV1DatabaseSchema
|
||||
from superset.datasets.commands.exceptions import DatasetImportError
|
||||
from superset.datasets.commands.importers.v1.utils import import_dataset
|
||||
from superset.datasets.schemas import ImportV1DatasetSchema
|
||||
|
||||
|
||||
class ImportDatasetsCommand(ImportModelsCommand):

    """Import datasets"""

    dao = DatasetDAO
    model_name = "dataset"
    prefix = "datasets/"
    schemas: dict[str, Schema] = {
        "databases/": ImportV1DatabaseSchema(),
        "datasets/": ImportV1DatasetSchema(),
    }
    import_error = DatasetImportError

    @staticmethod
    def _import(
        session: Session, configs: dict[str, Any], overwrite: bool = False
    ) -> None:
        # collect the UUIDs of every database referenced by a dataset config
        database_uuids = {
            config["database_uuid"]
            for file_name, config in configs.items()
            if file_name.startswith("datasets/")
        }

        # import the referenced databases first, remembering their new IDs
        database_ids: dict[str, int] = {}
        for file_name, config in configs.items():
            if file_name.startswith("databases/") and config["uuid"] in database_uuids:
                database = import_database(session, config, overwrite=False)
                database_ids[str(database.uuid)] = database.id

        # then import the datasets, pointing each at its imported parent DB
        for file_name, config in configs.items():
            if (
                file_name.startswith("datasets/")
                and config["database_uuid"] in database_ids
            ):
                config["database_id"] = database_ids[config["database_uuid"]]
                import_dataset(session, config, overwrite=overwrite)
|
||||
@@ -1,233 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import gzip
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any
|
||||
from urllib import request
|
||||
|
||||
import pandas as pd
|
||||
from flask import current_app, g
|
||||
from sqlalchemy import BigInteger, Boolean, Date, DateTime, Float, String, Text
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.orm.exc import MultipleResultsFound
|
||||
from sqlalchemy.sql.visitors import VisitableType
|
||||
|
||||
from superset import security_manager
|
||||
from superset.commands.exceptions import ImportFailedError
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.datasets.commands.exceptions import DatasetForbiddenDataURI
|
||||
from superset.models.core import Database
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# rows per INSERT batch when loading example data via df.to_sql
CHUNKSIZE = 512
# matches e.g. "VARCHAR(255)" so the length can be extracted
VARCHAR = re.compile(r"VARCHAR\((\d+)\)", re.IGNORECASE)

# dataset fields stored as JSON-encoded strings in the metadata DB
JSON_KEYS = {"params", "template_params", "extra"}
|
||||
|
||||
|
||||
# Mapping from native (upper-cased) column type names to SQLAlchemy type
# instances, used to build the dtype mapping when loading example data.
type_map = {
    "BOOLEAN": Boolean(),
    "VARCHAR": String(255),
    "STRING": String(255),
    "TEXT": Text(),
    "BIGINT": BigInteger(),
    "FLOAT": Float(),
    "FLOAT64": Float(),
    "DOUBLE PRECISION": Float(),
    "DATE": Date(),
    "DATETIME": DateTime(),
    "TIMESTAMP WITHOUT TIME ZONE": DateTime(timezone=False),
    "TIMESTAMP WITH TIME ZONE": DateTime(timezone=True),
}
|
||||
|
||||
|
||||
def get_sqla_type(native_type: str) -> VisitableType:
    """Map a native column type name to a SQLAlchemy type instance."""
    sqla_type = type_map.get(native_type.upper())
    if sqla_type is not None:
        return sqla_type

    # sized VARCHARs are not in the static map; parse the length out
    match = VARCHAR.match(native_type)
    if match:
        return String(int(match.group(1)))

    raise Exception(  # pylint: disable=broad-exception-raised
        f"Unknown type: {native_type}"
    )
|
||||
|
||||
|
||||
def get_dtype(df: pd.DataFrame, dataset: SqlaTable) -> dict[str, VisitableType]:
    """Build the to_sql dtype mapping for dataset columns present in *df*."""
    frame_columns = set(df.keys())
    dtype: dict[str, VisitableType] = {}
    for column in dataset.columns:
        if column.column_name in frame_columns:
            dtype[column.column_name] = get_sqla_type(column.type)
    return dtype
|
||||
|
||||
|
||||
def validate_data_uri(data_uri: str) -> None:
    """
    Validate that the data URI matches one of the regex patterns configured
    in ``DATASET_IMPORT_ALLOWED_DATA_URLS``.

    :param data_uri: URL the dataset wants to load its data from
    :raises DatasetForbiddenDataURI: if no configured pattern matches
    """
    allowed_urls = current_app.config["DATASET_IMPORT_ALLOWED_DATA_URLS"]
    for pattern in allowed_urls:
        try:
            if re.match(pattern, data_uri):
                return
        except re.error:
            # a malformed pattern is a configuration bug, not a data problem
            logger.exception(
                "Invalid regular expression on DATASET_IMPORT_ALLOWED_URLS"
            )
            raise
    raise DatasetForbiddenDataURI()
|
||||
|
||||
|
||||
def import_dataset(
    session: Session,
    config: dict[str, Any],
    overwrite: bool = False,
    force_data: bool = False,
    ignore_permissions: bool = False,
) -> SqlaTable:
    """Import a dataset from an export config, optionally loading its data.

    Returns the existing dataset untouched when it is already present and
    either ``overwrite`` is false or the user lacks write permission.
    """
    can_write = ignore_permissions or security_manager.can_access(
        "can_write",
        "Dataset",
    )
    existing = session.query(SqlaTable).filter_by(uuid=config["uuid"]).first()
    if existing:
        if not overwrite or not can_write:
            return existing
        # NOTE(review): this mutates the caller's dict before the copy()
        # below — confirm whether callers rely on seeing the id.
        config["id"] = existing.id
    elif not can_write:
        raise ImportFailedError(
            "Dataset doesn't exist and user doesn't have permission to create datasets"
        )

    # JSON fields are stored as strings in the metadata DB, so re-encode them.
    # TODO (betodealmeida): move this logic to import_from_dict
    config = config.copy()
    for key in JSON_KEYS:
        if config.get(key) is not None:
            try:
                config[key] = json.dumps(config[key])
            except TypeError:
                logger.info("Unable to encode `%s` field: %s", key, config[key])
    for key in ("metrics", "columns"):
        for attributes in config.get(key, []):
            if attributes.get("extra") is not None:
                try:
                    attributes["extra"] = json.dumps(attributes["extra"])
                except TypeError:
                    logger.info(
                        "Unable to encode `extra` field: %s", attributes["extra"]
                    )
                    # unencodable extras are dropped rather than failing the import
                    attributes["extra"] = None

    # should we delete columns and metrics not present in the current import?
    sync = ["columns", "metrics"] if overwrite else []

    # should we also load data into the dataset?
    data_uri = config.get("data")

    # import recursively to include columns and metrics
    try:
        dataset = SqlaTable.import_from_dict(session, config, recursive=True, sync=sync)
    except MultipleResultsFound:
        # Finding multiple results when importing a dataset only happens because initially
        # datasets were imported without schemas (eg, `examples.NULL.users`), and later
        # they were fixed to have the default schema (eg, `examples.public.users`). If a
        # user created `examples.public.users` during that time the second import will
        # fail because the UUID match will try to update `examples.NULL.users` to
        # `examples.public.users`, resulting in a conflict.
        #
        # When that happens, we return the original dataset, unmodified.
        dataset = session.query(SqlaTable).filter_by(uuid=config["uuid"]).one()

    # flush so the new dataset gets a primary key before data loading
    if dataset.id is None:
        session.flush()

    try:
        table_exists = dataset.database.has_table_by_name(dataset.table_name)
    except Exception:  # pylint: disable=broad-except
        # MySQL doesn't play nice with GSheets table names
        logger.warning(
            "Couldn't check if table %s exists, assuming it does", dataset.table_name
        )
        table_exists = True

    # load example data only when the table is missing or a reload is forced
    if data_uri and (not table_exists or force_data):
        load_data(data_uri, dataset, dataset.database, session)

    # attribute the import to the acting user, if any
    if hasattr(g, "user") and g.user:
        dataset.owners.append(g.user)

    return dataset
|
||||
|
||||
|
||||
def load_data(
    data_uri: str, dataset: SqlaTable, database: Database, session: Session
) -> None:
    """
    Download CSV (optionally gzipped) data from *data_uri* and load it into
    the dataset's table.

    :raises DatasetForbiddenDataURI: if the URI is not on the configured
        allow-list.
    """
    validate_data_uri(data_uri)
    logger.info("Downloading data from %s", data_uri)
    data = request.urlopen(data_uri)  # pylint: disable=consider-using-with
    if data_uri.endswith(".gz"):
        data = gzip.open(data)
    df = pd.read_csv(data, encoding="utf-8")
    dtype = get_dtype(df, dataset)

    # convert temporal columns
    for column_name, sqla_type in dtype.items():
        if isinstance(sqla_type, (Date, DateTime)):
            df[column_name] = pd.to_datetime(df[column_name])

    # identical to_sql settings for both code paths below
    to_sql_kwargs = {
        "schema": dataset.schema,
        "if_exists": "replace",
        "chunksize": CHUNKSIZE,
        "dtype": dtype,
        "index": False,
        "method": "multi",
    }

    # reuse session when loading data if possible, to make import atomic
    if database.sqlalchemy_uri == current_app.config.get("SQLALCHEMY_DATABASE_URI"):
        logger.info("Loading data inside the import transaction")
        df.to_sql(dataset.table_name, con=session.connection(), **to_sql_kwargs)
    else:
        logger.warning("Loading data outside the import transaction")
        with database.get_sqla_engine_with_context() as engine:
            df.to_sql(dataset.table_name, con=engine, **to_sql_kwargs)
|
||||
@@ -1,61 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from flask_appbuilder.models.sqla import Model
|
||||
|
||||
from superset import security_manager
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.datasets.commands.exceptions import (
|
||||
DatasetForbiddenError,
|
||||
DatasetNotFoundError,
|
||||
DatasetRefreshFailedError,
|
||||
)
|
||||
from superset.exceptions import SupersetSecurityException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RefreshDatasetCommand(BaseCommand):
    """Re-fetch a dataset's metadata (columns/types) from its source table."""

    def __init__(self, model_id: int):
        self._model_id = model_id
        self._model: Optional[SqlaTable] = None

    def run(self) -> Model:
        self.validate()
        if not self._model:
            raise DatasetRefreshFailedError()
        try:
            self._model.fetch_metadata()
        except Exception as ex:
            logger.exception(ex)
            raise DatasetRefreshFailedError() from ex
        return self._model

    def validate(self) -> None:
        # the dataset must exist...
        self._model = DatasetDAO.find_by_id(self._model_id)
        if not self._model:
            raise DatasetNotFoundError()
        # ...and the current user must be allowed to act on it
        try:
            security_manager.raise_for_ownership(self._model)
        except SupersetSecurityException as ex:
            raise DatasetForbiddenError() from ex
|
||||
@@ -1,168 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from collections import Counter
|
||||
from typing import Any, Optional
|
||||
|
||||
from flask_appbuilder.models.sqla import Model
|
||||
from marshmallow import ValidationError
|
||||
|
||||
from superset import security_manager
|
||||
from superset.commands.base import BaseCommand, UpdateMixin
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.daos.dataset import DatasetDAO
|
||||
from superset.daos.exceptions import DAOUpdateFailedError
|
||||
from superset.datasets.commands.exceptions import (
|
||||
DatabaseChangeValidationError,
|
||||
DatasetColumnNotFoundValidationError,
|
||||
DatasetColumnsDuplicateValidationError,
|
||||
DatasetColumnsExistsValidationError,
|
||||
DatasetExistsValidationError,
|
||||
DatasetForbiddenError,
|
||||
DatasetInvalidError,
|
||||
DatasetMetricsDuplicateValidationError,
|
||||
DatasetMetricsExistsValidationError,
|
||||
DatasetMetricsNotFoundValidationError,
|
||||
DatasetNotFoundError,
|
||||
DatasetUpdateFailedError,
|
||||
)
|
||||
from superset.exceptions import SupersetSecurityException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class UpdateDatasetCommand(UpdateMixin, BaseCommand):
|
||||
def __init__(
    self,
    model_id: int,
    data: dict[str, Any],
    override_columns: Optional[bool] = False,
):
    # ID of the dataset being updated
    self._model_id = model_id
    # copy so later mutations don't leak back into the caller's dict
    self._properties = data.copy()
    # populated by validate()
    self._model: Optional[SqlaTable] = None
    self.override_columns = override_columns
    # the update path also reads this flag from the properties payload
    self._properties["override_columns"] = override_columns
|
||||
|
||||
def run(self) -> Model:
    """Validate and persist the update, returning the updated dataset."""
    self.validate()
    if not self._model:
        raise DatasetUpdateFailedError()
    try:
        return DatasetDAO.update(self._model, attributes=self._properties)
    except DAOUpdateFailedError as ex:
        logger.exception(ex.exception)
        raise DatasetUpdateFailedError() from ex
|
||||
|
||||
def validate(self) -> None:
    """Validate the update payload, collecting all errors before raising.

    :raises DatasetNotFoundError: if the dataset doesn't exist
    :raises DatasetForbiddenError: if the user doesn't own the dataset
    :raises DatasetInvalidError: wrapping every individual validation error
    """
    exceptions: list[ValidationError] = []
    owner_ids: Optional[list[int]] = self._properties.get("owners")
    # Validate/populate model exists
    self._model = DatasetDAO.find_by_id(self._model_id)
    if not self._model:
        raise DatasetNotFoundError()
    # Check ownership
    try:
        security_manager.raise_for_ownership(self._model)
    except SupersetSecurityException as ex:
        raise DatasetForbiddenError() from ex

    database_id = self._properties.get("database", None)
    table_name = self._properties.get("table_name", None)
    # Validate uniqueness
    if not DatasetDAO.validate_update_uniqueness(
        self._model.database_id,
        self._model.schema,
        self._model_id,
        table_name,
    ):
        exceptions.append(DatasetExistsValidationError(table_name))
    # The dataset's database may not be changed. BUG FIX: the original
    # compared the incoming id against the model object itself
    # (`database_id != self._model`), which is always true, so every update
    # carrying a database was flagged as a change.
    if database_id and database_id != self._model.database_id:
        exceptions.append(DatabaseChangeValidationError())
    # Validate/Populate owner
    try:
        self._properties["owners"] = self.populate_owners(owner_ids)
    except ValidationError as ex:
        exceptions.append(ex)
    # Validate columns
    if columns := self._properties.get("columns"):
        self._validate_columns(columns, exceptions)

    # Validate metrics
    if metrics := self._properties.get("metrics"):
        self._validate_metrics(metrics, exceptions)

    if exceptions:
        raise DatasetInvalidError(exceptions=exceptions)
|
||||
|
||||
def _validate_columns(
    self, columns: list[dict[str, Any]], exceptions: list[ValidationError]
) -> None:
    """Append column-level validation errors for *columns* to *exceptions*."""
    # Duplicate column names make the remaining checks meaningless.
    if self._get_duplicates(columns, "column_name"):
        exceptions.append(DatasetColumnsDuplicateValidationError())
        return
    # Every supplied id must refer to an existing column of this dataset.
    existing_ids: list[int] = [col["id"] for col in columns if "id" in col]
    if not DatasetDAO.validate_columns_exist(self._model_id, existing_ids):
        exceptions.append(DatasetColumnNotFoundValidationError())

    # Names of brand-new columns (no id) must not collide with current
    # columns, unless the caller asked to override columns wholesale.
    if not self.override_columns:
        new_names: list[str] = [
            col["column_name"] for col in columns if "id" not in col
        ]
        if not DatasetDAO.validate_columns_uniqueness(self._model_id, new_names):
            exceptions.append(DatasetColumnsExistsValidationError())
||||
def _validate_metrics(
    self, metrics: list[dict[str, Any]], exceptions: list[ValidationError]
) -> None:
    """Append metric-level validation errors for *metrics* to *exceptions*."""
    # Duplicate metric names make the remaining checks meaningless.
    if self._get_duplicates(metrics, "metric_name"):
        exceptions.append(DatasetMetricsDuplicateValidationError())
        return
    # Every supplied id must refer to an existing metric of this dataset.
    existing_ids: list[int] = [m["id"] for m in metrics if "id" in m]
    if not DatasetDAO.validate_metrics_exist(self._model_id, existing_ids):
        exceptions.append(DatasetMetricsNotFoundValidationError())
    # Names of brand-new metrics (no id) must not collide with current ones.
    new_names: list[str] = [m["metric_name"] for m in metrics if "id" not in m]
    if not DatasetDAO.validate_metrics_uniqueness(self._model_id, new_names):
        exceptions.append(DatasetMetricsExistsValidationError())
||||
@staticmethod
def _get_duplicates(data: list[dict[str, Any]], key: str) -> list[str]:
    """Return the values of *key* that occur more than once across *data*."""
    counts = Counter(item[key] for item in data)
    return [name for name, count in counts.items() if count > 1]
||||
@@ -1,68 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from typing import Any, Optional
|
||||
|
||||
from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.connectors.sqla.models import SqlaTable
|
||||
from superset.datasets.commands.exceptions import WarmUpCacheTableNotFoundError
|
||||
from superset.extensions import db
|
||||
from superset.models.core import Database
|
||||
from superset.models.slice import Slice
|
||||
|
||||
|
||||
class DatasetWarmUpCacheCommand(BaseCommand):
    """Warm up the cache of every chart that reads from a given table."""

    def __init__(
        self,
        db_name: str,
        table_name: str,
        dashboard_id: Optional[int],
        extra_filters: Optional[str],
    ):
        self._db_name = db_name
        self._table_name = table_name
        self._dashboard_id = dashboard_id
        self._extra_filters = extra_filters
        # Populated by validate() with the charts backed by the table.
        self._charts: list[Slice] = []

    def run(self) -> list[dict[str, Any]]:
        """Validate the target table, then warm up each dependent chart.

        Returns one result payload per chart, as produced by
        ChartWarmUpCacheCommand.
        """
        self.validate()
        results: list[dict[str, Any]] = []
        for chart in self._charts:
            command = ChartWarmUpCacheCommand(
                chart, self._dashboard_id, self._extra_filters
            )
            results.append(command.run())
        return results

    def validate(self) -> None:
        """Resolve the table and collect the charts that use it.

        Raises:
            WarmUpCacheTableNotFoundError: if no table matches the given
                database and table names.
        """
        table = (
            db.session.query(SqlaTable)
            .join(Database)
            .filter(
                Database.database_name == self._db_name,
                SqlaTable.table_name == self._table_name,
            )
        ).one_or_none()
        if not table:
            raise WarmUpCacheTableNotFoundError()
        self._charts = (
            db.session.query(Slice)
            .filter_by(datasource_id=table.id, datasource_type=table.type)
            .all()
        )
|
||||
@@ -20,14 +20,14 @@ from flask import Response
|
||||
from flask_appbuilder.api import expose, permission_name, protect, safe
|
||||
from flask_appbuilder.models.sqla.interface import SQLAInterface
|
||||
|
||||
from superset.connectors.sqla.models import TableColumn
|
||||
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
|
||||
from superset.datasets.metrics.commands.delete import DeleteDatasetMetricCommand
|
||||
from superset.datasets.metrics.commands.exceptions import (
|
||||
from superset.commands.dataset.metrics.delete import DeleteDatasetMetricCommand
|
||||
from superset.commands.dataset.metrics.exceptions import (
|
||||
DatasetMetricDeleteFailedError,
|
||||
DatasetMetricForbiddenError,
|
||||
DatasetMetricNotFoundError,
|
||||
)
|
||||
from superset.connectors.sqla.models import TableColumn
|
||||
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
|
||||
from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
@@ -1,60 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from superset import security_manager
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.connectors.sqla.models import SqlMetric
|
||||
from superset.daos.dataset import DatasetDAO, DatasetMetricDAO
|
||||
from superset.daos.exceptions import DAODeleteFailedError
|
||||
from superset.datasets.metrics.commands.exceptions import (
|
||||
DatasetMetricDeleteFailedError,
|
||||
DatasetMetricForbiddenError,
|
||||
DatasetMetricNotFoundError,
|
||||
)
|
||||
from superset.exceptions import SupersetSecurityException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DeleteDatasetMetricCommand(BaseCommand):
    """Delete a single metric belonging to a dataset."""

    def __init__(self, dataset_id: int, model_id: int):
        self._dataset_id = dataset_id
        self._model_id = model_id
        # Populated by validate() once the metric has been resolved.
        self._model: Optional[SqlMetric] = None

    def run(self) -> None:
        """Validate the request, then delete the metric.

        Raises:
            DatasetMetricDeleteFailedError: if the DAO delete fails.
        """
        self.validate()
        assert self._model

        try:
            DatasetMetricDAO.delete([self._model])
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            raise DatasetMetricDeleteFailedError() from ex

    def validate(self) -> None:
        """Resolve the metric and check the current user may modify it.

        Raises:
            DatasetMetricNotFoundError: if no such metric exists on the
                dataset.
            DatasetMetricForbiddenError: if the current user lacks
                ownership of the metric.
        """
        # Validate/populate model exists
        self._model = DatasetDAO.find_dataset_metric(self._dataset_id, self._model_id)
        if self._model is None:
            raise DatasetMetricNotFoundError()
        # Check ownership
        try:
            security_manager.raise_for_ownership(self._model)
        except SupersetSecurityException as ex:
            raise DatasetMetricForbiddenError() from ex
|
||||
@@ -1,31 +0,0 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from flask_babel import lazy_gettext as _
|
||||
|
||||
from superset.commands.exceptions import CommandException
|
||||
|
||||
|
||||
class DatasetMetricNotFoundError(CommandException):
    """Raised when the requested dataset metric does not exist."""

    message = _("Dataset metric not found.")
|
||||
|
||||
|
||||
class DatasetMetricDeleteFailedError(CommandException):
    """Raised when deleting a dataset metric fails at the DAO layer."""

    message = _("Dataset metric delete failed.")
|
||||
|
||||
|
||||
class DatasetMetricForbiddenError(CommandException):
    """Raised when the current user is not allowed to modify the dataset."""

    message = _("Changing this dataset is forbidden.")
|
||||
Reference in New Issue
Block a user