mirror of
https://github.com/apache/superset.git
synced 2026-04-20 08:34:37 +00:00
feat: export databases as a ZIP bundle (#11229)
* Export databases as Zip file * Fix tests * Address comments * Implement multi-export for database * Fix lint * Fix lint
This commit is contained in:
@@ -15,9 +15,12 @@
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from io import BytesIO
|
||||
from typing import Any, Optional
|
||||
from zipfile import ZipFile
|
||||
|
||||
from flask import g, request, Response
|
||||
from flask import g, request, Response, send_file
|
||||
from flask_appbuilder.api import expose, protect, rison, safe
|
||||
from flask_appbuilder.models.sqla.interface import SQLAInterface
|
||||
from flask_babel import gettext as _
|
||||
@@ -43,6 +46,7 @@ from superset.databases.commands.exceptions import (
|
||||
DatabaseSecurityUnsafeError,
|
||||
DatabaseUpdateFailedError,
|
||||
)
|
||||
from superset.databases.commands.export import ExportDatabasesCommand
|
||||
from superset.databases.commands.test_connection import TestConnectionDatabaseCommand
|
||||
from superset.databases.commands.update import UpdateDatabaseCommand
|
||||
from superset.databases.dao import DatabaseDAO
|
||||
@@ -54,6 +58,7 @@ from superset.databases.schemas import (
|
||||
DatabasePutSchema,
|
||||
DatabaseRelatedObjectsResponse,
|
||||
DatabaseTestConnectionSchema,
|
||||
get_export_ids_schema,
|
||||
SchemasResponseSchema,
|
||||
SelectStarResponseSchema,
|
||||
TableMetadataResponseSchema,
|
||||
@@ -72,6 +77,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
|
||||
datamodel = SQLAInterface(Database)
|
||||
|
||||
include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
|
||||
RouteMethod.EXPORT,
|
||||
"table_metadata",
|
||||
"select_star",
|
||||
"schemas",
|
||||
@@ -653,3 +659,61 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
|
||||
charts={"count": len(charts), "result": charts},
|
||||
dashboards={"count": len(dashboards), "result": dashboards},
|
||||
)
|
||||
|
||||
@expose("/export/", methods=["GET"])
@protect()
@safe
@statsd_metrics
@rison(get_export_ids_schema)
def export(self, **kwargs: Any) -> Response:
    """Export database(s) with associated datasets
    ---
    get:
      description: Download database(s) and associated dataset(s) as a zip file
      parameters:
      - in: query
        name: q
        content:
          application/json:
            schema:
              type: array
              items:
                type: integer
      responses:
        200:
          description: A zip file with database(s) and dataset(s) as YAML
          content:
            application/zip:
              schema:
                type: string
                format: binary
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        500:
          $ref: '#/components/responses/500'
    """
    database_ids = kwargs["rison"]
    # Timestamped root folder/filename so repeated exports don't collide,
    # e.g. database_export_20201015T123000.zip
    root = datetime.now().strftime("database_export_%Y%m%dT%H%M%S")

    buf = BytesIO()
    try:
        with ZipFile(buf, "w") as bundle:
            # The command lazily yields (file_name, yaml_content) pairs;
            # each one is written under the timestamped root directory.
            for file_name, file_content in ExportDatabasesCommand(
                database_ids
            ).run():
                with bundle.open(f"{root}/{file_name}", "w") as fp:
                    fp.write(file_content.encode())
    except DatabaseNotFoundError:
        # At least one requested ID does not exist.
        return self.response_404()
    # Rewind so send_file streams the archive from the beginning.
    buf.seek(0)

    return send_file(
        buf,
        mimetype="application/zip",
        as_attachment=True,
        attachment_filename=f"{root}.zip",
    )
|
||||
|
||||
@@ -28,7 +28,7 @@ from superset.security.analytics_db_safety import DBSecurityException
|
||||
|
||||
|
||||
class DatabaseInvalidError(CommandInvalidError):
    """Raised when the supplied database parameters fail validation."""

    # The stale, immediately-shadowed assignment with the copy/pasted
    # "Dashboard parameters are invalid." message has been removed; this
    # command operates on databases.
    message = _("Database parameters are invalid.")
|
||||
|
||||
|
||||
class DatabaseExistsValidationError(ValidationError):
|
||||
|
||||
89
superset/databases/commands/export.py
Normal file
89
superset/databases/commands/export.py
Normal file
@@ -0,0 +1,89 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# isort:skip_file
|
||||
|
||||
import json
|
||||
from typing import Iterator, List, Tuple
|
||||
|
||||
import yaml
|
||||
|
||||
from superset.commands.base import BaseCommand
|
||||
from superset.databases.commands.exceptions import DatabaseNotFoundError
|
||||
from superset.databases.dao import DatabaseDAO
|
||||
from superset.utils.dict_import_export import IMPORT_EXPORT_VERSION, sanitize
|
||||
from superset.models.core import Database
|
||||
|
||||
|
||||
class ExportDatabasesCommand(BaseCommand):
    """Export databases (and their datasets) as a set of YAML files.

    Each database is serialized to ``databases/<name>.yaml`` and each of
    its datasets to ``datasets/<name>.yaml``; results are yielded lazily
    as ``(file_name, file_content)`` tuples so callers can stream them
    (e.g. into a ZIP archive).
    """

    def __init__(self, database_ids: List[int]):
        # IDs of the databases requested for export.
        self.database_ids = database_ids

        # this will be set when calling validate()
        self._models: List[Database] = []

    @staticmethod
    def export_database(database: Database) -> Iterator[Tuple[str, str]]:
        """Yield the YAML file for ``database``, then one per dataset."""
        name = sanitize(database.database_name)
        file_name = f"databases/{name}.yaml"

        payload = database.export_to_dict(
            recursive=False,
            include_parent_ref=False,
            include_defaults=True,
            export_uuids=True,
        )
        # TODO (betodealmeida): move this logic to export_to_dict once this
        # becomes the default export endpoint
        if "extra" in payload:
            try:
                payload["extra"] = json.loads(payload["extra"])
            except (TypeError, json.decoder.JSONDecodeError):
                # Best effort: also catch TypeError, raised when ``extra``
                # is None (nullable column) or otherwise not a str, instead
                # of crashing the whole export; leave the value as-is.
                pass

        payload["version"] = IMPORT_EXPORT_VERSION

        file_content = yaml.safe_dump(payload, sort_keys=False)
        yield file_name, file_content

        # TODO (betodealmeida): reuse logic from ExportDatasetCommand once
        # it's implemented
        for dataset in database.tables:
            name = sanitize(dataset.table_name)
            file_name = f"datasets/{name}.yaml"

            payload = dataset.export_to_dict(
                recursive=True,
                include_parent_ref=False,
                include_defaults=True,
                export_uuids=True,
            )
            payload["version"] = IMPORT_EXPORT_VERSION
            # Reference the parent database by UUID so imports can re-link
            # the dataset without relying on numeric IDs.
            payload["database_uuid"] = str(database.uuid)

            file_content = yaml.safe_dump(payload, sort_keys=False)
            yield file_name, file_content

    def run(self) -> Iterator[Tuple[str, str]]:
        """Validate the requested IDs, then lazily yield export files.

        NOTE: this is a generator, so validation (and any
        DatabaseNotFoundError) only happens once iteration starts.
        """
        self.validate()

        for database in self._models:
            yield from self.export_database(database)

    def validate(self) -> None:
        """Raise ``DatabaseNotFoundError`` unless every requested ID exists."""
        self._models = DatabaseDAO.find_by_ids(self.database_ids)
        if len(self._models) != len(self.database_ids):
            raise DatabaseNotFoundError()
|
||||
@@ -109,6 +109,7 @@ extra_description = markdown(
|
||||
"whether or not the Explore button in SQL Lab results is shown.",
|
||||
True,
|
||||
)
|
||||
# Rison/JSON query schema for the export endpoint's ``q`` parameter:
# a list of integer database IDs to include in the bundle.
get_export_ids_schema = {"type": "array", "items": {"type": "integer"}}
|
||||
sqlalchemy_uri_description = markdown(
|
||||
"Refer to the "
|
||||
"[SqlAlchemy docs]"
|
||||
|
||||
Reference in New Issue
Block a user