class ImportMetricCurrencySchema(Schema):
    """
    Schema for a dataset metric's currency configuration.

    Validates the ``currency`` object attached to an imported metric,
    e.g. ``{"symbol": "USD", "symbolPosition": "left"}``.
    """

    # NOTE(review): values are only length-checked here; symbolPosition is
    # presumably "left"/"right" — confirm against the UI before tightening.
    symbol = fields.String(validate=Length(1, 128))
    symbolPosition = fields.String(validate=Length(1, 128))  # noqa: N815
-# isort:skip_file """Unit tests for Superset""" +from contextlib import contextmanager from datetime import datetime from importlib.util import find_spec -from contextlib import contextmanager -from typing import Any, Union, Optional -from unittest.mock import Mock, patch, MagicMock +from io import BytesIO +from typing import Any, Optional, Union +from unittest.mock import MagicMock, Mock, patch +from zipfile import ZipFile import pandas as pd import prison -from flask import Response, g +import yaml +from flask import g, Response from flask_appbuilder.security.sqla import models as ab_models from flask_testing import TestCase +from sqlalchemy.dialects.mysql import dialect from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.ext.declarative import DeclarativeMeta from sqlalchemy.orm import Session # noqa: F401 from sqlalchemy.sql import func -from sqlalchemy.dialects.mysql import dialect -from tests.integration_tests.constants import ADMIN_USERNAME -from tests.integration_tests.test_app import app, login -from superset.sql.parse import CTASMethod from superset import db, security_manager from superset.connectors.sqla.models import BaseDatasource, SqlaTable from superset.models import core as models -from superset.models.slice import Slice from superset.models.core import Database from superset.models.dashboard import Dashboard -from superset.utils.core import get_example_default_schema, shortid +from superset.models.slice import Slice +from superset.sql.parse import CTASMethod from superset.utils import json +from superset.utils.core import get_example_default_schema, shortid from superset.utils.database import get_example_database from superset.views.base_api import BaseSupersetModelRestApi +from tests.integration_tests.constants import ADMIN_USERNAME +from tests.integration_tests.fixtures.importexport import ( + chart_config, + dashboard_config, + database_config, + dataset_config, + metadata_files, +) +from tests.integration_tests.test_app import 
app, login FAKE_DB_NAME = "fake_db_100" DEFAULT_PASSWORD = "general" # noqa: S105 @@ -606,6 +615,48 @@ class SupersetTestCase(TestCase): response = self.get_assert_metric(uri, "get_list") return response + @staticmethod + def create_import_v1_zip_file(asset_type: str, **kwargs) -> BytesIO: + asset_configs = { + "databases": (kwargs.get("databases"), database_config, True), + "datasets": ( + kwargs.get("datasets"), + dataset_config, + asset_type != "database", + ), + "charts": ( + kwargs.get("charts"), + chart_config, + asset_type in {"chart", "dashboard"}, + ), + "dashboards": ( + kwargs.get("dashboards"), + dashboard_config, + asset_type == "dashboard", + ), + } + buf = BytesIO() + with ZipFile(buf, "w") as bundle: + with bundle.open("export/metadata.yaml", "w") as fp: + fp.write(yaml.safe_dump(metadata_files[asset_type]).encode()) + + for folder, ( + assets, + default_config, + should_have_default, + ) in asset_configs.items(): + if assets: + for i, asset in enumerate(assets): + with bundle.open( + f"export/{folder}/{asset_type}_{i + 1}.yaml", "w" + ) as fp: + fp.write(yaml.safe_dump(asset).encode()) + elif should_have_default: + with bundle.open(f"export/{folder}/{asset_type}.yaml", "w") as fp: + fp.write(yaml.safe_dump(default_config).encode()) + buf.seek(0) + return buf + @contextmanager def db_insert_temp_object(obj: DeclarativeMeta): diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index f776f0e6324..89fb23fc393 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -18,11 +18,10 @@ from io import BytesIO from unittest import mock from unittest.mock import patch -from zipfile import is_zipfile, ZipFile +from zipfile import is_zipfile import prison import pytest -import yaml from flask_babel import lazy_gettext as _ from parameterized import parameterized from sqlalchemy import and_ @@ -56,10 +55,8 @@ from 
tests.integration_tests.fixtures.energy_dashboard import ( ) from tests.integration_tests.fixtures.importexport import ( chart_config, - chart_metadata_config, database_config, dataset_config, - dataset_metadata_config, ) from tests.integration_tests.fixtures.tags import ( create_custom_tags, # noqa: F401 @@ -310,22 +307,6 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase): "can_warm_up_cache", } - def create_chart_import(self): - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("chart_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(chart_metadata_config).encode()) - with bundle.open( - "chart_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(database_config).encode()) - with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp: - fp.write(yaml.safe_dump(chart_config).encode()) - buf.seek(0) - return buf - def test_delete_chart(self): """ Chart API: Test delete @@ -1757,7 +1738,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/chart/import/" - buf = self.create_chart_import() + buf = self.create_import_v1_zip_file("chart") form_data = { "formData": (buf, "chart_export.zip"), } @@ -1795,7 +1776,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/chart/import/" - buf = self.create_chart_import() + buf = self.create_import_v1_zip_file("chart") form_data = { "formData": (buf, "chart_export.zip"), } @@ -1806,7 +1787,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase): assert response == {"message": "OK"} # import again without overwrite flag - buf = self.create_chart_import() + buf = self.create_import_v1_zip_file("chart") form_data = { "formData": (buf, 
"chart_export.zip"), } @@ -1821,7 +1802,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase): "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "charts/imported_chart.yaml": "Chart already exists and `overwrite=true` was not passed", # noqa: E501 + "charts/chart.yaml": "Chart already exists and `overwrite=true` was not passed", # noqa: E501 "issue_codes": [ { "code": 1010, @@ -1834,7 +1815,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase): } # import with overwrite flag - buf = self.create_chart_import() + buf = self.create_import_v1_zip_file("chart") form_data = { "formData": (buf, "chart_export.zip"), "overwrite": "true", @@ -1867,20 +1848,7 @@ class TestChartApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/chart/import/" - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("chart_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(dataset_metadata_config).encode()) - with bundle.open( - "chart_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(database_config).encode()) - with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp: - fp.write(yaml.safe_dump(chart_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file("dataset", charts=[chart_config]) form_data = { "formData": (buf, "chart_export.zip"), } diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 291096deb69..73f9fe5e36b 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -55,7 +55,6 @@ from tests.integration_tests.fixtures.importexport import ( dashboard_export, dashboard_metadata_config, dataset_config, - 
dataset_metadata_config, ) from tests.integration_tests.fixtures.tags import ( create_custom_tags, # noqa: F401 @@ -2313,7 +2312,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas self.login(ADMIN_USERNAME) uri = "api/v1/dashboard/import/" - buf = self.create_dashboard_import() + buf = self.create_import_v1_zip_file("dashboard") form_data = { "formData": (buf, "dashboard_export.zip"), } @@ -2418,7 +2417,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas self.login(ADMIN_USERNAME) uri = "api/v1/dashboard/import/" - buf = self.create_dashboard_import() + buf = self.create_import_v1_zip_file("dashboard") form_data = { "formData": (buf, "dashboard_export.zip"), } @@ -2429,7 +2428,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas assert response == {"message": "OK"} # import again without overwrite flag - buf = self.create_dashboard_import() + buf = self.create_import_v1_zip_file("dashboard") form_data = { "formData": (buf, "dashboard_export.zip"), } @@ -2444,7 +2443,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "dashboards/imported_dashboard.yaml": "Dashboard already exists and `overwrite=true` was not passed", # noqa: E501 + "dashboards/dashboard.yaml": "Dashboard already exists and `overwrite=true` was not passed", # noqa: E501 "issue_codes": [ { "code": 1010, @@ -2460,7 +2459,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas } # import with overwrite flag - buf = self.create_dashboard_import() + buf = self.create_import_v1_zip_file("dashboard") form_data = { "formData": (buf, "dashboard_export.zip"), "overwrite": "true", @@ -2492,26 +2491,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas self.login(ADMIN_USERNAME) uri = "api/v1/dashboard/import/" - buf = BytesIO() - with 
ZipFile(buf, "w") as bundle: - with bundle.open("dashboard_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(dataset_metadata_config).encode()) - with bundle.open( - "dashboard_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(database_config).encode()) - with bundle.open( - "dashboard_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - with bundle.open("dashboard_export/charts/imported_chart.yaml", "w") as fp: - fp.write(yaml.safe_dump(chart_config).encode()) - with bundle.open( - "dashboard_export/dashboards/imported_dashboard.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dashboard_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file("chart", dashboards=[dashboard_config]) form_data = { "formData": (buf, "dashboard_export.zip"), } diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index 3bf5d7d7652..3318cf860de 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -22,11 +22,10 @@ from collections import defaultdict from io import BytesIO from unittest import mock from unittest.mock import patch, MagicMock -from zipfile import is_zipfile, ZipFile +from zipfile import is_zipfile import prison import pytest -import yaml from unittest.mock import Mock @@ -69,8 +68,6 @@ from tests.integration_tests.fixtures.world_bank_dashboard import ( from tests.integration_tests.fixtures.importexport import ( database_config, dataset_config, - database_metadata_config, - dataset_metadata_config, database_with_ssh_tunnel_config_password, database_with_ssh_tunnel_config_private_key, database_with_ssh_tunnel_config_mix_credentials, @@ -170,22 +167,6 @@ class TestDatabaseApi(SupersetTestCase): db.session.commit() self._database = None - def create_database_import(self): - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with 
bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - return buf - def test_get_items(self): """ Database API: Test get items @@ -2552,7 +2533,7 @@ class TestDatabaseApi(SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/database/import/" - buf = self.create_database_import() + buf = self.create_import_v1_zip_file("database", datasets=[dataset_config]) form_data = { "formData": (buf, "database_export.zip"), } @@ -2585,7 +2566,7 @@ class TestDatabaseApi(SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/database/import/" - buf = self.create_database_import() + buf = self.create_import_v1_zip_file("database", datasets=[dataset_config]) form_data = { "formData": (buf, "database_export.zip"), } @@ -2596,7 +2577,7 @@ class TestDatabaseApi(SupersetTestCase): assert response == {"message": "OK"} # import again without overwrite flag - buf = self.create_database_import() + buf = self.create_import_v1_zip_file("database", datasets=[dataset_config]) form_data = { "formData": (buf, "database_export.zip"), } @@ -2611,7 +2592,7 @@ class TestDatabaseApi(SupersetTestCase): "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "databases/imported_database.yaml": "Database already exists and `overwrite=true` was not passed", # noqa: E501 + "databases/database.yaml": "Database already exists and `overwrite=true` was not passed", # noqa: E501 "issue_codes": [ { "code": 1010, @@ -2627,7 +2608,7 @@ class TestDatabaseApi(SupersetTestCase): } # import with overwrite flag - buf = self.create_database_import() + buf = self.create_import_v1_zip_file("database", datasets=[dataset_config]) form_data = 
{ "formData": (buf, "database_export.zip"), "overwrite": "true", @@ -2656,20 +2637,7 @@ class TestDatabaseApi(SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/database/import/" - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(dataset_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file("dataset") form_data = { "formData": (buf, "database_export.zip"), } @@ -2712,20 +2680,11 @@ class TestDatabaseApi(SupersetTestCase): "postgresql://username:XXXXXXXXXX@host:12345/db" ) - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), } @@ -2740,7 +2699,7 @@ class TestDatabaseApi(SupersetTestCase): "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "databases/imported_database.yaml": { + "databases/database_1.yaml": { "_schema": ["Must provide a password for the database"] }, "issue_codes": [ @@ -2770,19 +2729,14 @@ class TestDatabaseApi(SupersetTestCase): "vertica+vertica_python://hackathon:XXXXXXXXXX@host:5433/dbname?ssl=1" ) - buf = BytesIO() - with 
ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), - "passwords": json.dumps({"databases/imported_database.yaml": "SECRET"}), + "passwords": json.dumps({"databases/database_1.yaml": "SECRET"}), } rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") response = json.loads(rv.data.decode("utf-8")) @@ -2818,21 +2772,11 @@ class TestDatabaseApi(SupersetTestCase): mock_schema_is_feature_enabled.return_value = True masked_database_config = database_with_ssh_tunnel_config_password.copy() - - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), } @@ -2847,7 +2791,7 @@ class TestDatabaseApi(SupersetTestCase): "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "databases/imported_database.yaml": { + "databases/database_1.yaml": { "_schema": ["Must provide a password for the ssh tunnel"] }, "issue_codes": [ @@ -2880,21 +2824,14 @@ class TestDatabaseApi(SupersetTestCase): 
masked_database_config = database_with_ssh_tunnel_config_password.copy() - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), - "ssh_tunnel_passwords": json.dumps( - {"databases/imported_database.yaml": "TEST"} - ), + "ssh_tunnel_passwords": json.dumps({"databases/database_1.yaml": "TEST"}), } rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") response = json.loads(rv.data.decode("utf-8")) @@ -2930,21 +2867,11 @@ class TestDatabaseApi(SupersetTestCase): mock_schema_is_feature_enabled.return_value = True masked_database_config = database_with_ssh_tunnel_config_private_key.copy() - - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), } @@ -2959,7 +2886,7 @@ class TestDatabaseApi(SupersetTestCase): "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "databases/imported_database.yaml": { + "databases/database_1.yaml": { "_schema": [ "Must provide a private 
key for the ssh tunnel", "Must provide a private key password for the ssh tunnel", # noqa: E501 @@ -2994,24 +2921,17 @@ class TestDatabaseApi(SupersetTestCase): mock_schema_is_feature_enabled.return_value = True masked_database_config = database_with_ssh_tunnel_config_private_key.copy() - - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + ) form_data = { "formData": (buf, "database_export.zip"), "ssh_tunnel_private_keys": json.dumps( - {"databases/imported_database.yaml": "TestPrivateKey"} + {"databases/database_1.yaml": "TestPrivateKey"} ), "ssh_tunnel_private_key_passwords": json.dumps( - {"databases/imported_database.yaml": "TEST"} + {"databases/database_1.yaml": "TEST"} ), } rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") @@ -3047,21 +2967,11 @@ class TestDatabaseApi(SupersetTestCase): uri = "api/v1/database/import/" masked_database_config = database_with_ssh_tunnel_config_private_key.copy() - - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), } @@ 
-3106,20 +3016,11 @@ class TestDatabaseApi(SupersetTestCase): masked_database_config = database_with_ssh_tunnel_config_no_credentials.copy() - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), } @@ -3164,20 +3065,11 @@ class TestDatabaseApi(SupersetTestCase): masked_database_config = database_with_ssh_tunnel_config_mix_credentials.copy() - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), } @@ -3224,20 +3116,11 @@ class TestDatabaseApi(SupersetTestCase): database_with_ssh_tunnel_config_private_pass_only.copy() ) - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/imported_database.yaml", 
"w" - ) as fp: - fp.write(yaml.safe_dump(masked_database_config).encode()) - with bundle.open( - "database_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file( + "database", + databases=[masked_database_config], + datasets=[dataset_config], + ) form_data = { "formData": (buf, "database_export.zip"), } @@ -3252,7 +3135,7 @@ class TestDatabaseApi(SupersetTestCase): "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "databases/imported_database.yaml": { + "databases/database_1.yaml": { "_schema": [ "Must provide a private key for the ssh tunnel", "Must provide a private key password for the ssh tunnel", # noqa: E501 @@ -3296,22 +3179,10 @@ class TestDatabaseApi(SupersetTestCase): "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7ff90", "version": "1.0.0", } - - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("database_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "database_export/databases/DB_with_expand_rows_enabled.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(db_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file("database", databases=[db_config]) form_data = { "formData": (buf, "database_export.zip"), - "passwords": json.dumps( - {"databases/DB_with_expand_rows_enabled.yaml": "SECRET"} - ), + "passwords": json.dumps({"databases/database_1.yaml": "SECRET"}), } rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") response = json.loads(rv.data.decode("utf-8")) diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index 9d226f793dc..dcb4b808b22 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -16,6 +16,7 @@ # under the License. 
from __future__ import annotations +import copy import unittest from datetime import timedelta from io import BytesIO @@ -61,9 +62,7 @@ from tests.integration_tests.fixtures.energy_dashboard import ( ) from tests.integration_tests.fixtures.importexport import ( database_config, - database_metadata_config, dataset_config, - dataset_metadata_config, dataset_ui_export, ) @@ -79,7 +78,7 @@ class TestDatasetApi(SupersetTestCase): def tearDown(self): for item in self.items_to_delete: db.session.delete(item) - db.session.commit() + db.session.commit() super().tearDown() @staticmethod @@ -193,22 +192,6 @@ class TestDatasetApi(SupersetTestCase): .one() ) - def create_dataset_import(self) -> BytesIO: - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("dataset_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(dataset_metadata_config).encode()) - with bundle.open( - "dataset_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(database_config).encode()) - with bundle.open( - "dataset_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - return buf - @pytest.mark.usefixtures("load_energy_table_with_slice") def test_user_gets_all_datasets(self): # test filtering on datasource_name @@ -2437,7 +2420,7 @@ class TestDatasetApi(SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/dataset/import/" - buf = self.create_dataset_import() + buf = self.create_import_v1_zip_file("dataset") form_data = { "formData": (buf, "dataset_export.zip"), "sync_columns": "true", @@ -2460,10 +2443,7 @@ class TestDatasetApi(SupersetTestCase): assert dataset.table_name == "imported_dataset" assert str(dataset.uuid) == dataset_config["uuid"] - db.session.delete(dataset) - db.session.commit() - db.session.delete(database) - db.session.commit() + self.items_to_delete = [dataset, database] def test_import_dataset_v0_export(self): num_datasets = 
db.session.query(SqlaTable).count() @@ -2500,7 +2480,7 @@ class TestDatasetApi(SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/dataset/import/" - buf = self.create_dataset_import() + buf = self.create_import_v1_zip_file("dataset") form_data = { "formData": (buf, "dataset_export.zip"), } @@ -2511,7 +2491,7 @@ class TestDatasetApi(SupersetTestCase): assert response == {"message": "OK"} # import again without overwrite flag - buf = self.create_dataset_import() + buf = self.create_import_v1_zip_file("dataset") form_data = { "formData": (buf, "dataset_export.zip"), } @@ -2526,7 +2506,7 @@ class TestDatasetApi(SupersetTestCase): "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { - "datasets/imported_dataset.yaml": "Dataset already exists and `overwrite=true` was not passed", # noqa: E501 + "datasets/dataset.yaml": "Dataset already exists and `overwrite=true` was not passed", # noqa: E501 "issue_codes": [ { "code": 1010, @@ -2539,7 +2519,7 @@ class TestDatasetApi(SupersetTestCase): } # import with overwrite flag - buf = self.create_dataset_import() + buf = self.create_import_v1_zip_file("dataset") form_data = { "formData": (buf, "dataset_export.zip"), "overwrite": "true", @@ -2556,10 +2536,7 @@ class TestDatasetApi(SupersetTestCase): ) dataset = database.tables[0] - db.session.delete(dataset) - db.session.commit() - db.session.delete(database) - db.session.commit() + self.items_to_delete = [dataset, database] def test_import_dataset_invalid(self): """ @@ -2569,20 +2546,7 @@ class TestDatasetApi(SupersetTestCase): self.login(ADMIN_USERNAME) uri = "api/v1/dataset/import/" - buf = BytesIO() - with ZipFile(buf, "w") as bundle: - with bundle.open("dataset_export/metadata.yaml", "w") as fp: - fp.write(yaml.safe_dump(database_metadata_config).encode()) - with bundle.open( - "dataset_export/databases/imported_database.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(database_config).encode()) - with bundle.open( - 
"dataset_export/datasets/imported_dataset.yaml", "w" - ) as fp: - fp.write(yaml.safe_dump(dataset_config).encode()) - buf.seek(0) - + buf = self.create_import_v1_zip_file("database", datasets=[dataset_config]) form_data = { "formData": (buf, "dataset_export.zip"), } @@ -2657,6 +2621,67 @@ class TestDatasetApi(SupersetTestCase): ] } + def test_import_dataset_currency_config(self): + """ + Dataset API: Test import metric with currency config. + + This test confirms that importing a metric with a currency config + set as either string (for backwards compatibility) or dict works properly. + """ + self.login(ADMIN_USERNAME) + uri = "api/v1/dataset/import/" + dataset_with_currency = copy.deepcopy(dataset_config) + dataset_with_currency["metrics"][0]["currency"] = { + "symbol": "USD", + "symbolPosition": "left", + } + dataset_with_currency["metrics"].append( + { + "metric_name": "count_new", + "verbose_name": "", + "metric_type": None, + "expression": "count(1)", + "description": None, + "d3format": None, + "extra": {}, + "warning_text": None, + "currency": '{"symbol": "EUR","symbolPosition": "left"}', + } + ) + + buf = self.create_import_v1_zip_file( + "dataset", datasets=[dataset_with_currency] + ) + form_data = { + "formData": (buf, "dataset_export.zip"), + } + rv = self.client.post(uri, data=form_data, content_type="multipart/form-data") + response = json.loads(rv.data.decode("utf-8")) + + assert rv.status_code == 200 + assert response == {"message": "OK"} + + database = ( + db.session.query(Database).filter_by(uuid=database_config["uuid"]).one() + ) + + assert database.database_name == database_config["database_name"] + + assert len(database.tables) == 1 + assert len(database.tables[0].metrics) == 2 + final_metrics = [] + for metric in database.tables[0].metrics: + final_metrics.append(metric.currency) + assert final_metrics == [ + {"symbol": "USD", "symbolPosition": "left"}, + {"symbol": "EUR", "symbolPosition": "left"}, + ] + dataset = database.tables[0] + assert 
dataset.table_name == dataset_with_currency["table_name"] + assert str(dataset.uuid) == dataset_with_currency["uuid"] + + self.items_to_delete = [dataset, database] + @pytest.mark.usefixtures("create_datasets") def test_get_datasets_is_certified_filter(self): """ diff --git a/tests/integration_tests/fixtures/importexport.py b/tests/integration_tests/fixtures/importexport.py index 33aff24541c..40f1cef9e21 100644 --- a/tests/integration_tests/fixtures/importexport.py +++ b/tests/integration_tests/fixtures/importexport.py @@ -344,6 +344,12 @@ dashboard_metadata_config: dict[str, Any] = { "type": "Dashboard", "timestamp": "2020-11-04T21:27:44.423819+00:00", } +metadata_files = { + "database": database_metadata_config, + "dataset": dataset_metadata_config, + "chart": chart_metadata_config, + "dashboard": dashboard_metadata_config, +} saved_queries_metadata_config: dict[str, Any] = { "version": "1.0.0", "type": "SavedQuery",