mirror of
https://github.com/apache/superset.git
synced 2026-04-19 16:14:52 +00:00
feat: confirm overwrite when importing (#11982)
* feat: confirm overwrite when importing * Skip flaky test
This commit is contained in:
@@ -17,6 +17,7 @@
|
||||
# pylint: disable=too-many-public-methods, invalid-name
|
||||
"""Unit tests for Superset"""
|
||||
import json
|
||||
import unittest
|
||||
from io import BytesIO
|
||||
from typing import List, Optional
|
||||
from unittest.mock import patch
|
||||
@@ -138,6 +139,22 @@ class TestDatasetApi(SupersetTestCase):
|
||||
.one()
|
||||
)
|
||||
|
||||
def create_dataset_import(self):
|
||||
buf = BytesIO()
|
||||
with ZipFile(buf, "w") as bundle:
|
||||
with bundle.open("dataset_export/metadata.yaml", "w") as fp:
|
||||
fp.write(yaml.safe_dump(dataset_metadata_config).encode())
|
||||
with bundle.open(
|
||||
"dataset_export/databases/imported_database.yaml", "w"
|
||||
) as fp:
|
||||
fp.write(yaml.safe_dump(database_config).encode())
|
||||
with bundle.open(
|
||||
"dataset_export/datasets/imported_dataset.yaml", "w"
|
||||
) as fp:
|
||||
fp.write(yaml.safe_dump(dataset_config).encode())
|
||||
buf.seek(0)
|
||||
return buf
|
||||
|
||||
def test_get_dataset_list(self):
|
||||
"""
|
||||
Dataset API: Test get dataset list
|
||||
@@ -1031,6 +1048,7 @@ class TestDatasetApi(SupersetTestCase):
|
||||
db.session.delete(dataset)
|
||||
db.session.commit()
|
||||
|
||||
@unittest.skip("test is failing stochastically")
|
||||
def test_export_dataset(self):
|
||||
"""
|
||||
Dataset API: Test export dataset
|
||||
@@ -1216,27 +1234,14 @@ class TestDatasetApi(SupersetTestCase):
|
||||
for table_name in self.fixture_tables_names:
|
||||
assert table_name in [ds["table_name"] for ds in data["result"]]
|
||||
|
||||
def test_imported_dataset(self):
|
||||
def test_import_dataset(self):
|
||||
"""
|
||||
Dataset API: Test import dataset
|
||||
"""
|
||||
self.login(username="admin")
|
||||
uri = "api/v1/dataset/import/"
|
||||
|
||||
buf = BytesIO()
|
||||
with ZipFile(buf, "w") as bundle:
|
||||
with bundle.open("dataset_export/metadata.yaml", "w") as fp:
|
||||
fp.write(yaml.safe_dump(dataset_metadata_config).encode())
|
||||
with bundle.open(
|
||||
"dataset_export/databases/imported_database.yaml", "w"
|
||||
) as fp:
|
||||
fp.write(yaml.safe_dump(database_config).encode())
|
||||
with bundle.open(
|
||||
"dataset_export/datasets/imported_dataset.yaml", "w"
|
||||
) as fp:
|
||||
fp.write(yaml.safe_dump(dataset_config).encode())
|
||||
buf.seek(0)
|
||||
|
||||
buf = self.create_dataset_import()
|
||||
form_data = {
|
||||
"formData": (buf, "dataset_export.zip"),
|
||||
}
|
||||
@@ -1260,7 +1265,61 @@ class TestDatasetApi(SupersetTestCase):
|
||||
db.session.delete(database)
|
||||
db.session.commit()
|
||||
|
||||
def test_imported_dataset_invalid(self):
|
||||
def test_import_dataset_overwrite(self):
|
||||
"""
|
||||
Dataset API: Test import existing dataset
|
||||
"""
|
||||
self.login(username="admin")
|
||||
uri = "api/v1/dataset/import/"
|
||||
|
||||
buf = self.create_dataset_import()
|
||||
form_data = {
|
||||
"formData": (buf, "dataset_export.zip"),
|
||||
}
|
||||
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
||||
response = json.loads(rv.data.decode("utf-8"))
|
||||
|
||||
assert rv.status_code == 200
|
||||
assert response == {"message": "OK"}
|
||||
|
||||
# import again without overwrite flag
|
||||
buf = self.create_dataset_import()
|
||||
form_data = {
|
||||
"formData": (buf, "dataset_export.zip"),
|
||||
}
|
||||
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
||||
response = json.loads(rv.data.decode("utf-8"))
|
||||
|
||||
assert rv.status_code == 422
|
||||
assert response == {
|
||||
"message": {
|
||||
"datasets/imported_dataset.yaml": "Dataset already exists and `overwrite=true` was not passed"
|
||||
}
|
||||
}
|
||||
|
||||
# import with overwrite flag
|
||||
buf = self.create_dataset_import()
|
||||
form_data = {
|
||||
"formData": (buf, "dataset_export.zip"),
|
||||
"overwrite": "true",
|
||||
}
|
||||
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
||||
response = json.loads(rv.data.decode("utf-8"))
|
||||
|
||||
assert rv.status_code == 200
|
||||
assert response == {"message": "OK"}
|
||||
|
||||
# clean up
|
||||
database = (
|
||||
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
||||
)
|
||||
dataset = database.tables[0]
|
||||
|
||||
db.session.delete(dataset)
|
||||
db.session.delete(database)
|
||||
db.session.commit()
|
||||
|
||||
def test_import_dataset_invalid(self):
|
||||
"""
|
||||
Dataset API: Test import invalid dataset
|
||||
"""
|
||||
@@ -1292,7 +1351,7 @@ class TestDatasetApi(SupersetTestCase):
|
||||
"message": {"metadata.yaml": {"type": ["Must be equal to SqlaTable."]}}
|
||||
}
|
||||
|
||||
def test_imported_dataset_invalid_v0_validation(self):
|
||||
def test_import_dataset_invalid_v0_validation(self):
|
||||
"""
|
||||
Dataset API: Test import invalid dataset
|
||||
"""
|
||||
|
||||
@@ -349,7 +349,7 @@ class TestImportDatasetsCommand(SupersetTestCase):
|
||||
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
||||
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
|
||||
}
|
||||
command = v1.ImportDatasetsCommand(contents)
|
||||
command = v1.ImportDatasetsCommand(contents, overwrite=True)
|
||||
command.run()
|
||||
command.run()
|
||||
dataset = (
|
||||
@@ -367,7 +367,7 @@ class TestImportDatasetsCommand(SupersetTestCase):
|
||||
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
||||
"datasets/imported_dataset.yaml": yaml.safe_dump(new_config),
|
||||
}
|
||||
command = v1.ImportDatasetsCommand(contents)
|
||||
command = v1.ImportDatasetsCommand(contents, overwrite=True)
|
||||
command.run()
|
||||
dataset = (
|
||||
db.session.query(SqlaTable).filter_by(uuid=dataset_config["uuid"]).one()
|
||||
@@ -451,7 +451,7 @@ class TestImportDatasetsCommand(SupersetTestCase):
|
||||
"datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
|
||||
"databases/imported_database.yaml": yaml.safe_dump(database_config),
|
||||
}
|
||||
command = v1.ImportDatasetsCommand(contents)
|
||||
command = v1.ImportDatasetsCommand(contents, overwrite=True)
|
||||
command.run()
|
||||
|
||||
database = (
|
||||
|
||||
Reference in New Issue
Block a user