fix: export/import catalogs (#28408)

This commit is contained in:
Beto Dealmeida
2024-05-09 14:42:03 -04:00
committed by GitHub
parent ba2cf5dbbc
commit e6a85c5901
9 changed files with 105 additions and 2 deletions

View File

@@ -15,6 +15,8 @@
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from datetime import timedelta
from functools import wraps

View File

@@ -18,10 +18,14 @@
import pytest
from pytest_mock import MockerFixture
from sqlalchemy import create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.session import Session
from superset.connectors.sqla.models import SqlaTable
from superset.daos.dataset import DatasetDAO
from superset.exceptions import OAuth2RedirectError
from superset.models.core import Database
from superset.sql_parse import Table
from superset.superset_typing import QueryObjectDict
@@ -187,3 +191,75 @@ def test_query_datasources_by_permissions_with_catalog_schema(
"tables.schema_perm IN ('[my_db].[db1].[schema1]', '[my_other_db].[schema]') OR "
"tables.catalog_perm IN ('[my_db].[db1]')"
)
def test_dataset_uniqueness(session: Session) -> None:
    """
    Test dataset uniqueness constraints.

    Datasets are unique on (database, catalog, schema, table_name) at the
    database level, but because in SQL ``NULL != NULL`` the constraint does
    not fire when the catalog is ``NULL``; in that case uniqueness is
    enforced at the application level by ``DatasetDAO.validate_uniqueness``.
    """
    Database.metadata.create_all(session.bind)
    database = Database(database_name="my_db", sqlalchemy_uri="sqlite://")

    def make_dataset(catalog):
        # All datasets share the same schema/table name; only the catalog varies.
        return SqlaTable(
            database=database,
            catalog=catalog,
            schema="schema",
            table_name="table",
        )

    # prod.schema.table and dev.schema.table can coexist
    session.add(make_dataset("prod"))
    session.commit()
    session.add(make_dataset("dev"))
    session.commit()

    # inserting dev.schema.table a second time violates the unique constraint
    session.add(make_dataset("dev"))
    with pytest.raises(IntegrityError):
        session.commit()
    session.rollback()

    # schema.table (catalog is NULL) can be inserted...
    session.add(make_dataset(None))
    session.commit()

    # ...and inserted again, because in SQL `NULL != NULL`
    session.add(make_dataset(None))
    session.commit()

    # the DAO, however, enforces application-level uniqueness
    assert not DatasetDAO.validate_uniqueness(
        database.id,
        Table("table", "schema", None),
    )
    assert DatasetDAO.validate_uniqueness(
        database.id,
        Table("table", "schema", "some_catalog"),
    )

View File

@@ -68,6 +68,7 @@ def test_export(session: Session) -> None:
description="This is the description",
is_featured=1,
cache_timeout=3600,
catalog="public",
schema="my_schema",
sql=None,
params=json.dumps(
@@ -111,6 +112,7 @@ description: This is the description
default_endpoint: null
offset: -8
cache_timeout: 3600
catalog: public
schema: my_schema
sql: null
params:

View File

@@ -61,6 +61,7 @@ def test_import_dataset(mocker: MockFixture, session: Session) -> None:
"default_endpoint": None,
"offset": -8,
"cache_timeout": 3600,
"catalog": "public",
"schema": "my_schema",
"sql": None,
"params": {
@@ -115,6 +116,7 @@ def test_import_dataset(mocker: MockFixture, session: Session) -> None:
assert sqla_table.default_endpoint is None
assert sqla_table.offset == -8
assert sqla_table.cache_timeout == 3600
assert sqla_table.catalog == "public"
assert sqla_table.schema == "my_schema"
assert sqla_table.sql is None
assert sqla_table.params == json.dumps(