refactor(db_engine_specs): Removing top-level import of app (#14366)

Co-authored-by: John Bodley <john.bodley@airbnb.com>
This commit is contained in:
John Bodley
2021-04-28 15:47:32 +12:00
committed by GitHub
parent 77d17152bc
commit d8bb2d3e62
7 changed files with 143 additions and 142 deletions

View File

@@ -14,8 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from tests.test_app import app # isort:skip
from superset.db_engine_specs.athena import AthenaEngineSpec
from tests.db_engine_specs.base_tests import TestDbEngineSpec

View File

@@ -167,25 +167,6 @@ class TestDbEngineSpecs(TestDbEngineSpec):
"SELECT * FROM table", "SELECT * FROM table", DummyEngineSpec
)
def test_time_grain_denylist(self):
with app.app_context():
app.config["TIME_GRAIN_DENYLIST"] = ["PT1M"]
time_grain_functions = SqliteEngineSpec.get_time_grain_expressions()
self.assertNotIn("PT1M", time_grain_functions)
def test_time_grain_addons(self):
with app.app_context():
app.config["TIME_GRAIN_ADDONS"] = {"PTXM": "x seconds"}
app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {
"sqlite": {"PTXM": "ABC({col})"}
}
time_grains = SqliteEngineSpec.get_time_grains()
time_grain_addon = time_grains[-1]
self.assertEqual("PTXM", time_grain_addon.duration)
self.assertEqual("x seconds", time_grain_addon.label)
app.config["TIME_GRAIN_ADDONS"] = {}
app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {}
def test_engine_time_grain_validity(self):
time_grains = set(builtin_time_grains.keys())
# loop over all subclasses of BaseEngineSpec
@@ -198,43 +179,6 @@ class TestDbEngineSpecs(TestDbEngineSpec):
intersection = time_grains.intersection(defined_grains)
self.assertSetEqual(defined_grains, intersection, engine)
def test_get_time_grain_with_config(self):
""" Should concatenate from configs and then sort in the proper order """
app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {
"mysql": {
"PT2H": "foo",
"PT4H": "foo",
"PT6H": "foo",
"PT8H": "foo",
"PT10H": "foo",
"PT12H": "foo",
"PT1S": "foo",
}
}
time_grains = MySQLEngineSpec.get_time_grain_expressions()
self.assertEqual(
list(time_grains.keys()),
[
None,
"PT1S",
"PT1M",
"PT1H",
"PT2H",
"PT4H",
"PT6H",
"PT8H",
"PT10H",
"PT12H",
"P1D",
"P1W",
"P1M",
"P0.25Y",
"P1Y",
"1969-12-29T00:00:00Z/P1W",
],
)
app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {}
def test_get_time_grain_expressions(self):
time_grains = MySQLEngineSpec.get_time_grain_expressions()
self.assertEqual(
@@ -253,18 +197,6 @@ class TestDbEngineSpecs(TestDbEngineSpec):
],
)
def test_get_time_grain_with_unkown_values(self):
"""Should concatenate from configs and then sort in the proper order
putting unknown patterns at the end"""
app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {
"mysql": {"PT2H": "foo", "weird": "foo", "PT12H": "foo",}
}
time_grains = MySQLEngineSpec.get_time_grain_expressions()
self.assertEqual(
list(time_grains)[-1], "weird",
)
app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {}
def test_get_table_names(self):
inspector = mock.Mock()
inspector.get_table_names = mock.Mock(return_value=["schema.table", "table_2"])
@@ -339,3 +271,84 @@ def test_is_readonly():
assert is_readonly("WITH (SELECT 1) bla SELECT * from bla")
assert is_readonly("SHOW CATALOGS")
assert is_readonly("SHOW TABLES")
def test_time_grain_denylist():
    """Grains listed in TIME_GRAIN_DENYLIST must be excluded from the spec's grains."""
    config = app.config.copy()
    app.config["TIME_GRAIN_DENYLIST"] = ["PT1M"]
    try:
        with app.app_context():
            time_grain_functions = SqliteEngineSpec.get_time_grain_expressions()
            # idiomatic `not in` (original used `assert not "PT1M" in ...`)
            assert "PT1M" not in time_grain_functions
    finally:
        # Restore the original config even if the assertion fails, so the
        # denylist does not leak into later tests.
        app.config = config
def test_time_grain_addons():
    """A TIME_GRAIN_ADDONS entry with a matching addon expression becomes the last grain."""
    config = app.config.copy()
    app.config["TIME_GRAIN_ADDONS"] = {"PTXM": "x seconds"}
    app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {"sqlite": {"PTXM": "ABC({col})"}}
    try:
        with app.app_context():
            time_grains = SqliteEngineSpec.get_time_grains()
            # Addons are appended after the built-in grains.
            time_grain_addon = time_grains[-1]
            assert time_grain_addon.duration == "PTXM"
            assert time_grain_addon.label == "x seconds"
    finally:
        # Restore config even on failure so later tests see a clean app.
        app.config = config
def test_get_time_grain_with_config():
    """Addon expressions from config are merged in and sorted into the proper order."""
    config = app.config.copy()
    app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {
        "mysql": {
            "PT2H": "foo",
            "PT4H": "foo",
            "PT6H": "foo",
            "PT8H": "foo",
            "PT10H": "foo",
            "PT12H": "foo",
            "PT1S": "foo",
        }
    }
    try:
        with app.app_context():
            time_grains = MySQLEngineSpec.get_time_grain_expressions()
            # Compare as a list: the docstring promises sorted order, and a
            # set comparison (as in the original) cannot verify ordering.
            assert list(time_grains.keys()) == [
                None,
                "PT1S",
                "PT1M",
                "PT1H",
                "PT2H",
                "PT4H",
                "PT6H",
                "PT8H",
                "PT10H",
                "PT12H",
                "P1D",
                "P1W",
                "P1M",
                "P0.25Y",
                "P1Y",
                "1969-12-29T00:00:00Z/P1W",
            ]
    finally:
        # Restore config even on failure so later tests see a clean app.
        app.config = config
def test_get_time_grain_with_unkown_values():
    """Config grains are merged and sorted, with unknown duration patterns at the end."""
    # NOTE(review): "unkown" typo is kept in the function name to preserve the
    # public test identifier.
    config = app.config.copy()
    app.config["TIME_GRAIN_ADDON_EXPRESSIONS"] = {
        "mysql": {"PT2H": "foo", "weird": "foo", "PT12H": "foo"}
    }
    try:
        with app.app_context():
            time_grains = MySQLEngineSpec.get_time_grain_expressions()
            # "weird" is not a valid ISO duration, so it must sort last.
            assert list(time_grains)[-1] == "weird"
    finally:
        # Restore config even on failure so later tests see a clean app.
        app.config = config

View File

@@ -21,12 +21,11 @@ from unittest import mock
import pytest
import pandas as pd
from sqlalchemy.sql import select
from tests.test_app import app
with app.app_context():
from superset.db_engine_specs.hive import HiveEngineSpec, upload_to_s3
from superset.db_engine_specs.hive import HiveEngineSpec, upload_to_s3
from superset.exceptions import SupersetException
from superset.sql_parse import Table, ParsedQuery
from tests.test_app import app
def test_0_progress():
@@ -170,10 +169,6 @@ def test_df_to_csv() -> None:
)
@mock.patch(
"superset.db_engine_specs.hive.config",
{**app.config, "CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC": lambda *args: ""},
)
@mock.patch("superset.db_engine_specs.hive.g", spec={})
def test_df_to_sql_if_exists_fail(mock_g):
mock_g.user = True
@@ -185,10 +180,6 @@ def test_df_to_sql_if_exists_fail(mock_g):
)
@mock.patch(
"superset.db_engine_specs.hive.config",
{**app.config, "CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC": lambda *args: ""},
)
@mock.patch("superset.db_engine_specs.hive.g", spec={})
def test_df_to_sql_if_exists_fail_with_schema(mock_g):
mock_g.user = True
@@ -203,13 +194,11 @@ def test_df_to_sql_if_exists_fail_with_schema(mock_g):
)
@mock.patch(
"superset.db_engine_specs.hive.config",
{**app.config, "CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC": lambda *args: ""},
)
@mock.patch("superset.db_engine_specs.hive.g", spec={})
@mock.patch("superset.db_engine_specs.hive.upload_to_s3")
def test_df_to_sql_if_exists_replace(mock_upload_to_s3, mock_g):
config = app.config.copy()
app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: ""
mock_upload_to_s3.return_value = "mock-location"
mock_g.user = True
mock_database = mock.MagicMock()
@@ -218,23 +207,23 @@ def test_df_to_sql_if_exists_replace(mock_upload_to_s3, mock_g):
mock_database.get_sqla_engine.return_value.execute = mock_execute
table_name = "foobar"
HiveEngineSpec.df_to_sql(
mock_database,
Table(table=table_name),
pd.DataFrame(),
{"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"},
)
with app.app_context():
HiveEngineSpec.df_to_sql(
mock_database,
Table(table=table_name),
pd.DataFrame(),
{"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"},
)
mock_execute.assert_any_call(f"DROP TABLE IF EXISTS {table_name}")
app.config = config
@mock.patch(
"superset.db_engine_specs.hive.config",
{**app.config, "CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC": lambda *args: ""},
)
@mock.patch("superset.db_engine_specs.hive.g", spec={})
@mock.patch("superset.db_engine_specs.hive.upload_to_s3")
def test_df_to_sql_if_exists_replace_with_schema(mock_upload_to_s3, mock_g):
config = app.config.copy()
app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: ""
mock_upload_to_s3.return_value = "mock-location"
mock_g.user = True
mock_database = mock.MagicMock()
@@ -244,14 +233,16 @@ def test_df_to_sql_if_exists_replace_with_schema(mock_upload_to_s3, mock_g):
table_name = "foobar"
schema = "schema"
HiveEngineSpec.df_to_sql(
mock_database,
Table(table=table_name, schema=schema),
pd.DataFrame(),
{"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"},
)
with app.app_context():
HiveEngineSpec.df_to_sql(
mock_database,
Table(table=table_name, schema=schema),
pd.DataFrame(),
{"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"},
)
mock_execute.assert_any_call(f"DROP TABLE IF EXISTS {schema}.{table_name}")
app.config = config
def test_is_readonly():
@@ -284,39 +275,42 @@ def test_s3_upload_prefix(schema: str, upload_prefix: str) -> None:
def test_upload_to_s3_no_bucket_path():
    """upload_to_s3 raises with a clear message when no upload bucket is configured."""
    # Reconstructed post-change version: the diff view duplicated the
    # pre-change body (without app_context) above the new one.
    with app.app_context():
        with pytest.raises(
            Exception,
            match="No upload bucket specified. You can specify one in the config file.",
        ):
            upload_to_s3("filename", "prefix", Table("table"))
@mock.patch("boto3.client")
def test_upload_to_s3_client_error(client):
    """upload_to_s3 propagates a botocore ClientError raised by the upload call."""
    from botocore.exceptions import ClientError

    config = app.config.copy()
    app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"] = "bucket"
    client.return_value.upload_file.side_effect = ClientError(
        {"Error": {}}, "operation_name"
    )
    try:
        with app.app_context():
            with pytest.raises(ClientError):
                upload_to_s3("filename", "prefix", Table("table"))
    finally:
        # Restore config even on failure so later tests see a clean app.
        app.config = config
@mock.patch("boto3.client")
def test_upload_to_s3_success(client):
    """upload_to_s3 returns the s3a:// location when the upload succeeds."""
    config = app.config.copy()
    app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"] = "bucket"
    client.return_value.upload_file.return_value = True
    try:
        with app.app_context():
            location = upload_to_s3("filename", "prefix", Table("table"))
            # Plain string literal: the original used an f-string with no
            # placeholders.
            assert location == "s3a://bucket/prefix/table"
    finally:
        # Restore config even on failure so later tests see a clean app.
        app.config = config
def test_fetch_data_query_error():