mirror of
https://github.com/apache/superset.git
synced 2026-04-20 08:34:37 +00:00
feat(datasource): Checkbox for always filtering main dttm in datasource (#25204)
Co-authored-by: Elizabeth Thompson <eschutho@gmail.com>
This commit is contained in:
@@ -53,7 +53,12 @@ def create_table_metadata(
|
||||
|
||||
table = get_table(table_name, database, schema)
|
||||
if not table:
|
||||
table = SqlaTable(schema=schema, table_name=table_name, normalize_columns=False)
|
||||
table = SqlaTable(
|
||||
schema=schema,
|
||||
table_name=table_name,
|
||||
normalize_columns=False,
|
||||
always_filter_main_dttm=False,
|
||||
)
|
||||
if fetch_values_predicate:
|
||||
table.fetch_values_predicate = fetch_values_predicate
|
||||
table.database = database
|
||||
|
||||
@@ -582,6 +582,7 @@ class TestDatasetApi(SupersetTestCase):
|
||||
"schema": None,
|
||||
"table_name": "ab_permission",
|
||||
"normalize_columns": True,
|
||||
"always_filter_main_dttm": False,
|
||||
}
|
||||
uri = "api/v1/dataset/"
|
||||
rv = self.post_assert_metric(uri, table_data, "post")
|
||||
|
||||
@@ -171,6 +171,7 @@ class TestExportDatasetsCommand(SupersetTestCase):
|
||||
},
|
||||
],
|
||||
"normalize_columns": False,
|
||||
"always_filter_main_dttm": False,
|
||||
"offset": 0,
|
||||
"params": None,
|
||||
"schema": get_example_default_schema(),
|
||||
@@ -231,6 +232,7 @@ class TestExportDatasetsCommand(SupersetTestCase):
|
||||
"fetch_values_predicate",
|
||||
"extra",
|
||||
"normalize_columns",
|
||||
"always_filter_main_dttm",
|
||||
"uuid",
|
||||
"metrics",
|
||||
"columns",
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
"""Unit tests for Superset"""
|
||||
import json
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, timedelta
|
||||
from unittest import mock
|
||||
|
||||
import prison
|
||||
@@ -76,6 +77,58 @@ class TestDatasource(SupersetTestCase):
|
||||
col_names, {"num_boys", "num", "gender", "name", "ds", "state", "num_girls"}
|
||||
)
|
||||
|
||||
def test_always_filter_main_dttm(self):
    """Verify that ``SqlaTable.always_filter_main_dttm`` controls whether the
    table's main datetime column is ANDed into the time-range WHERE clause
    in addition to the granularity column selected in the query.

    The query uses ``additional_dttm`` as granularity while the table's
    ``main_dttm_col`` is ``default_dttm``; with the flag off only the
    granularity column may appear in the filter, with it on both must.
    """
    self.login(username="admin")
    session = db.session
    database = get_example_database()

    # NOTE: plain string, not an f-string — there are no placeholders (F541).
    # DATE() is the sqlite spelling; swapped for NOW() on postgres/mysql.
    sql = "SELECT DATE() as default_dttm, DATE() as additional_dttm, 1 as metric;"
    if database.backend == "sqlite":
        pass
    elif database.backend in ["postgresql", "mysql"]:
        sql = sql.replace("DATE()", "NOW()")
    else:
        # Backend not covered by this test — skip silently.
        return

    # Minimal query object: a one-day window around "now" so the generated
    # WHERE clause contains time-range predicates we can inspect.
    query_obj = {
        "columns": ["metric"],
        "filter": [],
        "from_dttm": datetime.now() - timedelta(days=1),
        "granularity": "additional_dttm",
        "orderby": [],
        "to_dttm": datetime.now() + timedelta(days=1),
        "series_columns": [],
        "row_limit": 1000,
        "row_offset": 0,
    }
    table = SqlaTable(
        table_name="dummy_sql_table",
        database=database,
        schema=get_example_default_schema(),
        main_dttm_col="default_dttm",
        columns=[
            TableColumn(column_name="default_dttm", type="DATETIME", is_dttm=True),
            TableColumn(
                column_name="additional_dttm", type="DATETIME", is_dttm=True
            ),
        ],
        sql=sql,
    )

    session.add(table)
    session.commit()

    # Flag off: only the query's granularity column is filtered.
    table.always_filter_main_dttm = False
    result = str(table.get_sqla_query(**query_obj).sqla_query.whereclause)
    assert "default_dttm" not in result and "additional_dttm" in result

    # Flag on: the main dttm column is filtered as well.
    table.always_filter_main_dttm = True
    result = str(table.get_sqla_query(**query_obj).sqla_query.whereclause)
    assert "default_dttm" in result and "additional_dttm" in result

    # Clean up so the dummy table does not leak into other tests.
    session.delete(table)
    session.commit()
|
||||
|
||||
def test_external_metadata_for_virtual_table(self):
|
||||
self.login(username="admin")
|
||||
session = db.session
|
||||
@@ -106,6 +159,7 @@ class TestDatasource(SupersetTestCase):
|
||||
"schema_name": tbl.schema,
|
||||
"table_name": tbl.table_name,
|
||||
"normalize_columns": tbl.normalize_columns,
|
||||
"always_filter_main_dttm": tbl.always_filter_main_dttm,
|
||||
}
|
||||
)
|
||||
url = f"/datasource/external_metadata_by_name/?q={params}"
|
||||
@@ -135,6 +189,7 @@ class TestDatasource(SupersetTestCase):
|
||||
"schema_name": tbl.schema,
|
||||
"table_name": tbl.table_name,
|
||||
"normalize_columns": tbl.normalize_columns,
|
||||
"always_filter_main_dttm": tbl.always_filter_main_dttm,
|
||||
}
|
||||
)
|
||||
url = f"/datasource/external_metadata_by_name/?q={params}"
|
||||
@@ -154,6 +209,7 @@ class TestDatasource(SupersetTestCase):
|
||||
"table_name": "test_table",
|
||||
"schema_name": get_example_default_schema(),
|
||||
"normalize_columns": False,
|
||||
"always_filter_main_dttm": False,
|
||||
}
|
||||
)
|
||||
url = f"/datasource/external_metadata_by_name/?q={params}"
|
||||
@@ -168,6 +224,7 @@ class TestDatasource(SupersetTestCase):
|
||||
"database_name": "foo",
|
||||
"table_name": "bar",
|
||||
"normalize_columns": False,
|
||||
"always_filter_main_dttm": False,
|
||||
}
|
||||
)
|
||||
url = f"/datasource/external_metadata_by_name/?q={params}"
|
||||
@@ -185,6 +242,7 @@ class TestDatasource(SupersetTestCase):
|
||||
"database_name": example_database.database_name,
|
||||
"table_name": "fooooooooobarrrrrr",
|
||||
"normalize_columns": False,
|
||||
"always_filter_main_dttm": False,
|
||||
}
|
||||
)
|
||||
url = f"/datasource/external_metadata_by_name/?q={params}"
|
||||
|
||||
@@ -313,6 +313,7 @@ dashboard_export: dict[str, Any] = {
|
||||
"table_name": "birth_names_2",
|
||||
"template_params": None,
|
||||
"normalize_columns": False,
|
||||
"always_filter_main_dttm": False,
|
||||
}
|
||||
}
|
||||
],
|
||||
@@ -496,6 +497,7 @@ dataset_config: dict[str, Any] = {
|
||||
"params": None,
|
||||
"template_params": {},
|
||||
"normalize_columns": False,
|
||||
"always_filter_main_dttm": False,
|
||||
"filter_select_enabled": True,
|
||||
"fetch_values_predicate": None,
|
||||
"extra": '{ "certification": { "certified_by": "Data Platform Team", "details": "This table is the source of truth." }, "warning_markdown": "This is a warning." }',
|
||||
|
||||
Reference in New Issue
Block a user