Mirror of https://github.com/apache/superset.git, synced 2026-04-19 08:04:53 +00:00.
fix: Time Offset in SQLite and refine logic in Date Type conversion (#21378)
This commit is contained in:
@@ -402,3 +402,13 @@ only_postgresql = pytest.mark.skipif(
|
||||
"postgresql" not in os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", ""),
|
||||
reason="Only run test case in Postgresql",
|
||||
)
|
||||
|
||||
# Markers that restrict a test to a single database backend, decided by the
# SQLAlchemy URI configured for this test run.
_database_uri = os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", "")

only_sqlite = pytest.mark.skipif(
    "sqlite" not in _database_uri,
    reason="Only run test case in SQLite",
)

only_mysql = pytest.mark.skipif(
    "mysql" not in _database_uri,
    reason="Only run test case in MySQL",
)
@@ -18,6 +18,8 @@ import re
|
||||
import time
|
||||
from typing import Any, Dict
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import pytest
|
||||
from pandas import DateOffset
|
||||
|
||||
@@ -39,7 +41,7 @@ from superset.utils.core import (
|
||||
)
|
||||
from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR
|
||||
from tests.integration_tests.base_tests import SupersetTestCase
|
||||
from tests.integration_tests.conftest import only_postgresql
|
||||
from tests.integration_tests.conftest import only_postgresql, only_sqlite
|
||||
from tests.integration_tests.fixtures.birth_names_dashboard import (
|
||||
load_birth_names_dashboard_with_slices,
|
||||
load_birth_names_data,
|
||||
@@ -910,3 +912,109 @@ def test_non_date_adhoc_column(app_context, physical_dataset):
|
||||
df = qc.get_df_payload(query_object)["df"]
|
||||
assert df["ADHOC COLUMN"][0] == 0
|
||||
assert df["ADHOC COLUMN"][1] == 10
|
||||
|
||||
|
||||
@only_sqlite
def test_time_grain_and_time_offset_with_base_axis(app_context, physical_dataset):
    """Quarterly (P3M) grain on a BASE_AXIS column combined with a 3-month offset."""
    axis_column: AdhocColumn = {
        "label": "col6",
        "sqlExpression": "col6",
        "columnType": "BASE_AXIS",
        "timeGrain": "P3M",
    }
    sum_metric = {
        "label": "SUM(col1)",
        "expressionType": "SQL",
        "sqlExpression": "SUM(col1)",
    }
    query_context = QueryContextFactory().create(
        datasource={
            "type": physical_dataset.type,
            "id": physical_dataset.id,
        },
        queries=[
            {
                "columns": [axis_column],
                "metrics": [sum_metric],
                "time_offsets": ["3 month ago"],
                "granularity": "col6",
                "time_range": "2002-01 : 2003-01",
            }
        ],
        result_type=ChartDataResultType.FULL,
        force=True,
    )
    first_query = query_context.queries[0]
    result_df = query_context.get_df_payload(first_query)["df"]
    # todo: MySQL returns integer and float column as object type
    #
    # Expected payload:
    #          col6  SUM(col1)  SUM(col1)__3 month ago
    # 0  2002-01-01          3                     NaN
    # 1  2002-04-01         12                     3.0
    # 2  2002-07-01         21                    12.0
    # 3  2002-10-01          9                    21.0
    expected_df = pd.DataFrame(
        data={
            "col6": pd.to_datetime(
                ["2002-01-01", "2002-04-01", "2002-07-01", "2002-10-01"]
            ),
            "SUM(col1)": [3, 12, 21, 9],
            "SUM(col1)__3 month ago": [np.nan, 3, 12, 21],
        }
    )
    assert result_df.equals(expected_df)
@only_sqlite
def test_time_grain_and_time_offset_on_legacy_query(app_context, physical_dataset):
    """Legacy timeseries query (no BASE_AXIS column) with a P3M grain and offset."""
    sum_metric = {
        "label": "SUM(col1)",
        "expressionType": "SQL",
        "sqlExpression": "SUM(col1)",
    }
    query_context = QueryContextFactory().create(
        datasource={
            "type": physical_dataset.type,
            "id": physical_dataset.id,
        },
        queries=[
            {
                "columns": [],
                "extras": {
                    "time_grain_sqla": "P3M",
                },
                "metrics": [sum_metric],
                "time_offsets": ["3 month ago"],
                "granularity": "col6",
                "time_range": "2002-01 : 2003-01",
                "is_timeseries": True,
            }
        ],
        result_type=ChartDataResultType.FULL,
        force=True,
    )
    first_query = query_context.queries[0]
    result_df = query_context.get_df_payload(first_query)["df"]
    # todo: MySQL returns integer and float column as object type
    #
    # Expected payload:
    #   __timestamp  SUM(col1)  SUM(col1)__3 month ago
    # 0  2002-01-01          3                     NaN
    # 1  2002-04-01         12                     3.0
    # 2  2002-07-01         21                    12.0
    # 3  2002-10-01          9                    21.0
    expected_df = pd.DataFrame(
        data={
            "__timestamp": pd.to_datetime(
                ["2002-01-01", "2002-04-01", "2002-07-01", "2002-10-01"]
            ),
            "SUM(col1)": [3, 12, 21, 9],
            "SUM(col1)__3 month ago": [np.nan, 3, 12, 21],
        }
    )
    assert result_df.equals(expected_df)
@@ -70,6 +70,7 @@ from superset.utils.core import (
|
||||
validate_json,
|
||||
zlib_compress,
|
||||
zlib_decompress,
|
||||
DateColumn,
|
||||
)
|
||||
from superset.utils.database import get_or_create_db
|
||||
from superset.utils import schema
|
||||
@@ -1062,7 +1063,18 @@ class TestUtils(SupersetTestCase):
|
||||
time_shift: Optional[timedelta],
|
||||
) -> pd.DataFrame:
|
||||
df = df.copy()
|
||||
normalize_dttm_col(df, timestamp_format, offset, time_shift)
|
||||
normalize_dttm_col(
|
||||
df,
|
||||
tuple(
|
||||
[
|
||||
DateColumn.get_legacy_time_column(
|
||||
timestamp_format=timestamp_format,
|
||||
offset=offset,
|
||||
time_shift=time_shift,
|
||||
)
|
||||
]
|
||||
),
|
||||
)
|
||||
return df
|
||||
|
||||
ts = pd.Timestamp(2021, 2, 15, 19, 0, 0, 0)
|
||||
|
||||
Reference in New Issue
Block a user