chore: enforce more ruff rules (#31447)

Co-authored-by: Elizabeth Thompson <eschutho@gmail.com>
Author: Maxime Beauchemin
Date: 2024-12-18 17:41:34 -08:00 (committed via GitHub)
Commit: e51b95ffa8 (parent: 9da65d6bfd)
375 changed files with 1821 additions and 1718 deletions
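
The suppressions added across these test modules fall into a handful of ruff rule families: N813 (a CamelCase class imported under a lowercase alias), E501 (line too long), PT011 (a bare pytest.raises(ValueError) with no match pattern), S105/S106 (hardcoded-credential heuristics tripped by fake test secrets), and S608 (SQL assembled through string interpolation). Short illustrative sketches of each rule follow the first hunk where it appears below.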


@@ -43,7 +43,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.athena import AthenaEngineSpec as spec
from superset.db_engine_specs.athena import AthenaEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
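
N813 comes from pep8-naming: it flags a CamelCase name imported under a lowercase alias. These tests deliberately alias every engine-spec class as spec so the assertion helpers read uniformly, so the rule is waived inline; presumably to keep the suppressed line within the length limit, longer imports are wrapped in parentheses first (see the databricks hunk below). A minimal, self-contained sketch of what triggers the rule and a compliant alternative:

# Triggers N813: a CamelCase name imported under a lowercase alias.
from collections import OrderedDict as odict  # noqa: N813

# Rule-compliant alternative: keep the CamelCase name on the import and rebind it.
from collections import OrderedDict

spec = OrderedDict  # only the import alias is checked; a plain assignment is fine
assert odict is spec
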
@@ -59,7 +59,7 @@ def test_extract_errors() -> None:
result = AthenaEngineSpec.extract_errors(Exception(msg))
assert result == [
SupersetError(
message='Please check your query for syntax errors at or near "from_". Then, try running your query again.',
message='Please check your query for syntax errors at or near "from_". Then, try running your query again.', # noqa: E501
error_type=SupersetErrorType.SYNTAX_ERROR,
level=ErrorLevel.ERROR,
extra={
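
E501 is the plain line-length check (ruff defaults to 88 columns; the project's configured limit may differ). The long literal error messages in these assertions are waived per line rather than wrapped; the alternative, sketched below with the same message, is implicit string concatenation:

# Adjacent string fragments keep each physical line short without changing the value,
# at the cost of making the full message harder to grep for verbatim.
message = (
    'Please check your query for syntax errors at or near "from_". '
    "Then, try running your query again."
)
assert "from_" in message
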


@@ -90,7 +90,7 @@ def test_validate_db_uri(mocker: MockerFixture) -> None:
from superset.db_engine_specs.base import BaseEngineSpec
with pytest.raises(ValueError):
with pytest.raises(ValueError): # noqa: PT011
BaseEngineSpec.validate_database_uri(URL.create("sqlite"))
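
PT011, from flake8-pytest-style, treats a bare pytest.raises(ValueError) as too broad: ValueError can originate anywhere inside the with block, so the rule asks for a match= pattern or a more specific exception type. These tests keep the broad form and waive the rule; the stricter spelling looks like this (parse_port is a hypothetical helper used only for illustration):

import pytest


def parse_port(value: str) -> int:
    """Hypothetical helper that raises the ValueError the test expects."""
    port = int(value)
    if not 0 < port < 65536:
        raise ValueError(f"port out of range: {port}")
    return port


def test_parse_port_rejects_out_of_range() -> None:
    # Narrowing with match= documents exactly which ValueError is expected.
    with pytest.raises(ValueError, match="port out of range"):
        parse_port("70000")
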
@@ -164,7 +164,9 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec as spec
from superset.db_engine_specs.databricks import (
DatabricksNativeEngineSpec as spec, # noqa: N813
)
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)


@@ -344,11 +344,11 @@ def test_parse_error_message() -> None:
(job ID: ddf30b05-44e8-4fbf-aa29-40bfccaed886)
-----Query Job SQL Follows-----
| . | . | . |\n 1:select * from case_detail_all_suites\n 2:LIMIT 1001\n | . | . | . |
"""
""" # noqa: E501
from superset.db_engine_specs.bigquery import BigQueryEngineSpec
message = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).\n\n(job ID: ddf30b05-44e8-4fbf-aa29-40bfccaed886)\n\n -----Query Job SQL Follows----- \n\n | . | . | . |\n 1:select * from case_detail_all_suites\n 2:LIMIT 1001\n | . | . | . |'
expected_result = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).'
message = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).\n\n(job ID: ddf30b05-44e8-4fbf-aa29-40bfccaed886)\n\n -----Query Job SQL Follows----- \n\n | . | . | . |\n 1:select * from case_detail_all_suites\n 2:LIMIT 1001\n | . | . | . |' # noqa: E501
expected_result = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).' # noqa: E501
assert (
str(BigQueryEngineSpec.parse_error_exception(Exception(message)))
== expected_result
@@ -362,12 +362,12 @@ def test_parse_error_raises_exception() -> None:
Example errors:
400 Syntax error: Expected "(" or keyword UNNEST but got "@" at [4:80]
bigquery error: 400 Table \"case_detail_all_suites\" must be qualified with a dataset (e.g. dataset.table).
"""
""" # noqa: E501
from superset.db_engine_specs.bigquery import BigQueryEngineSpec
message = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).'
message = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).' # noqa: E501
message_2 = "6"
expected_result = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).'
expected_result = 'bigquery error: 400 Syntax error: Table "case_detail_all_suites" must be qualified with a dataset (e.g. dataset.table).' # noqa: E501
assert (
str(BigQueryEngineSpec.parse_error_exception(Exception(message)))
== expected_result
@@ -393,7 +393,9 @@ def test_convert_dttm(
"""
DB Eng Specs (bigquery): Test conversion to date time
"""
from superset.db_engine_specs.bigquery import BigQueryEngineSpec as spec
from superset.db_engine_specs.bigquery import (
BigQueryEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -54,7 +54,9 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec as spec
from superset.db_engine_specs.clickhouse import (
ClickHouseEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -86,7 +88,9 @@ def test_connect_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec as spec
from superset.db_engine_specs.clickhouse import (
ClickHouseEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -200,7 +204,9 @@ def test_connect_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.clickhouse import ClickHouseConnectEngineSpec as spec
from superset.db_engine_specs.clickhouse import (
ClickHouseConnectEngineSpec as spec, # noqa: N813
)
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)
@@ -213,7 +219,9 @@ def test_connect_get_column_spec(
],
)
def test_connect_make_label_compatible(column_name: str, expected_result: str) -> None:
from superset.db_engine_specs.clickhouse import ClickHouseConnectEngineSpec as spec
from superset.db_engine_specs.clickhouse import (
ClickHouseConnectEngineSpec as spec, # noqa: N813
)
label = spec.make_label_compatible(column_name)
assert label == expected_result


@@ -62,7 +62,9 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.couchbase import CouchbaseEngineSpec as spec
from superset.db_engine_specs.couchbase import (
CouchbaseEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -88,6 +90,8 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.couchbase import CouchbaseEngineSpec as spec
from superset.db_engine_specs.couchbase import (
CouchbaseEngineSpec as spec, # noqa: N813
)
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)


@@ -68,6 +68,6 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.crate import CrateEngineSpec as spec
from superset.db_engine_specs.crate import CrateEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -37,6 +37,8 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.cockroachdb import CockroachDbEngineSpec as spec
from superset.db_engine_specs.cockroachdb import (
CockroachDbEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -53,7 +53,9 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.databend import DatabendEngineSpec as spec
from superset.db_engine_specs.databend import (
DatabendEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -115,7 +117,9 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.databend import DatabendConnectEngineSpec as spec
from superset.db_engine_specs.databend import (
DatabendConnectEngineSpec as spec, # noqa: N813
)
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)
@@ -128,7 +132,9 @@ def test_get_column_spec(
],
)
def test_make_label_compatible(column_name: str, expected_result: str) -> None:
from superset.db_engine_specs.databend import DatabendConnectEngineSpec as spec
from superset.db_engine_specs.databend import (
DatabendConnectEngineSpec as spec, # noqa: N813
)
label = spec.make_label_compatible(column_name)
assert label == expected_result


@@ -191,7 +191,7 @@ def test_extract_errors() -> None:
"issue_codes": [
{
"code": 1002,
"message": "Issue 1002 - The database returned an unexpected error.",
"message": "Issue 1002 - The database returned an unexpected error.", # noqa: E501
}
],
},
@@ -218,7 +218,7 @@ def test_extract_errors_with_context() -> None:
"issue_codes": [
{
"code": 1002,
"message": "Issue 1002 - The database returned an unexpected error.",
"message": "Issue 1002 - The database returned an unexpected error.", # noqa: E501
}
],
},
@@ -242,7 +242,9 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.databricks import DatabricksNativeEngineSpec as spec
from superset.db_engine_specs.databricks import (
DatabricksNativeEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -22,7 +22,7 @@ import pytest
from sqlalchemy import column, types
from sqlalchemy.engine.url import make_url
from superset.db_engine_specs.denodo import DenodoEngineSpec as spec
from superset.db_engine_specs.denodo import DenodoEngineSpec as spec # noqa: N813
from superset.utils.core import GenericDataType
from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,


@@ -74,7 +74,7 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.doris import DorisEngineSpec as spec
from superset.db_engine_specs.doris import DorisEngineSpec as spec # noqa: N813
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)


@@ -40,7 +40,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.dremio import DremioEngineSpec as spec
from superset.db_engine_specs.dremio import DremioEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -106,7 +106,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.drill import DrillEngineSpec as spec
from superset.db_engine_specs.drill import DrillEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -39,7 +39,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.druid import DruidEngineSpec as spec
from superset.db_engine_specs.druid import DruidEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -51,11 +51,11 @@ def test_convert_dttm(
("PT5M", "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT5M')"),
(
"P1W/1970-01-03T00:00:00Z",
"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST(col AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', 5)",
"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST(col AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', 5)", # noqa: E501
),
(
"1969-12-28T00:00:00Z/P1W",
"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST(col AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', -1)",
"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST(col AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', -1)", # noqa: E501
),
],
)


@@ -40,7 +40,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.duckdb import DuckDBEngineSpec as spec
from superset.db_engine_specs.duckdb import DuckDBEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -100,16 +100,16 @@ def test_md_build_sqlalchemy_uri() -> None:
# No access token provided, throw ValueError
parameters = DuckDBParametersType(database="my_db")
with pytest.raises(ValueError):
with pytest.raises(ValueError): # noqa: PT011
MotherDuckEngineSpec.build_sqlalchemy_uri(parameters)
# No database provided, default to "md:"
parameters = DuckDBParametersType(access_token="token")
parameters = DuckDBParametersType(access_token="token") # noqa: S106
uri = MotherDuckEngineSpec.build_sqlalchemy_uri(parameters)
assert "duckdb:///md:?motherduck_token=token"
# Database and access_token provided
parameters = DuckDBParametersType(database="my_db", access_token="token")
parameters = DuckDBParametersType(database="my_db", access_token="token") # noqa: S106
uri = MotherDuckEngineSpec.build_sqlalchemy_uri(parameters)
assert "duckdb:///md:my_db?motherduck_token=token" == uri
@@ -126,4 +126,4 @@ def test_get_parameters_from_uri() -> None:
parameters = DuckDBEngineSpec.get_parameters_from_uri(uri)
assert parameters["database"] == "md:my_db"
assert parameters["access_token"] == "token"
assert parameters["access_token"] == "token" # noqa: S105


@@ -37,6 +37,8 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.dynamodb import DynamoDBEngineSpec as spec
from superset.db_engine_specs.dynamodb import (
DynamoDBEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -53,7 +53,9 @@ def test_elasticsearch_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.elasticsearch import ElasticSearchEngineSpec as spec
from superset.db_engine_specs.elasticsearch import (
ElasticSearchEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm, db_extra)
@@ -70,7 +72,9 @@ def test_opendistro_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.elasticsearch import OpenDistroEngineSpec as spec
from superset.db_engine_specs.elasticsearch import (
OpenDistroEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -99,6 +99,8 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.firebird import FirebirdEngineSpec as spec
from superset.db_engine_specs.firebird import (
FirebirdEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -45,7 +45,9 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.firebolt import FireboltEngineSpec as spec
from superset.db_engine_specs.firebolt import (
FireboltEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -120,58 +120,61 @@ def test_validate_parameters_catalog(
}
errors = GSheetsEngineSpec.validate_parameters(properties) # ignore: type
assert errors == [
SupersetError(
message=(
"The URL could not be identified. Please check for typos "
"and make sure that Type of Google Sheets allowed "
"selection matches the input."
),
error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
level=ErrorLevel.WARNING,
extra={
"catalog": {
"idx": 0,
"url": True,
assert (
errors
== [
SupersetError(
message=(
"The URL could not be identified. Please check for typos "
"and make sure that Type of Google Sheets allowed "
"selection matches the input."
),
error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
level=ErrorLevel.WARNING,
extra={
"catalog": {
"idx": 0,
"url": True,
},
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501
},
{
"code": 1005,
"message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501
},
],
},
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
},
{
"code": 1005,
"message": "Issue 1005 - The table was deleted or renamed in the database.",
},
],
},
),
SupersetError(
message=(
"The URL could not be identified. Please check for typos "
"and make sure that Type of Google Sheets allowed "
"selection matches the input."
),
error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
level=ErrorLevel.WARNING,
extra={
"catalog": {
"idx": 2,
"url": True,
SupersetError(
message=(
"The URL could not be identified. Please check for typos "
"and make sure that Type of Google Sheets allowed "
"selection matches the input."
),
error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
level=ErrorLevel.WARNING,
extra={
"catalog": {
"idx": 2,
"url": True,
},
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501
},
{
"code": 1005,
"message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501
},
],
},
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
},
{
"code": 1005,
"message": "Issue 1005 - The table was deleted or renamed in the database.",
},
],
},
),
]
),
]
)
create_engine.assert_called_with(
"gsheets://",
@@ -229,11 +232,11 @@ def test_validate_parameters_catalog_and_credentials(
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501
},
{
"code": 1005,
"message": "Issue 1005 - The table was deleted or renamed in the database.",
"message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501
},
],
},
@@ -502,7 +505,7 @@ def test_get_url_for_impersonation_access_token() -> None:
url=make_url("gsheets://"),
impersonate_user=True,
username=None,
access_token="access-token",
access_token="access-token", # noqa: S106
) == make_url("gsheets://?access_token=access-token")


@@ -30,7 +30,7 @@ from tests.unit_tests.fixtures.common import dttm # noqa: F401
("Date", "TO_DATE('2019-01-02', 'YYYY-MM-DD')"),
(
"TimeStamp",
"TO_TIMESTAMP('2019-01-02T03:04:05.678900', 'YYYY-MM-DD\"T\"HH24:MI:SS.ff6')",
"TO_TIMESTAMP('2019-01-02T03:04:05.678900', 'YYYY-MM-DD\"T\"HH24:MI:SS.ff6')", # noqa: E501
),
("UnknownType", None),
],
@@ -40,6 +40,6 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.hana import HanaEngineSpec as spec
from superset.db_engine_specs.hana import HanaEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
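
Two suppressions visible in these hunks predate this commit but explain the surrounding context: the shared dttm fixture is imported at module level purely so pytest can discover it, which pyflakes reports as an unused import (F401), and the test signatures then re-use the same name, which the linter reports as a redefinition (F811), hence the waivers on the parameter lines. A generic, self-contained F811 trigger is simply a duplicate binding of a name that was never used in between:

# The second import redefines a name that was never used in between,
# which is exactly what F811 reports.
from datetime import datetime
from datetime import datetime  # noqa: F811


def test_datetime_still_works() -> None:
    assert datetime(2019, 1, 2).year == 2019
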


@@ -42,7 +42,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.hive import HiveEngineSpec as spec
from superset.db_engine_specs.hive import HiveEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -21,7 +21,7 @@ from unittest.mock import Mock, patch
import pytest
from superset.db_engine_specs.impala import ImpalaEngineSpec as spec
from superset.db_engine_specs.impala import ImpalaEngineSpec as spec # noqa: N813
from superset.models.core import Database
from superset.models.sql_lab import Query
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm


@@ -126,7 +126,7 @@ def test_kql_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.kusto import KustoKqlEngineSpec as spec
from superset.db_engine_specs.kusto import KustoKqlEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -146,6 +146,6 @@ def test_sql_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.kusto import KustoSqlEngineSpec as spec
from superset.db_engine_specs.kusto import KustoSqlEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -37,6 +37,6 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.kylin import KylinEngineSpec as spec
from superset.db_engine_specs.kylin import KylinEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -57,7 +57,7 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.mssql import MssqlEngineSpec as spec
from superset.db_engine_specs.mssql import MssqlEngineSpec as spec # noqa: N813
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)
@@ -128,7 +128,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.mssql import MssqlEngineSpec as spec
from superset.db_engine_specs.mssql import MssqlEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -307,7 +307,7 @@ Unable to connect: Adaptive Server is unavailable or does not exist (localhost_)
"issue_codes": [
{
"code": 1007,
"message": "Issue 1007 - The hostname provided can't be resolved.",
"message": "Issue 1007 - The hostname provided can't be resolved.", # noqa: E501
}
],
},
@@ -367,7 +367,7 @@ Net-Lib error during Operation timed out (60)
"issue_codes": [
{
"code": 1009,
"message": "Issue 1009 - The host might be down, and can't be reached on the provided port.",
"message": "Issue 1009 - The host might be down, and can't be reached on the provided port.", # noqa: E501
}
],
},
@@ -400,7 +400,7 @@ Net-Lib error during Operation timed out (60)
"issue_codes": [
{
"code": 1009,
"message": "Issue 1009 - The host might be down, and can't be reached on the provided port.",
"message": "Issue 1009 - The host might be down, and can't be reached on the provided port.", # noqa: E501
}
],
},
@@ -420,28 +420,31 @@ Adaptive Server connection failed (mssqldb.cxiotftzsypc.us-west-2.rds.amazonaws.
result = MssqlEngineSpec.extract_errors(
Exception(msg), context={"username": "testuser", "database": "testdb"}
)
assert result == [
SupersetError(
message='Either the username "testuser", password, or database name "testdb" is incorrect.',
error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Microsoft SQL Server",
"issue_codes": [
{
"code": 1014,
"message": "Issue 1014 - Either the username or "
"the password is wrong.",
},
{
"code": 1015,
"message": "Issue 1015 - Either the database is "
"spelled incorrectly or does not exist.",
},
],
},
)
]
assert (
result
== [
SupersetError(
message='Either the username "testuser", password, or database name "testdb" is incorrect.', # noqa: E501
error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
level=ErrorLevel.ERROR,
extra={
"engine_name": "Microsoft SQL Server",
"issue_codes": [
{
"code": 1014,
"message": "Issue 1014 - Either the username or "
"the password is wrong.",
},
{
"code": 1015,
"message": "Issue 1015 - Either the database is "
"spelled incorrectly or does not exist.",
},
],
},
)
]
)
@pytest.mark.parametrize(
@@ -453,6 +456,6 @@ Adaptive Server connection failed (mssqldb.cxiotftzsypc.us-west-2.rds.amazonaws.
],
)
def test_denormalize_name(name: str, expected_result: str):
from superset.db_engine_specs.mssql import MssqlEngineSpec as spec
from superset.db_engine_specs.mssql import MssqlEngineSpec as spec # noqa: N813
assert spec.denormalize_name(mssql.dialect(), name) == expected_result


@@ -77,7 +77,7 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.mysql import MySQLEngineSpec as spec
from superset.db_engine_specs.mysql import MySQLEngineSpec as spec # noqa: N813
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)
@@ -98,7 +98,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.mysql import MySQLEngineSpec as spec
from superset.db_engine_specs.mysql import MySQLEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -119,7 +119,7 @@ def test_validate_database_uri(sqlalchemy_uri: str, error: bool) -> None:
url = make_url(sqlalchemy_uri)
if error:
with pytest.raises(ValueError):
with pytest.raises(ValueError): # noqa: PT011
MySQLEngineSpec.validate_database_uri(url)
return
MySQLEngineSpec.validate_database_uri(url)
@@ -255,7 +255,7 @@ def test_column_type_mutator(
description: list[Any],
expected_result: list[tuple[Any, ...]],
):
from superset.db_engine_specs.mysql import MySQLEngineSpec as spec
from superset.db_engine_specs.mysql import MySQLEngineSpec as spec # noqa: N813
mock_cursor = Mock()
mock_cursor.fetchall.return_value = data


@@ -54,6 +54,8 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.oceanbase import OceanBaseEngineSpec as spec
from superset.db_engine_specs.oceanbase import (
OceanBaseEngineSpec as spec, # noqa: N813
)
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)


@@ -45,7 +45,7 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
"issue_codes": [
{
"code": 1012,
"message": "Issue 1012 - The username provided when connecting to a database is not valid.",
"message": "Issue 1012 - The username provided when connecting to a database is not valid.", # noqa: E501
}
],
},
@@ -54,7 +54,7 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
(
"The userid/password combination was not valid (Incorrect password for user)",
SupersetError(
message="The user/password combination is not valid (Incorrect password for user).",
message="The user/password combination is not valid (Incorrect password for user).", # noqa: E501
error_type=SupersetErrorType.CONNECTION_INVALID_PASSWORD_ERROR,
level=ErrorLevel.ERROR,
extra={
@@ -62,7 +62,7 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
"issue_codes": [
{
"code": 1013,
"message": "Issue 1013 - The password provided when connecting to a database is not valid.",
"message": "Issue 1013 - The password provided when connecting to a database is not valid.", # noqa: E501
}
],
},
@@ -79,7 +79,7 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
"issue_codes": [
{
"code": 1015,
"message": "Issue 1015 - Either the database is spelled incorrectly or does not exist.",
"message": "Issue 1015 - Either the database is spelled incorrectly or does not exist.", # noqa: E501
}
],
},
@@ -96,7 +96,7 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
"issue_codes": [
{
"code": 1007,
"message": "Issue 1007 - The hostname provided can't be resolved.",
"message": "Issue 1007 - The hostname provided can't be resolved.", # noqa: E501
}
],
},
@@ -120,7 +120,7 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
),
),
(
"An invalid connection string attribute was specified (failed to decrypt cipher text)",
"An invalid connection string attribute was specified (failed to decrypt cipher text)", # noqa: E501
SupersetError(
message="Invalid Connection String: Expecting String of the form 'ocient://user:pass@host:port/database'.",
error_type=SupersetErrorType.GENERIC_DB_ENGINE_ERROR,
@@ -130,16 +130,16 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
"issue_codes": [
{
"code": 1002,
"message": "Issue 1002 - The database returned an unexpected error.",
"message": "Issue 1002 - The database returned an unexpected error.", # noqa: E501
}
],
},
),
),
(
"There is a syntax error in your statement (extraneous input 'foo bar baz' expecting {<EOF>, 'trace', 'using'})",
"There is a syntax error in your statement (extraneous input 'foo bar baz' expecting {<EOF>, 'trace', 'using'})", # noqa: E501
SupersetError(
message="Syntax Error: extraneous input \"foo bar baz\" expecting \"{<EOF>, 'trace', 'using'}",
message="Syntax Error: extraneous input \"foo bar baz\" expecting \"{<EOF>, 'trace', 'using'}", # noqa: E501
error_type=SupersetErrorType.SYNTAX_ERROR,
level=ErrorLevel.ERROR,
extra={
@@ -154,9 +154,9 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
),
),
(
"There is a syntax error in your statement (mismatched input 'to' expecting {<EOF>, 'trace', 'using'})",
"There is a syntax error in your statement (mismatched input 'to' expecting {<EOF>, 'trace', 'using'})", # noqa: E501
SupersetError(
message="Syntax Error: mismatched input \"to\" expecting \"{<EOF>, 'trace', 'using'}",
message="Syntax Error: mismatched input \"to\" expecting \"{<EOF>, 'trace', 'using'}", # noqa: E501
error_type=SupersetErrorType.SYNTAX_ERROR,
level=ErrorLevel.ERROR,
extra={
@@ -181,11 +181,11 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501
},
{
"code": 1005,
"message": "Issue 1005 - The table was deleted or renamed in the database.",
"message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501
},
],
},
@@ -202,11 +202,11 @@ MARSHALED_OCIENT_ERRORS: list[tuple[str, SupersetError]] = [
"issue_codes": [
{
"code": 1003,
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",
"message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501
},
{
"code": 1004,
"message": "Issue 1004 - The column was deleted or renamed in the database.",
"message": "Issue 1004 - The column was deleted or renamed in the database.", # noqa: E501
},
],
},


@@ -100,7 +100,7 @@ def test_fetch_data() -> None:
("DateTime", """TO_DATE('2019-01-02T03:04:05', 'YYYY-MM-DD"T"HH24:MI:SS')"""),
(
"TimeStamp",
"""TO_TIMESTAMP('2019-01-02T03:04:05.678900', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')""",
"""TO_TIMESTAMP('2019-01-02T03:04:05.678900', 'YYYY-MM-DD"T"HH24:MI:SS.ff6')""", # noqa: E501
),
("Other", None),
],
@@ -110,7 +110,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.oracle import OracleEngineSpec as spec
from superset.db_engine_specs.oracle import OracleEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -124,6 +124,6 @@ def test_convert_dttm(
],
)
def test_denormalize_name(name: str, expected_result: str):
from superset.db_engine_specs.oracle import OracleEngineSpec as spec
from superset.db_engine_specs.oracle import OracleEngineSpec as spec # noqa: N813
assert spec.denormalize_name(oracle.dialect(), name) == expected_result


@@ -26,7 +26,7 @@ from sqlalchemy import column
("PT1S", "CAST(DATE_TRUNC('second', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
(
"PT5M",
"CAST(ROUND(DATE_TRUNC('minute', CAST(col AS TIMESTAMP)), 300000) AS TIMESTAMP)",
"CAST(ROUND(DATE_TRUNC('minute', CAST(col AS TIMESTAMP)), 300000) AS TIMESTAMP)", # noqa: E501
),
("P1W", "CAST(DATE_TRUNC('week', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
("P1M", "CAST(DATE_TRUNC('month', CAST(col AS TIMESTAMP)) AS TIMESTAMP)"),
@@ -38,7 +38,7 @@ def test_timegrain_expressions(time_grain: str, expected_result: str) -> None:
"""
DB Eng Specs (pinot): Test time grain expressions
"""
from superset.db_engine_specs.pinot import PinotEngineSpec as spec
from superset.db_engine_specs.pinot import PinotEngineSpec as spec # noqa: N813
actual = str(
spec.get_timestamp_expr(col=column("col"), pdf=None, time_grain=time_grain)
@@ -47,7 +47,7 @@ def test_timegrain_expressions(time_grain: str, expected_result: str) -> None:
def test_extras_without_ssl() -> None:
from superset.db_engine_specs.pinot import PinotEngineSpec as spec
from superset.db_engine_specs.pinot import PinotEngineSpec as spec # noqa: N813
from tests.integration_tests.fixtures.database import default_db_extra
database = mock.Mock()


@@ -24,7 +24,7 @@ from sqlalchemy import column, types
from sqlalchemy.dialects.postgresql import DOUBLE_PRECISION, ENUM, JSON
from sqlalchemy.engine.url import make_url
from superset.db_engine_specs.postgres import PostgresEngineSpec as spec
from superset.db_engine_specs.postgres import PostgresEngineSpec as spec # noqa: N813
from superset.exceptions import SupersetSecurityException
from superset.utils.core import GenericDataType
from tests.unit_tests.db_engine_specs.utils import (
@@ -121,7 +121,7 @@ def test_get_schema_from_engine_params() -> None:
== "secret"
)
with pytest.raises(Exception) as excinfo:
with pytest.raises(Exception) as excinfo: # noqa: PT011
spec.get_schema_from_engine_params(
make_url("postgresql://user:password@host/db1"),
{"options": "-csearch_path=secret,public"},
@@ -204,28 +204,28 @@ def test_get_default_catalog() -> None:
("PT1S", "DATE_TRUNC('second', col)"),
(
"PT5S",
"DATE_TRUNC('minute', col) + INTERVAL '5 seconds' * FLOOR(EXTRACT(SECOND FROM col) / 5)",
"DATE_TRUNC('minute', col) + INTERVAL '5 seconds' * FLOOR(EXTRACT(SECOND FROM col) / 5)", # noqa: E501
),
(
"PT30S",
"DATE_TRUNC('minute', col) + INTERVAL '30 seconds' * FLOOR(EXTRACT(SECOND FROM col) / 30)",
"DATE_TRUNC('minute', col) + INTERVAL '30 seconds' * FLOOR(EXTRACT(SECOND FROM col) / 30)", # noqa: E501
),
("PT1M", "DATE_TRUNC('minute', col)"),
(
"PT5M",
"DATE_TRUNC('hour', col) + INTERVAL '5 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 5)",
"DATE_TRUNC('hour', col) + INTERVAL '5 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 5)", # noqa: E501
),
(
"PT10M",
"DATE_TRUNC('hour', col) + INTERVAL '10 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 10)",
"DATE_TRUNC('hour', col) + INTERVAL '10 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 10)", # noqa: E501
),
(
"PT15M",
"DATE_TRUNC('hour', col) + INTERVAL '15 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 15)",
"DATE_TRUNC('hour', col) + INTERVAL '15 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 15)", # noqa: E501
),
(
"PT30M",
"DATE_TRUNC('hour', col) + INTERVAL '30 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 30)",
"DATE_TRUNC('hour', col) + INTERVAL '30 minutes' * FLOOR(EXTRACT(MINUTE FROM col) / 30)", # noqa: E501
),
("PT1H", "DATE_TRUNC('hour', col)"),
("P1D", "DATE_TRUNC('day', col)"),


@@ -59,7 +59,7 @@ def test_convert_dttm(
dttm: datetime,
expected_result: Optional[str],
) -> None:
from superset.db_engine_specs.presto import PrestoEngineSpec as spec
from superset.db_engine_specs.presto import PrestoEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -83,7 +83,7 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.presto import PrestoEngineSpec as spec
from superset.db_engine_specs.presto import PrestoEngineSpec as spec # noqa: N813
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)
@@ -150,7 +150,7 @@ def test_where_latest_partition(
compile_kwargs={"literal_binds": True},
)
)
== f"""SELECT * FROM table \nWHERE "partition_key" = {expected_value}"""
== f"""SELECT * FROM table \nWHERE "partition_key" = {expected_value}""" # noqa: S608
)
@@ -248,33 +248,33 @@ def test_get_default_catalog() -> None:
("PT1S", "date_trunc('second', CAST(col AS TIMESTAMP))"),
(
"PT5S",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 5)",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 5)", # noqa: E501
),
(
"PT30S",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 30)",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 30)", # noqa: E501
),
("PT1M", "date_trunc('minute', CAST(col AS TIMESTAMP))"),
(
"PT5M",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 5)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 5)", # noqa: E501
),
(
"PT10M",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 10)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 10)", # noqa: E501
),
(
"PT15M",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 15)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 15)", # noqa: E501
),
(
"PT0.5H",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 30)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 30)", # noqa: E501
),
("PT1H", "date_trunc('hour', CAST(col AS TIMESTAMP))"),
(
"PT6H",
"date_trunc('hour', CAST(col AS TIMESTAMP)) - interval '1' hour * (hour(CAST(col AS TIMESTAMP)) % 6)",
"date_trunc('hour', CAST(col AS TIMESTAMP)) - interval '1' hour * (hour(CAST(col AS TIMESTAMP)) % 6)", # noqa: E501
),
("P1D", "date_trunc('day', CAST(col AS TIMESTAMP))"),
("P1W", "date_trunc('week', CAST(col AS TIMESTAMP))"),
@@ -283,12 +283,12 @@ def test_get_default_catalog() -> None:
("P1Y", "date_trunc('year', CAST(col AS TIMESTAMP))"),
(
"1969-12-28T00:00:00Z/P1W",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) - interval '1' day",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) - interval '1' day", # noqa: E501
),
("1969-12-29T00:00:00Z/P1W", "date_trunc('week', CAST(col AS TIMESTAMP))"),
(
"P1W/1970-01-03T00:00:00Z",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) + interval '5' day",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) + interval '5' day", # noqa: E501
),
(
"P1W/1970-01-04T00:00:00Z",
@@ -297,7 +297,7 @@ def test_get_default_catalog() -> None:
],
)
def test_timegrain_expressions(time_grain: str, expected_result: str) -> None:
from superset.db_engine_specs.presto import PrestoEngineSpec as spec
from superset.db_engine_specs.presto import PrestoEngineSpec as spec # noqa: N813
actual = str(
spec.get_timestamp_expr(col=column("col"), pdf=None, time_grain=time_grain)


@@ -44,6 +44,8 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.redshift import RedshiftEngineSpec as spec
from superset.db_engine_specs.redshift import (
RedshiftEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -38,6 +38,6 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.rockset import RocksetEngineSpec as spec
from superset.db_engine_specs.rockset import RocksetEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)


@@ -56,7 +56,9 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.snowflake import SnowflakeEngineSpec as spec
from superset.db_engine_specs.snowflake import (
SnowflakeEngineSpec as spec, # noqa: N813
)
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -89,7 +91,7 @@ def test_extract_errors() -> None:
"issue_codes": [
{
"code": 1029,
"message": "Issue 1029 - The object does not exist in the given database.",
"message": "Issue 1029 - The object does not exist in the given database.", # noqa: E501
}
],
},
@@ -100,7 +102,7 @@ def test_extract_errors() -> None:
result = SnowflakeEngineSpec.extract_errors(Exception(msg))
assert result == [
SupersetError(
message='Please check your query for syntax errors at or near "limited". Then, try running your query again.',
message='Please check your query for syntax errors at or near "limited". Then, try running your query again.', # noqa: E501
error_type=SupersetErrorType.SYNTAX_ERROR,
level=ErrorLevel.ERROR,
extra={


@@ -40,7 +40,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.sqlite import SqliteEngineSpec as spec
from superset.db_engine_specs.sqlite import SqliteEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)
@@ -126,6 +126,6 @@ def test_time_grain_expressions(dttm: str, grain: str, expected: str) -> None:
# pylint: disable=protected-access
expression = SqliteEngineSpec._time_grain_expressions[grain].format(col="dttm")
sql = f"SELECT {expression} FROM t"
sql = f"SELECT {expression} FROM t" # noqa: S608
result = connection.execute(sql).scalar()
assert result == expected
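
S608 flags SQL assembled through string formatting, since interpolating untrusted input that way invites injection. Here the interpolated piece is a fixed time-grain expression (an identifier-level fragment that cannot be passed as a bound parameter), so the per-line waiver is the pragmatic choice; for data values the general fix is a parameterized query, sketched against an in-memory SQLite database:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (dttm TEXT)")
conn.execute("INSERT INTO t VALUES (?)", ("2019-01-02",))

# Values travel as bound parameters rather than interpolated text, so S608 stays quiet.
row = conn.execute("SELECT dttm FROM t WHERE dttm = ?", ("2019-01-02",)).fetchone()
assert row == ("2019-01-02",)
conn.close()
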


@@ -66,7 +66,9 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.starrocks import StarRocksEngineSpec as spec
from superset.db_engine_specs.starrocks import (
StarRocksEngineSpec as spec, # noqa: N813
)
assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)


@@ -240,7 +240,7 @@ def test_auth_custom_auth_denied() -> None:
superset.config.ALLOWED_EXTRA_AUTHENTICATIONS = {}
with pytest.raises(ValueError) as excinfo:
with pytest.raises(ValueError) as excinfo: # noqa: PT011
TrinoEngineSpec.update_params_from_encrypted_extra(database, {})
assert str(excinfo.value) == (
@@ -291,7 +291,7 @@ def test_get_column_spec(
generic_type: GenericDataType,
is_dttm: bool,
) -> None:
from superset.db_engine_specs.trino import TrinoEngineSpec as spec
from superset.db_engine_specs.trino import TrinoEngineSpec as spec # noqa: N813
assert_column_spec(
spec,
@@ -791,7 +791,7 @@ def test_where_latest_partition(
compile_kwargs={"literal_binds": True},
)
)
== f"""SELECT * FROM table \nWHERE partition_key = {expected_value}"""
== f"""SELECT * FROM table \nWHERE partition_key = {expected_value}""" # noqa: S608
)
@@ -855,33 +855,33 @@ def test_get_oauth2_token(
("PT1S", "date_trunc('second', CAST(col AS TIMESTAMP))"),
(
"PT5S",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 5)",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 5)", # noqa: E501
),
(
"PT30S",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 30)",
"date_trunc('second', CAST(col AS TIMESTAMP)) - interval '1' second * (second(CAST(col AS TIMESTAMP)) % 30)", # noqa: E501
),
("PT1M", "date_trunc('minute', CAST(col AS TIMESTAMP))"),
(
"PT5M",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 5)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 5)", # noqa: E501
),
(
"PT10M",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 10)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 10)", # noqa: E501
),
(
"PT15M",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 15)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 15)", # noqa: E501
),
(
"PT0.5H",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 30)",
"date_trunc('minute', CAST(col AS TIMESTAMP)) - interval '1' minute * (minute(CAST(col AS TIMESTAMP)) % 30)", # noqa: E501
),
("PT1H", "date_trunc('hour', CAST(col AS TIMESTAMP))"),
(
"PT6H",
"date_trunc('hour', CAST(col AS TIMESTAMP)) - interval '1' hour * (hour(CAST(col AS TIMESTAMP)) % 6)",
"date_trunc('hour', CAST(col AS TIMESTAMP)) - interval '1' hour * (hour(CAST(col AS TIMESTAMP)) % 6)", # noqa: E501
),
("P1D", "date_trunc('day', CAST(col AS TIMESTAMP))"),
("P1W", "date_trunc('week', CAST(col AS TIMESTAMP))"),
@@ -890,12 +890,12 @@ def test_get_oauth2_token(
("P1Y", "date_trunc('year', CAST(col AS TIMESTAMP))"),
(
"1969-12-28T00:00:00Z/P1W",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) - interval '1' day",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) - interval '1' day", # noqa: E501
),
("1969-12-29T00:00:00Z/P1W", "date_trunc('week', CAST(col AS TIMESTAMP))"),
(
"P1W/1970-01-03T00:00:00Z",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) + interval '5' day",
"date_trunc('week', CAST(col AS TIMESTAMP) + interval '1' day) + interval '5' day", # noqa: E501
),
(
"P1W/1970-01-04T00:00:00Z",
@@ -904,7 +904,7 @@ def test_get_oauth2_token(
],
)
def test_timegrain_expressions(time_grain: str, expected_result: str) -> None:
from superset.db_engine_specs.trino import TrinoEngineSpec as spec
from superset.db_engine_specs.trino import TrinoEngineSpec as spec # noqa: N813
actual = str(
spec.get_timestamp_expr(col=column("col"), pdf=None, time_grain=time_grain)


@@ -50,7 +50,7 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
from superset.db_engine_specs.ydb import YDBEngineSpec as spec
from superset.db_engine_specs.ydb import YDBEngineSpec as spec # noqa: N813
assert_convert_dttm(spec, target_type, expected_result, dttm)