Compare commits

...

6 Commits

Author SHA1 Message Date
Beto Dealmeida
3dc6e0cdb7 feat: add Firebird DB engine spec (#13353)
* feat: add Firebird DB engine spec

* Add dep to setup.py

* Fix lint

* Add tests

* Remove unneeded code

* Fix old bug

(cherry picked from commit 892eef1af6)
2021-03-01 14:17:37 -08:00
Jesse Yang
c4880cc06c fix(FilterBox): treat empty defaultValues as null (#13109)
* fix(FilterBox): treat empty defaultValues as null

* Add a unit test and move constant around

(cherry picked from commit 613945a200)
2021-03-01 13:36:10 -08:00
Daniel Vaz Gaspar
311422f2ec fix: engines that don't support comments (#13153)
* fix: engines that don't support comments

* fix: engines that don't support comments

* add quick inexpensive test

* add test

(cherry picked from commit 9568985b7b)
2021-02-17 12:21:04 -08:00
Daniel Vaz Gaspar
ffa1d0c08f feat(db engines): add support for Opendistro Elasticsearch (AWS ES) (#12602)
* feat(db engines): add support for Opendistro Elasticsearch (AWS ES)

* add time grains

* lint

* bump elasticsearch-dbapi version

* add tests

* fix test

(cherry picked from commit b3a814fa27)
2021-02-17 12:14:40 -08:00
Daniel Vaz Gaspar
5874b92977 fix(alerts): void query with numeric comparison (#13090)
* fix(alerts): void query with numeric comparison

* remove config changes

* fix tests

* better logic

* fix logic

* fix logic

* Improve test readability

(cherry picked from commit 2e6ea76631)
2021-02-15 14:14:31 -08:00
Ville Brofeldt
92e172ec8d fix: sorting by saved metric (#13059)
(cherry picked from commit c1e10c4627)
2021-02-15 14:14:07 -08:00
18 changed files with 376 additions and 79 deletions

View File

@@ -126,10 +126,11 @@ setup(
"drill": ["sqlalchemy-drill==0.1.dev"],
"druid": ["pydruid>=0.6.1,<0.7"],
"solr": ["sqlalchemy-solr >= 0.2.0"],
"elasticsearch": ["elasticsearch-dbapi>=0.1.0, <0.2.0"],
"elasticsearch": ["elasticsearch-dbapi>=0.2.0, <0.3.0"],
"exasol": ["sqlalchemy-exasol>=2.1.0, <2.2"],
"excel": ["xlrd>=1.2.0, <1.3"],
"gsheets": ["gsheetsdb>=0.1.9"],
"firebird": ["sqlalchemy-firebird>=0.7.0, <0.8"],
"gsheets": ["shillelagh>=0.2, <0.3"],
"hana": ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"],
"hive": ["pyhive[hive]>=0.6.1", "tableschema", "thrift>=0.11.0, <1.0.0"],
"impala": ["impyla>0.16.2, <0.17"],
@@ -140,6 +141,7 @@ setup(
"pinot": ["pinotdb>=0.3.3, <0.4"],
"postgres": ["psycopg2-binary==2.8.5"],
"presto": ["pyhive[presto]>=0.4.0"],
"trino": ["sqlalchemy-trino>=0.2"],
"prophet": ["fbprophet>=0.6, <0.7"],
"redshift": ["sqlalchemy-redshift>=0.8.1, < 0.9"],
"snowflake": ["snowflake-sqlalchemy>=1.2.3, <1.3"],

View File

@@ -56,13 +56,25 @@ describe('getFilterConfigsFromFormdata', () => {
});
});
it('should use default value from form_data', () => {
it('should use default value and treat empty defaults as null', () => {
const result = getFilterConfigsFromFormdata({
...testFormdata,
show_sqla_time_column: true,
filter_configs: [
...testFormdata.filter_configs,
{
asc: false,
clearable: true,
column: 'country',
defaultValue: '',
key: 'foo',
multiple: true,
},
],
});
expect(result.columns).toMatchObject({
state: ['CA'],
country: null,
});
});

View File

@@ -16,9 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
import { getChartIdsInFilterScope } from '../../util/activeDashboardFilters';
import { TIME_FILTER_MAP } from '../../../visualizations/FilterBox/FilterBox';
import { NativeFiltersState, NativeFilterState } from '../../reducers/types';
import { TIME_FILTER_MAP } from 'src/explore/constants';
import { getChartIdsInFilterScope } from 'src/dashboard/util/activeDashboardFilters';
import {
NativeFiltersState,
NativeFilterState,
} from 'src/dashboard/reducers/types';
export enum IndicatorStatus {
Unset = 'UNSET',

View File

@@ -17,11 +17,11 @@
* under the License.
*/
/* eslint-disable camelcase */
import { TIME_FILTER_MAP } from '../../visualizations/FilterBox/FilterBox';
import {
FILTER_CONFIG_ATTRIBUTES,
TIME_FILTER_LABELS,
} from '../../explore/constants';
TIME_FILTER_MAP,
} from 'src/explore/constants';
export default function getFilterConfigsFromFormdata(form_data = {}) {
const {
@@ -35,14 +35,18 @@ export default function getFilterConfigsFromFormdata(form_data = {}) {
let configs = filter_configs.reduce(
({ columns, labels }, config) => {
let defaultValues = config[FILTER_CONFIG_ATTRIBUTES.DEFAULT_VALUE];
// treat empty string as null (no default value)
if (defaultValues === '') {
defaultValues = null;
}
// defaultValue could be ; separated values,
// could be null or ''
if (
config[FILTER_CONFIG_ATTRIBUTES.DEFAULT_VALUE] &&
config[FILTER_CONFIG_ATTRIBUTES.MULTIPLE]
) {
if (defaultValues && config[FILTER_CONFIG_ATTRIBUTES.MULTIPLE]) {
defaultValues = config.defaultValue.split(';');
}
const updatedColumns = {
...columns,
[config.column]: config.vals || defaultValues,

View File

@@ -92,3 +92,14 @@ export const FILTER_CONFIG_ATTRIBUTES = {
};
export const FILTER_OPTIONS_LIMIT = 1000;
/**
 * Map control names to their key in extra_filters
 * (keys are Superset time-control names; values are the reserved
 * double-underscore-prefixed filter keys used in extra_filters payloads).
 */
export const TIME_FILTER_MAP = {
  time_range: '__time_range',
  granularity_sqla: '__time_col',
  time_grain_sqla: '__time_grain',
  druid_time_origin: '__time_origin',
  granularity: '__granularity',
};

View File

@@ -36,19 +36,11 @@ import {
FILTER_CONFIG_ATTRIBUTES,
FILTER_OPTIONS_LIMIT,
TIME_FILTER_LABELS,
TIME_FILTER_MAP,
} from 'src/explore/constants';
import './FilterBox.less';
// maps control names to their key in extra_filters
export const TIME_FILTER_MAP = {
time_range: '__time_range',
granularity_sqla: '__time_col',
time_grain_sqla: '__time_grain',
druid_time_origin: '__time_origin',
granularity: '__granularity',
};
// a shortcut to a map key, used by many components
export const TIME_RANGE = TIME_FILTER_MAP.time_range;
@@ -336,10 +328,12 @@ class FilterBox extends React.PureComponent {
// Add created options to filtersChoices, even though it doesn't exist,
// or these options will exist in query sql but invisible to end user.
Object.keys(selectedValues)
.filter(
key => selectedValues.hasOwnProperty(key) && key in filtersChoices,
)
.filter(key => key in filtersChoices)
.forEach(key => {
// empty values are ignored
if (!selectedValues[key]) {
return;
}
const choices = filtersChoices[key] || (filtersChoices[key] = []);
const choiceIds = new Set(choices.map(f => f.id));
const selectedValuesForKey = Array.isArray(selectedValues[key])
@@ -356,21 +350,21 @@ class FilterBox extends React.PureComponent {
});
});
});
const { key, label } = filterConfig;
const {
key,
label,
[FILTER_CONFIG_ATTRIBUTES.MULTIPLE]: isMultiple,
[FILTER_CONFIG_ATTRIBUTES.DEFAULT_VALUE]: defaultValue,
[FILTER_CONFIG_ATTRIBUTES.CLEARABLE]: isClearable,
[FILTER_CONFIG_ATTRIBUTES.SEARCH_ALL_OPTIONS]: searchAllOptions,
} = filterConfig;
const data = filtersChoices[key] || [];
let value = selectedValues[key] || null;
// Assign default value if required
if (
value === undefined &&
filterConfig[FILTER_CONFIG_ATTRIBUTES.DEFAULT_VALUE]
) {
if (filterConfig[FILTER_CONFIG_ATTRIBUTES.MULTIPLE]) {
// Support for semicolon-delimited multiple values
value = filterConfig[FILTER_CONFIG_ATTRIBUTES.DEFAULT_VALUE].split(';');
} else {
value = filterConfig[FILTER_CONFIG_ATTRIBUTES.DEFAULT_VALUE];
}
if (value === undefined && defaultValue) {
// multiple values are separated by semicolons
value = isMultiple ? defaultValue.split(';') : defaultValue;
}
return (
@@ -380,8 +374,8 @@ class FilterBox extends React.PureComponent {
defaultOptions={this.transformOptions(data)}
key={key}
placeholder={t('Type or Select [%s]', label)}
isMulti={filterConfig[FILTER_CONFIG_ATTRIBUTES.MULTIPLE]}
isClearable={filterConfig[FILTER_CONFIG_ATTRIBUTES.CLEARABLE]}
isMulti={isMultiple}
isClearable={isClearable}
value={value}
options={this.transformOptions(data)}
onChange={newValue => {
@@ -396,8 +390,7 @@ class FilterBox extends React.PureComponent {
onBlur={() => this.onFilterMenuClose(key)}
onMenuClose={() => this.onFilterMenuClose(key)}
selectWrap={
filterConfig[FILTER_CONFIG_ATTRIBUTES.SEARCH_ALL_OPTIONS] &&
data.length >= FILTER_OPTIONS_LIMIT
searchAllOptions && data.length >= FILTER_OPTIONS_LIMIT
? AsyncCreatableSelect
: CreatableSelect
}

View File

@@ -1,27 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export const TIME_CHOICES = [
'1 hour ago',
'12 hours ago',
'1 day ago',
'7 days ago',
'28 days ago',
'90 days ago',
'1 year ago',
];

View File

@@ -1150,6 +1150,8 @@ class SqlaTable( # pylint: disable=too-many-public-methods,too-many-instance-at
col = self.adhoc_metric_to_sqla(col, columns_by_name)
elif col in columns_by_name:
col = columns_by_name[col].get_sqla_col()
elif col in metrics_by_name:
col = metrics_by_name[col].get_sqla_col()
if isinstance(col, Label):
label = col._label # pylint: disable=protected-access

View File

@@ -153,6 +153,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
allows_joins = True
allows_subqueries = True
allows_column_aliases = True
allows_sql_comments = True
force_column_alias_quotes = False
arraysize = 0
max_column_name_length = 0
@@ -455,7 +456,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
)
return database.compile_sqla_query(qry)
if LimitMethod.FORCE_LIMIT:
if cls.limit_method == LimitMethod.FORCE_LIMIT:
parsed_query = sql_parse.ParsedQuery(sql)
sql = parsed_query.set_or_update_query_limit(limit)
@@ -857,7 +858,6 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
"""
parsed_query = ParsedQuery(statement)
sql = parsed_query.stripped()
sql_query_mutator = config["SQL_QUERY_MUTATOR"]
if sql_query_mutator:
sql = sql_query_mutator(sql, user_name, security_manager, database)
@@ -933,6 +933,9 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
:param kwargs: kwargs to be passed to cursor.execute()
:return:
"""
if not cls.allows_sql_comments:
query = sql_parse.strip_comments_from_sql(query)
if cls.arraysize:
cursor.arraysize = cls.arraysize
try:

View File

@@ -33,6 +33,7 @@ class ElasticSearchEngineSpec(BaseEngineSpec): # pylint: disable=abstract-metho
time_secondary_columns = True
allows_joins = False
allows_subqueries = True
allows_sql_comments = False
_time_grain_expressions = {
None: "{col}",
@@ -61,3 +62,35 @@ class ElasticSearchEngineSpec(BaseEngineSpec): # pylint: disable=abstract-metho
if target_type.upper() == utils.TemporalType.DATETIME:
return f"""CAST('{dttm.isoformat(timespec="seconds")}' AS DATETIME)"""
return None
class OpenDistroEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
    """Engine spec for Open Distro for Elasticsearch (AWS ES)."""

    time_groupby_inline = True
    time_secondary_columns = True
    allows_joins = False
    allows_subqueries = True
    # The odelasticsearch dialect rejects SQL comments, so they are stripped
    # before execution (see BaseEngineSpec.execute).
    allows_sql_comments = False

    _time_grain_expressions = {
        None: "{col}",
        "PT1S": "date_format({col}, 'yyyy-MM-dd HH:mm:ss.000')",
        "PT1M": "date_format({col}, 'yyyy-MM-dd HH:mm:00.000')",
        "PT1H": "date_format({col}, 'yyyy-MM-dd HH:00:00.000')",
        "P1D": "date_format({col}, 'yyyy-MM-dd 00:00:00.000')",
        "P1M": "date_format({col}, 'yyyy-MM-01 00:00:00.000')",
        "P1Y": "date_format({col}, 'yyyy-01-01 00:00:00.000')",
    }

    engine = "odelasticsearch"
    engine_name = "ElasticSearch"

    @classmethod
    def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
        """Render ``dttm`` as a quoted ISO literal for DATETIME targets.

        Returns ``None`` for any other target type (no conversion available).
        """
        if target_type.upper() != utils.TemporalType.DATETIME:
            return None
        return f"""'{dttm.isoformat(timespec="seconds")}'"""

    @staticmethod
    def _mutate_label(label: str) -> str:
        # "." has special meaning in Elasticsearch field names, so replace it.
        return label.replace(".", "_")

View File

@@ -0,0 +1,83 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from typing import Optional
from superset.db_engine_specs.base import BaseEngineSpec, LimitMethod
from superset.utils import core as utils
class FirebirdEngineSpec(BaseEngineSpec):
    """Database engine spec for the Firebird RDBMS."""

    engine = "firebird"
    engine_name = "Firebird"

    # Firebird uses FIRST to limit: `SELECT FIRST 10 * FROM table`
    limit_method = LimitMethod.FETCH_MANY

    # SQL templates that truncate {col} to each ISO 8601 time grain.
    # Firebird lacks a date_trunc, so truncation is built by re-assembling
    # the date/time parts with string concatenation and casting back.
    _time_grain_expressions = {
        None: "{col}",
        "PT1S": (
            "CAST(CAST({col} AS DATE) "
            "|| ' ' "
            "|| EXTRACT(HOUR FROM {col}) "
            "|| ':' "
            "|| EXTRACT(MINUTE FROM {col}) "
            "|| ':' "
            "|| FLOOR(EXTRACT(SECOND FROM {col})) AS TIMESTAMP)"
        ),
        "PT1M": (
            "CAST(CAST({col} AS DATE) "
            "|| ' ' "
            "|| EXTRACT(HOUR FROM {col}) "
            "|| ':' "
            "|| EXTRACT(MINUTE FROM {col}) "
            "|| ':00' AS TIMESTAMP)"
        ),
        "PT1H": (
            "CAST(CAST({col} AS DATE) "
            "|| ' ' "
            "|| EXTRACT(HOUR FROM {col}) "
            "|| ':00:00' AS TIMESTAMP)"
        ),
        "P1D": "CAST({col} AS DATE)",
        "P1M": (
            "CAST(EXTRACT(YEAR FROM {col}) "
            "|| '-' "
            "|| EXTRACT(MONTH FROM {col}) "
            "|| '-01' AS DATE)"
        ),
        "P1Y": "CAST(EXTRACT(YEAR FROM {col}) || '-01-01' AS DATE)",
    }

    @classmethod
    def epoch_to_dttm(cls) -> str:
        """Return a SQL template converting epoch seconds in {col} to a timestamp."""
        return "DATEADD(second, {col}, CAST('00:00:00' AS TIMESTAMP))"

    @classmethod
    def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
        """Render ``dttm`` as a Firebird literal for the given column type.

        Supports TIMESTAMP, DATE and TIME targets; returns ``None`` for
        anything else.
        """
        type_upper = target_type.upper()
        if type_upper == utils.TemporalType.TIMESTAMP:
            # Keep at most 4 fractional-second digits in the literal.
            truncated = dttm.isoformat(sep=" ")[: len("YYYY-MM-DD HH:MM:SS.MMMM")]
            return f"CAST('{truncated}' AS TIMESTAMP)"
        if type_upper == utils.TemporalType.DATE:
            return f"CAST('{dttm.date().isoformat()}' AS DATE)"
        if type_upper == utils.TemporalType.TIME:
            return f"CAST('{dttm.time().isoformat()}' AS TIME)"
        return None

View File

@@ -271,8 +271,8 @@ def create_slices(
groupby=["name"],
adhoc_filters=[gen_filter("gender", "girl")],
row_limit=50,
timeseries_limit_metric="sum__num",
metrics=metrics,
timeseries_limit_metric=metric,
metrics=[metric],
),
),
Slice(
@@ -300,7 +300,8 @@ def create_slices(
groupby=["name"],
adhoc_filters=[gen_filter("gender", "boy")],
row_limit=50,
metrics=metrics,
timeseries_limit_metric=metric,
metrics=[metric],
),
),
Slice(

View File

@@ -47,15 +47,16 @@ class AlertCommand(BaseCommand):
def run(self) -> bool:
self.validate()
if self._report_schedule.validator_type == ReportScheduleValidatorType.NOT_NULL:
if self._is_validator_not_null:
self._report_schedule.last_value_row_json = str(self._result)
return self._result not in (0, None, np.nan)
return self._result is not None
self._report_schedule.last_value = self._result
try:
operator = json.loads(self._report_schedule.validator_config_json)["op"]
threshold = json.loads(self._report_schedule.validator_config_json)[
"threshold"
]
return OPERATOR_FUNCTIONS[operator](self._result, threshold)
except (KeyError, json.JSONDecodeError):
raise AlertValidatorConfigError()
@@ -95,6 +96,18 @@ class AlertCommand(BaseCommand):
except (AssertionError, TypeError, ValueError):
raise AlertQueryInvalidTypeError()
    @property
    def _is_validator_not_null(self) -> bool:
        # True when this schedule's alert condition is "result is not null".
        return (
            self._report_schedule.validator_type == ReportScheduleValidatorType.NOT_NULL
        )

    @property
    def _is_validator_operator(self) -> bool:
        # True when this schedule's alert condition compares the result
        # against a threshold with an operator (>, <, ==, ...).
        return (
            self._report_schedule.validator_type == ReportScheduleValidatorType.OPERATOR
        )
def validate(self) -> None:
"""
Validate the query result as a Pandas DataFrame
@@ -108,10 +121,14 @@ class AlertCommand(BaseCommand):
except Exception as ex:
raise AlertQueryError(message=str(ex))
if df.empty:
if df.empty and self._is_validator_not_null:
self._result = None
return
if df.empty and self._is_validator_operator:
self._result = 0.0
return
rows = df.to_records()
if self._report_schedule.validator_type == ReportScheduleValidatorType.NOT_NULL:
if self._is_validator_not_null:
self._validate_not_null(rows)
return
self._validate_operator(rows)

View File

@@ -58,6 +58,19 @@ def _extract_limit_from_query(statement: TokenList) -> Optional[int]:
return None
def strip_comments_from_sql(statement: str) -> str:
    """
    Strips comments from a SQL statement, does a simple test first
    to avoid always instantiating the expensive ParsedQuery constructor

    This is useful for engines that don't support comments

    :param statement: A string with the SQL statement
    :return: SQL statement without comments
    """
    # Cheap substring probe before paying for a full parse. Checking "/*"
    # as well as "--" covers block comments, which the line-comment-only
    # test would leave in place and pass through to the engine.
    if "--" in statement or "/*" in statement:
        return ParsedQuery(statement).strip_comments()
    return statement
@dataclass(eq=True, frozen=True)
class Table: # pylint: disable=too-few-public-methods
"""
@@ -150,6 +163,9 @@ class ParsedQuery:
def stripped(self) -> str:
return self.sql.strip(" \t\n;")
    def strip_comments(self) -> str:
        # Delegate comment removal to sqlparse after trimming whitespace
        # and trailing semicolons via stripped().
        return sqlparse.format(self.stripped(), strip_comments=True)
def get_statements(self) -> List[str]:
"""Returns a list of SQL statements as strings, stripped"""
statements = []

View File

@@ -14,7 +14,14 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from superset.db_engine_specs.elasticsearch import ElasticSearchEngineSpec
from unittest.mock import MagicMock
from sqlalchemy import column
from superset.db_engine_specs.elasticsearch import (
ElasticSearchEngineSpec,
OpenDistroEngineSpec,
)
from tests.db_engine_specs.base_tests import TestDbEngineSpec
@@ -26,3 +33,38 @@ class TestElasticSearchDbEngineSpec(TestDbEngineSpec):
ElasticSearchEngineSpec.convert_dttm("DATETIME", dttm),
"CAST('2019-01-02T03:04:05' AS DATETIME)",
)
def test_opendistro_convert_dttm(self):
"""
DB Eng Specs (opendistro): Test convert_dttm
"""
dttm = self.get_dttm()
self.assertEqual(
OpenDistroEngineSpec.convert_dttm("DATETIME", dttm),
"'2019-01-02T03:04:05'",
)
def test_opendistro_sqla_column_label(self):
"""
DB Eng Specs (opendistro): Test column label
"""
test_cases = {
"Col": "Col",
"Col.keyword": "Col_keyword",
}
for original, expected in test_cases.items():
actual = OpenDistroEngineSpec.make_label_compatible(column(original).name)
self.assertEqual(actual, expected)
def test_opendistro_strip_comments(self):
"""
DB Eng Specs (opendistro): Test execute sql strip comments
"""
mock_cursor = MagicMock()
mock_cursor.execute.return_value = []
OpenDistroEngineSpec.execute(
mock_cursor, "-- some comment \nSELECT 1\n --other comment"
)
mock_cursor.execute.assert_called_once_with("SELECT 1\n")

View File

@@ -0,0 +1,81 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from unittest import mock
import pytest
from superset.db_engine_specs.firebird import FirebirdEngineSpec
# Expected Firebird SQL for each tested time grain, keyed by ISO 8601
# duration (None = identity, no truncation). Used as parametrize fixtures.
grain_expressions = {
    None: "timestamp_column",
    "PT1S": (
        "CAST(CAST(timestamp_column AS DATE) "
        "|| ' ' "
        "|| EXTRACT(HOUR FROM timestamp_column) "
        "|| ':' "
        "|| EXTRACT(MINUTE FROM timestamp_column) "
        "|| ':' "
        "|| FLOOR(EXTRACT(SECOND FROM timestamp_column)) AS TIMESTAMP)"
    ),
    "PT1M": (
        "CAST(CAST(timestamp_column AS DATE) "
        "|| ' ' "
        "|| EXTRACT(HOUR FROM timestamp_column) "
        "|| ':' "
        "|| EXTRACT(MINUTE FROM timestamp_column) "
        "|| ':00' AS TIMESTAMP)"
    ),
    "P1D": "CAST(timestamp_column AS DATE)",
    "P1M": (
        "CAST(EXTRACT(YEAR FROM timestamp_column) "
        "|| '-' "
        "|| EXTRACT(MONTH FROM timestamp_column) "
        "|| '-01' AS DATE)"
    ),
    "P1Y": "CAST(EXTRACT(YEAR FROM timestamp_column) || '-01-01' AS DATE)",
}
@pytest.mark.parametrize("grain,expected", grain_expressions.items())
def test_time_grain_expressions(grain, expected):
    """Each supported grain renders the expected Firebird SQL expression."""
    template = FirebirdEngineSpec._time_grain_expressions[grain]
    assert template.format(col="timestamp_column") == expected
def test_epoch_to_dttm():
    """epoch_to_dttm renders a DATEADD template over the column name."""
    rendered = FirebirdEngineSpec.epoch_to_dttm().format(col="timestamp_column")
    assert rendered == (
        "DATEADD(second, timestamp_column, CAST('00:00:00' AS TIMESTAMP))"
    )
def test_convert_dttm():
    """convert_dttm handles TIMESTAMP/TIME/DATE (case-insensitively); others get None."""
    dttm = datetime(2021, 1, 1)
    expected_by_type = {
        "timestamp": "CAST('2021-01-01 00:00:00' AS TIMESTAMP)",
        "TIMESTAMP": "CAST('2021-01-01 00:00:00' AS TIMESTAMP)",
        "TIME": "CAST('00:00:00' AS TIME)",
        "DATE": "CAST('2021-01-01' AS DATE)",
    }
    for target_type, expected in expected_by_type.items():
        assert FirebirdEngineSpec.convert_dttm(target_type, dttm) == expected
    # Unsupported target types yield no literal at all.
    assert FirebirdEngineSpec.convert_dttm("STRING", dttm) is None

View File

@@ -308,7 +308,7 @@ def create_test_table_context(database: Database):
@pytest.yield_fixture(
params=["alert1", "alert2", "alert3", "alert4", "alert5", "alert6"]
params=["alert1", "alert2", "alert3", "alert4", "alert5", "alert6", "alert7"]
)
def create_no_alert_email_chart(request):
param_config = {
@@ -338,10 +338,15 @@ def create_no_alert_email_chart(request):
"validator_config_json": '{"op": "!=", "threshold": 10}',
},
"alert6": {
"sql": "SELECT first from test_table where first=0",
"sql": "SELECT first from test_table where 1=0",
"validator_type": ReportScheduleValidatorType.NOT_NULL,
"validator_config_json": "{}",
},
"alert7": {
"sql": "SELECT first from test_table where 1=0",
"validator_type": ReportScheduleValidatorType.OPERATOR,
"validator_config_json": '{"op": ">", "threshold": 0}',
},
}
with app.app_context():
chart = db.session.query(Slice).first()

View File

@@ -18,7 +18,7 @@ import unittest
import sqlparse
from superset.sql_parse import ParsedQuery, Table
from superset.sql_parse import ParsedQuery, strip_comments_from_sql, Table
class TestSupersetSqlParse(unittest.TestCase):
@@ -732,3 +732,19 @@ class TestSupersetSqlParse(unittest.TestCase):
"""
parsed = ParsedQuery(query, strip_comments=True)
assert not parsed.is_valid_ctas()
def test_strip_comments_from_sql(self):
"""Test that we are able to strip comments out of SQL stmts"""
assert (
strip_comments_from_sql("SELECT col1, col2 FROM table1")
== "SELECT col1, col2 FROM table1"
)
assert (
strip_comments_from_sql("SELECT col1, col2 FROM table1\n-- comment")
== "SELECT col1, col2 FROM table1\n"
)
assert (
strip_comments_from_sql("SELECT '--abc' as abc, col2 FROM table1\n")
== "SELECT '--abc' as abc, col2 FROM table1"
)