mirror of
https://github.com/apache/superset.git
synced 2026-04-19 08:04:53 +00:00
refactor(tests): decouple unittests from integration tests (#15473)
* refactor move all tests to be under integration_tests package * refactor decouple unittests from integration tests - commands * add unit_tests package * fix celery_tests.py * fix wrong FIXTURES_DIR value
This commit is contained in:
29
tests/integration_tests/utils/__init__.py
Normal file
29
tests/integration_tests/utils/__init__.py
Normal file
@@ -0,0 +1,29 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import json
|
||||
from os import path
|
||||
|
||||
# Path, relative to the repository root, where shared test fixture files live.
FIXTURES_DIR = "tests/integration_tests/fixtures"
|
||||
|
||||
|
||||
def read_fixture(fixture_file_name):
    """Return the raw bytes of the named fixture file under FIXTURES_DIR."""
    fixture_path = path.join(FIXTURES_DIR, fixture_file_name)
    with open(fixture_path, "rb") as handle:
        contents = handle.read()
    return contents
|
||||
|
||||
|
||||
def load_fixture(fixture_file_name):
    """Read the named fixture file and deserialize its JSON content."""
    raw_bytes = read_fixture(fixture_file_name)
    return json.loads(raw_bytes)
|
||||
54
tests/integration_tests/utils/core_tests.py
Normal file
54
tests/integration_tests/utils/core_tests.py
Normal file
@@ -0,0 +1,54 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# pylint: disable=no-self-use
|
||||
import pytest
|
||||
|
||||
from superset.utils.core import to_adhoc
|
||||
|
||||
|
||||
def test_to_adhoc_generates_deterministic_values():
    """to_adhoc must behave as a pure function: identical inputs yield
    identical results, and distinct inputs yield distinct results
    (including the derived filterOptionName hash)."""
    latitude_filter = {"op": "IS NOT NULL", "col": "LATITUDE", "val": ""}
    longitude_filter = dict(latitude_filter, col="LONGITUDE")

    # Fields shared by both expected outputs.
    base_expected = {
        "clause": "WHERE",
        "expressionType": "SIMPLE",
        "isExtra": False,
        "comparator": "",
        "operator": "IS NOT NULL",
    }

    # Determinism: the same input always maps to the same output.
    assert to_adhoc(latitude_filter) == to_adhoc(latitude_filter)
    assert to_adhoc(latitude_filter) == dict(
        base_expected,
        subject="LATITUDE",
        filterOptionName="d0908f77d950131db7a69fdc820cb739",
    )

    # Sensitivity: a different input maps to a different output.
    assert to_adhoc(latitude_filter) != to_adhoc(longitude_filter)
    assert to_adhoc(longitude_filter) == dict(
        base_expected,
        subject="LONGITUDE",
        filterOptionName="c5f283f727d4dfc6258b351d4a8663bc",
    )
|
||||
80
tests/integration_tests/utils/csv_tests.py
Normal file
80
tests/integration_tests/utils/csv_tests.py
Normal file
@@ -0,0 +1,80 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# pylint: disable=no-self-use
|
||||
import io
|
||||
|
||||
import pandas as pd
|
||||
import pytest
|
||||
|
||||
from superset.utils import csv
|
||||
|
||||
|
||||
def test_escape_value():
    """escape_value must leave benign values untouched and neutralize
    spreadsheet formula injection by prefixing a leading single quote.

    NOTE: the expected pipe escapes were originally spelled "\|", which is
    an invalid Python escape sequence (SyntaxWarning on modern CPython);
    they are written here as "\\|", which denotes the identical runtime
    string (a backslash followed by a pipe).
    """
    cases = [
        # Benign values, including plain negative numbers, pass through.
        ("value", "value"),
        ("-10", "-10"),
        # Leading formula-trigger characters are quoted out.
        ("@value", "'@value"),
        ("+value", "'+value"),
        ("-value", "'-value"),
        ("=value", "'=value"),
        ("|value", "'\\|value"),
        ("%value", "'%value"),
        # A realistic DDE injection payload.
        ("=cmd|' /C calc'!A0", "'=cmd\\|' /C calc'!A0"),
        ('""=10+2', '\'""=10+2'),
        # Leading whitespace does not defeat the escaping.
        (" =10+2", "' =10+2"),
    ]
    for value, expected in cases:
        result = csv.escape_value(value)
        assert result == expected
|
||||
|
||||
|
||||
def test_df_to_escaped_csv():
    """Round-trip a DataFrame through df_to_escaped_csv and confirm that
    every dangerous cell — and the header row — comes back quoted.

    NOTE: the expected backslash-pipe was originally spelled "\|", an
    invalid Python escape sequence; it is written here as "\\|", which is
    the identical runtime string.
    """
    csv_rows = [
        ["col_a", "=func()"],
        ["-10", "=cmd|' /C calc'!A0"],
        ["a", '""=b'],
        [" =a", "b"],
    ]
    csv_str = "\n".join(",".join(row) for row in csv_rows)

    df = pd.read_csv(io.StringIO(csv_str))

    escaped_csv_str = csv.df_to_escaped_csv(df, encoding="utf8", index=False)
    escaped_csv_rows = [row.split(",") for row in escaped_csv_str.strip().split("\n")]

    assert escaped_csv_rows == [
        ["col_a", "'=func()"],
        ["-10", "'=cmd\\|' /C calc'!A0"],
        ["a", "'=b"],  # pandas seems to be removing the leading ""
        ["' =a", "b"],
    ]
|
||||
323
tests/integration_tests/utils/date_parser_tests.py
Normal file
323
tests/integration_tests/utils/date_parser_tests.py
Normal file
@@ -0,0 +1,323 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import patch
|
||||
|
||||
from superset.charts.commands.exceptions import (
|
||||
TimeRangeParseFailError,
|
||||
TimeRangeUnclearError,
|
||||
)
|
||||
from superset.utils.date_parser import (
|
||||
DateRangeMigration,
|
||||
datetime_eval,
|
||||
get_since_until,
|
||||
parse_human_datetime,
|
||||
parse_human_timedelta,
|
||||
parse_past_timedelta,
|
||||
)
|
||||
from tests.integration_tests.base_tests import SupersetTestCase
|
||||
|
||||
|
||||
def mock_parse_human_datetime(s):
    """Deterministic stand-in for parse_human_datetime.

    Anchors "now" at 2016-11-07 09:30:10 so that every relative-date test
    built on top of it is reproducible. Unrecognized phrases resolve to
    None, matching the original if/elif chain's implicit fallthrough.
    """
    fixed_responses = {
        "now": datetime(2016, 11, 7, 9, 30, 10),
        "2018": datetime(2018, 1, 1),
        "2018-9": datetime(2018, 9, 1),
        "today": datetime(2016, 11, 7),
        "yesterday": datetime(2016, 11, 6),
        "tomorrow": datetime(2016, 11, 8),
        "Last year": datetime(2015, 11, 7),
        "Last week": datetime(2015, 10, 31),
        "Last 5 months": datetime(2016, 6, 7),
        "Next 5 months": datetime(2017, 4, 7),
        # Both spellings of the same relative phrase share one anchor.
        "5 days": datetime(2016, 11, 2),
        "5 days ago": datetime(2016, 11, 2),
        "2018-01-01T00:00:00": datetime(2018, 1, 1),
        "2018-12-31T23:59:59": datetime(2018, 12, 31, 23, 59, 59),
    }
    return fixed_responses.get(s)
|
||||
|
||||
|
||||
class TestDateParser(SupersetTestCase):
    """Tests for superset.utils.date_parser.

    mock_parse_human_datetime pins "now" at 2016-11-07 09:30:10, so every
    relative expression below resolves deterministically.
    """

    @patch("superset.utils.date_parser.parse_human_datetime", mock_parse_human_datetime)
    def test_get_since_until(self):
        # (args, kwargs, expected (since, until)) triples, checked in order.
        cases = [
            ((), {}, (None, datetime(2016, 11, 7))),
            ((" : now",), {}, (None, datetime(2016, 11, 7, 9, 30, 10))),
            (
                ("yesterday : tomorrow",),
                {},
                (datetime(2016, 11, 6), datetime(2016, 11, 8)),
            ),
            (
                ("2018-01-01T00:00:00 : 2018-12-31T23:59:59",),
                {},
                (datetime(2018, 1, 1), datetime(2018, 12, 31, 23, 59, 59)),
            ),
            (("Last year",), {}, (datetime(2015, 11, 7), datetime(2016, 11, 7))),
            (("Last quarter",), {}, (datetime(2016, 8, 7), datetime(2016, 11, 7))),
            (("Last 5 months",), {}, (datetime(2016, 6, 7), datetime(2016, 11, 7))),
            (("Last 1 month",), {}, (datetime(2016, 10, 7), datetime(2016, 11, 7))),
            (("Next 5 months",), {}, (datetime(2016, 11, 7), datetime(2017, 4, 7))),
            (("Next 1 month",), {}, (datetime(2016, 11, 7), datetime(2016, 12, 7))),
            ((), {"since": "5 days"}, (datetime(2016, 11, 2), datetime(2016, 11, 7))),
            (
                (),
                {"since": "5 days ago", "until": "tomorrow"},
                (datetime(2016, 11, 2), datetime(2016, 11, 8)),
            ),
            (
                (),
                {"time_range": "yesterday : tomorrow", "time_shift": "1 day"},
                (datetime(2016, 11, 5), datetime(2016, 11, 7)),
            ),
            (
                (),
                {"time_range": "5 days : now"},
                (datetime(2016, 11, 2), datetime(2016, 11, 7, 9, 30, 10)),
            ),
            (
                ("Last week",),
                {"relative_end": "now"},
                (datetime(2016, 10, 31), datetime(2016, 11, 7, 9, 30, 10)),
            ),
            (
                ("Last week",),
                {"relative_start": "now"},
                (datetime(2016, 10, 31, 9, 30, 10), datetime(2016, 11, 7)),
            ),
            (
                ("Last week",),
                {"relative_start": "now", "relative_end": "now"},
                (datetime(2016, 10, 31, 9, 30, 10), datetime(2016, 11, 7, 9, 30, 10)),
            ),
            (
                ("previous calendar week",),
                {},
                (datetime(2016, 10, 31, 0, 0, 0), datetime(2016, 11, 7, 0, 0, 0)),
            ),
            (
                ("previous calendar month",),
                {},
                (datetime(2016, 10, 1, 0, 0, 0), datetime(2016, 11, 1, 0, 0, 0)),
            ),
            (
                ("previous calendar year",),
                {},
                (datetime(2015, 1, 1, 0, 0, 0), datetime(2016, 1, 1, 0, 0, 0)),
            ),
        ]
        for args, kwargs, expected in cases:
            self.assertEqual(get_since_until(*args, **kwargs), expected)

        # An inverted range is rejected outright.
        with self.assertRaises(ValueError):
            get_since_until(time_range="tomorrow : yesterday")

    @patch("superset.utils.date_parser.parse_human_datetime", mock_parse_human_datetime)
    def test_datetime_eval(self):
        # (expression, expected datetime) pairs, evaluated in order.
        cases = [
            ("datetime('now')", datetime(2016, 11, 7, 9, 30, 10)),
            # Stray whitespace inside the call is tolerated.
            ("datetime('today' )", datetime(2016, 11, 7)),
            ("datetime('2018')", datetime(2018, 1, 1)),
            ("datetime('2018-9')", datetime(2018, 9, 1)),
            # Compact argument spelling (no spaces, trailing comma) parses too.
            ("dateadd(datetime('today'),1,year,)", datetime(2017, 11, 7)),
            ("dateadd(datetime('today'), -2, year)", datetime(2014, 11, 7)),
            ("dateadd(datetime('today'), 2, quarter)", datetime(2017, 5, 7)),
            ("dateadd(datetime('today'), 3, month)", datetime(2017, 2, 7)),
            ("dateadd(datetime('today'), -3, week)", datetime(2016, 10, 17)),
            ("dateadd(datetime('today'), 3, day)", datetime(2016, 11, 10)),
            ("dateadd(datetime('now'), 3, hour)", datetime(2016, 11, 7, 12, 30, 10)),
            ("dateadd(datetime('now'), 40, minute)", datetime(2016, 11, 7, 10, 10, 10)),
            ("dateadd(datetime('now'), -11, second)", datetime(2016, 11, 7, 9, 29, 59)),
            ("datetrunc(datetime('now'), year)", datetime(2016, 1, 1, 0, 0, 0)),
            ("datetrunc(datetime('now'), month)", datetime(2016, 11, 1, 0, 0, 0)),
            ("datetrunc(datetime('now'), day)", datetime(2016, 11, 7, 0, 0, 0)),
            ("datetrunc(datetime('now'), week)", datetime(2016, 11, 7, 0, 0, 0)),
            ("datetrunc(datetime('now'), hour)", datetime(2016, 11, 7, 9, 0, 0)),
            ("datetrunc(datetime('now'), minute)", datetime(2016, 11, 7, 9, 30, 0)),
            ("datetrunc(datetime('now'), second)", datetime(2016, 11, 7, 9, 30, 10)),
            ("lastday(datetime('now'), year)", datetime(2016, 12, 31, 0, 0, 0)),
            ("lastday(datetime('today'), month)", datetime(2016, 11, 30, 0, 0, 0)),
            ("holiday('Christmas')", datetime(2016, 12, 25, 0, 0, 0)),
            (
                "holiday('Labor day', datetime('2018-01-01T00:00:00'))",
                datetime(2018, 9, 3, 0, 0, 0),
            ),
            (
                "holiday('Boxing day', datetime('2018-01-01T00:00:00'), 'UK')",
                datetime(2018, 12, 26, 0, 0, 0),
            ),
            # Nested calls compose left to right.
            (
                "lastday(dateadd(datetime('2018-01-01T00:00:00'), 1, month), month)",
                datetime(2018, 2, 28, 0, 0, 0),
            ),
        ]
        for expression, expected in cases:
            self.assertEqual(datetime_eval(expression), expected)

    @patch("superset.utils.date_parser.datetime")
    def test_parse_human_timedelta(self, mock_datetime):
        mock_datetime.now.return_value = datetime(2019, 4, 1)
        mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
        self.assertEqual(parse_human_timedelta("now"), timedelta(0))
        self.assertEqual(parse_human_timedelta("1 year"), timedelta(366))
        self.assertEqual(parse_human_timedelta("-1 year"), timedelta(-365))
        self.assertEqual(parse_human_timedelta(None), timedelta(0))
        # A "month" length depends on the anchor date it is measured from.
        month_cases = [
            ("1 month", datetime(2019, 4, 1), timedelta(30)),
            ("1 month", datetime(2019, 5, 1), timedelta(31)),
            ("1 month", datetime(2019, 2, 1), timedelta(28)),
            ("-1 month", datetime(2019, 2, 1), timedelta(-31)),
        ]
        for phrase, anchor, expected in month_cases:
            self.assertEqual(parse_human_timedelta(phrase, anchor), expected)

    @patch("superset.utils.date_parser.datetime")
    def test_parse_past_timedelta(self, mock_datetime):
        mock_datetime.now.return_value = datetime(2019, 4, 1)
        mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw)
        # The sign is discarded: the span is always reported as positive.
        self.assertEqual(parse_past_timedelta("1 year"), timedelta(365))
        self.assertEqual(parse_past_timedelta("-1 year"), timedelta(365))
        self.assertEqual(parse_past_timedelta("52 weeks"), timedelta(364))
        self.assertEqual(parse_past_timedelta("1 month"), timedelta(31))

    def test_parse_human_datetime(self):
        # Duration-like phrases are instants of unclear meaning and rejected.
        for ambiguous in (" 2 days ", "2 day"):
            with self.assertRaises(TimeRangeUnclearError):
                parse_human_datetime(ambiguous)

        # Gibberish fails outright with a parse error.
        with self.assertRaises(TimeRangeParseFailError):
            parse_human_datetime("xxxxxxx")

    def test_DateRangeMigration(self):
        # Bare "<n> <unit>" tokens on either side of the range must match
        # the corresponding migration regex.
        params = '{"time_range": " 8 days : 2020-03-10T00:00:00"}'
        self.assertRegex(params, DateRangeMigration.x_dateunit_in_since)

        params = '{"time_range": "2020-03-10T00:00:00 : 8 days "}'
        self.assertRegex(params, DateRangeMigration.x_dateunit_in_until)

        params = '{"time_range": " 2 weeks : 8 days "}'
        self.assertRegex(params, DateRangeMigration.x_dateunit_in_since)
        self.assertRegex(params, DateRangeMigration.x_dateunit_in_until)

        # Explicit "ago"/"later" phrasing must NOT be picked up for migration.
        params = '{"time_range": "2 weeks ago : 8 days later"}'
        self.assertNotRegex(params, DateRangeMigration.x_dateunit_in_since)
        self.assertNotRegex(params, DateRangeMigration.x_dateunit_in_until)

        field_cases = [
            (" 8 days ", True),
            ("last week", False),
            ("10 years ago", False),
        ]
        for field, should_match in field_cases:
            if should_match:
                self.assertRegex(field, DateRangeMigration.x_dateunit)
            else:
                self.assertNotRegex(field, DateRangeMigration.x_dateunit)
|
||||
43
tests/integration_tests/utils/decorators_tests.py
Normal file
43
tests/integration_tests/utils/decorators_tests.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from unittest.mock import call, Mock
|
||||
|
||||
from superset.utils import decorators
|
||||
from tests.integration_tests.base_tests import SupersetTestCase
|
||||
|
||||
|
||||
class UtilsDecoratorsTests(SupersetTestCase):
    def test_debounce(self):
        """debounce collapses repeated calls carrying identical arguments,
        and normalizes keyword ordering before comparing them."""
        call_spy = Mock()

        @decorators.debounce()
        def myfunc(arg1: int, arg2: int, kwarg1: str = "abc", kwarg2: int = 2):
            call_spy(arg1, kwarg1)
            return arg1 + arg2 + kwarg2

        # Three identical invocations hit the wrapped body exactly once.
        for _ in range(2):
            myfunc(1, 1)
        result = myfunc(1, 1)
        call_spy.assert_called_once_with(1, "abc")
        self.assertEqual(result, 4)

        # Keyword order must not defeat the duplicate detection.
        myfunc(1, 0, kwarg2=2, kwarg1="haha")
        result = myfunc(1, 0, kwarg1="haha", kwarg2=2)
        call_spy.assert_has_calls([call(1, "abc"), call(1, "haha")])
        self.assertEqual(result, 3)
|
||||
63
tests/integration_tests/utils/encrypt_tests.py
Normal file
63
tests/integration_tests/utils/encrypt_tests.py
Normal file
@@ -0,0 +1,63 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from sqlalchemy import String, TypeDecorator
|
||||
from sqlalchemy_utils import EncryptedType
|
||||
from sqlalchemy_utils.types.encrypted.encrypted_type import StringEncryptedType
|
||||
|
||||
from superset.extensions import encrypted_field_factory
|
||||
from superset.utils.encrypt import AbstractEncryptedFieldAdapter, SQLAlchemyUtilsAdapter
|
||||
from tests.integration_tests.base_tests import SupersetTestCase
|
||||
|
||||
|
||||
class CustomEncFieldAdapter(AbstractEncryptedFieldAdapter):
    """Minimal custom adapter used to verify that the encrypted-field
    factory honors an adapter swapped in via app config."""

    def create(
        self,
        app_config: Optional[Dict[str, Any]],
        *args: List[Any],
        **kwargs: Optional[Dict[str, Any]]
    ) -> TypeDecorator:
        # Guard clause: the factory must always hand us the app config.
        if not app_config:
            raise Exception("Missing app_config kwarg")
        return StringEncryptedType(*args, app_config["SECRET_KEY"], **kwargs)
|
||||
|
||||
|
||||
class EncryptedFieldTest(SupersetTestCase):
    """Covers the pluggable encrypted-field factory."""

    def setUp(self) -> None:
        # Start every test from the default sqlalchemy_utils adapter.
        self.app.config[
            "SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER"
        ] = SQLAlchemyUtilsAdapter
        encrypted_field_factory.init_app(self.app)

        super().setUp()

    def test_create_field(self):
        # The default adapter produces an EncryptedType keyed by SECRET_KEY.
        field = encrypted_field_factory.create(String(1024))
        self.assertIsInstance(field, EncryptedType)
        self.assertEqual(self.app.config["SECRET_KEY"], field.key)

    def test_custom_adapter(self):
        # Swapping the adapter in config changes the produced field type.
        self.app.config[
            "SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER"
        ] = CustomEncFieldAdapter
        encrypted_field_factory.init_app(self.app)
        field = encrypted_field_factory.create(String(1024))
        self.assertIsInstance(field, StringEncryptedType)
        self.assertNotIsInstance(field, EncryptedType)
        self.assertEqual(self.app.config["SECRET_KEY"], field.key)
|
||||
28
tests/integration_tests/utils/get_dashboards.py
Normal file
28
tests/integration_tests/utils/get_dashboards.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from typing import List
|
||||
|
||||
from flask_appbuilder import SQLA
|
||||
|
||||
from superset.models.dashboard import Dashboard
|
||||
|
||||
|
||||
def get_dashboards_ids(db: SQLA, dashboard_slugs: List[str]) -> List[int]:
    """Resolve a list of dashboard slugs to their primary-key ids."""
    matching_rows = (
        db.session.query(Dashboard.id)
        .filter(Dashboard.slug.in_(dashboard_slugs))
        .all()
    )
    # Each row is a one-element tuple; unpack to a plain list of ints.
    return [dashboard_id for (dashboard_id,) in matching_rows]
|
||||
97
tests/integration_tests/utils/hashing_tests.py
Normal file
97
tests/integration_tests/utils/hashing_tests.py
Normal file
@@ -0,0 +1,97 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# pylint: disable=no-self-use
|
||||
import datetime
|
||||
import math
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
|
||||
from superset.utils.hashing import md5_sha_from_dict, md5_sha_from_str
|
||||
|
||||
|
||||
def test_basic_md5_sha():
    """Hashing a dict must equal hashing its canonical (key-sorted) JSON."""
    payload = {
        "product": "Coffee",
        "company": "Gobias Industries",
        "price_in_cents": 4000,
    }

    canonical_json = (
        '{"company": "Gobias Industries", "price_in_cents": 4000, "product": "Coffee"}'
    )

    digest = md5_sha_from_str(canonical_json)
    assert digest == md5_sha_from_dict(payload)
    assert digest == "35f22273cd6a6798b04f8ddef51135e3"
|
||||
|
||||
|
||||
def test_sort_order_md5_sha():
    """Key insertion order must not affect the dict digest."""
    ordering_a = {
        "product": "Coffee",
        "price_in_cents": 4000,
        "company": "Gobias Industries",
    }

    ordering_b = {
        "product": "Coffee",
        "company": "Gobias Industries",
        "price_in_cents": 4000,
    }

    digest = md5_sha_from_dict(ordering_a)
    assert digest == md5_sha_from_dict(ordering_b)
    assert digest == "35f22273cd6a6798b04f8ddef51135e3"
|
||||
|
||||
|
||||
def test_custom_default_md5_sha():
    """A custom `default` serializer lets values JSON cannot encode
    participate in the digest."""

    def serialize_datetime(obj: Any):
        # Collapse any datetime to a fixed token so the digest is stable.
        if isinstance(obj, datetime.datetime):
            return "<datetime>"

    payload = {
        "product": "Coffee",
        "company": "Gobias Industries",
        "datetime": datetime.datetime.now(),
    }

    canonical_json = '{"company": "Gobias Industries", "datetime": "<datetime>", "product": "Coffee"}'

    digest = md5_sha_from_str(canonical_json)
    assert digest == md5_sha_from_dict(payload, default=serialize_datetime)
    assert digest == "dc280121213aabcaeb8087aef268fd0d"
|
||||
|
||||
|
||||
def test_ignore_nan_md5_sha():
    """ignore_nan swaps JavaScript-style NaN for JSON-legal null in the
    serialized form, changing the resulting digest."""
    payload = {
        "product": "Coffee",
        "company": "Gobias Industries",
        "price": math.nan,
    }

    # Default serialization emits a bare NaN token.
    with_nan = '{"company": "Gobias Industries", "price": NaN, "product": "Coffee"}'
    assert md5_sha_from_str(with_nan) == md5_sha_from_dict(payload)
    assert md5_sha_from_str(with_nan) == "5d129d1dffebc0bacc734366476d586d"

    # With ignore_nan=True the NaN becomes null.
    with_null = '{"company": "Gobias Industries", "price": null, "product": "Coffee"}'
    assert md5_sha_from_str(with_null) == md5_sha_from_dict(payload, ignore_nan=True)
    assert md5_sha_from_str(with_null) == "40e87d61f6add03816bccdeac5713b9f"
|
||||
56
tests/integration_tests/utils/machine_auth_tests.py
Normal file
56
tests/integration_tests/utils/machine_auth_tests.py
Normal file
@@ -0,0 +1,56 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from unittest.mock import call, Mock, patch
|
||||
|
||||
from superset.extensions import machine_auth_provider_factory
|
||||
from tests.integration_tests.base_tests import SupersetTestCase
|
||||
|
||||
|
||||
class MachineAuthProviderTests(SupersetTestCase):
    """Checks cookie-based webdriver authentication for machine users."""

    def test_get_auth_cookies(self):
        # A real user must yield at least a session cookie.
        user = self.get_user("admin")
        cookies = machine_auth_provider_factory.instance.get_auth_cookies(user)
        self.assertIsNotNone(cookies["session"])

    @patch("superset.utils.machine_auth.MachineAuthProvider.get_auth_cookies")
    def test_auth_driver_user(self, get_auth_cookies):
        # When a user is supplied, cookies come from get_auth_cookies().
        user = self.get_user("admin")
        webdriver = Mock()
        get_auth_cookies.return_value = {
            "session": "session_val",
            "other_cookie": "other_val",
        }
        machine_auth_provider_factory.instance.authenticate_webdriver(webdriver, user)
        expected_cookie_calls = [
            call({"name": "session", "value": "session_val"}),
            call({"name": "other_cookie", "value": "other_val"}),
        ]
        webdriver.add_cookie.assert_has_calls(expected_cookie_calls)

    @patch("superset.utils.machine_auth.request")
    def test_auth_driver_request(self, request):
        # Without a user, cookies are mirrored from the incoming request.
        webdriver = Mock()
        request.cookies = {"session": "session_val", "other_cookie": "other_val"}
        machine_auth_provider_factory.instance.authenticate_webdriver(webdriver, None)
        expected_cookie_calls = [
            call({"name": "session", "value": "session_val"}),
            call({"name": "other_cookie", "value": "other_val"}),
        ]
        webdriver.add_cookie.assert_has_calls(expected_cookie_calls)
|
||||
106
tests/integration_tests/utils/public_interfaces_test.py
Normal file
106
tests/integration_tests/utils/public_interfaces_test.py
Normal file
@@ -0,0 +1,106 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
# pylint: disable=no-self-use
|
||||
import pytest
|
||||
|
||||
from superset.sql_lab import dummy_sql_query_mutator
|
||||
from superset.utils.public_interfaces import compute_hash, get_warning_message
|
||||
from tests.integration_tests.base_tests import SupersetTestCase
|
||||
|
||||
# These are public interfaces exposed by Superset. Make sure
|
||||
# to only change the interfaces and update the hashes in new
|
||||
# major versions of Superset.
|
||||
# Registry of public callables and their frozen interface hashes.
# Changing any of these interfaces requires updating the hash here,
# which should only happen in a new major version of Superset.
hashes = {
    dummy_sql_query_mutator: "Kv%NM3b;7BcpoD2wbPkW",
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("interface,expected_hash", list(hashes.items()))
def test_public_interfaces(interface, expected_hash):
    """Verify each registered public interface still matches its recorded hash."""
    actual_hash = compute_hash(interface)
    assert actual_hash == expected_hash, get_warning_message(interface, actual_hash)
|
||||
|
||||
|
||||
def test_func_hash():
    """Changing a function's signature must change its computed hash."""

    def some_function(a, b):
        return a + b

    hash_before = compute_hash(some_function)

    # Redefine with an extra parameter; the hash must differ.
    # pylint: disable=function-redefined
    def some_function(a, b, c):
        return a + b + c

    hash_after = compute_hash(some_function)
    assert hash_before != hash_after
|
||||
|
||||
|
||||
def test_class_hash():
    """Verify which kinds of class changes alter the computed hash.

    Public-facing changes (``__init__`` signature, public method names) must
    change the hash; purely private additions must not.
    """

    # pylint: disable=too-few-public-methods, invalid-name
    class SomeClass:
        def __init__(self, a, b):
            self.a = a
            self.b = b

        def add(self):
            return self.a + self.b

    baseline_hash = compute_hash(SomeClass)

    # A different __init__ signature is a public change => new hash.
    # pylint: disable=function-redefined, too-few-public-methods, invalid-name
    class SomeClass:
        def __init__(self, a, b, c):
            self.a = a
            self.b = b
            self.c = c

        def add(self):
            return self.a + self.b

    assert compute_hash(SomeClass) != baseline_hash

    # Renaming a public method is a public change => new hash.
    # pylint: disable=function-redefined, too-few-public-methods, invalid-name
    class SomeClass:
        def __init__(self, a, b):
            self.a = a
            self.b = b

        def sum(self):
            return self.a + self.b

    assert compute_hash(SomeClass) != baseline_hash

    # Adding only a private helper is not a public change => same hash.
    # pylint: disable=function-redefined, too-few-public-methods, invalid-name
    class SomeClass:
        def __init__(self, a, b):
            self.a = a
            self.b = b

        def add(self):
            return self._sum()

        def _sum(self):
            return self.a + self.b

    assert compute_hash(SomeClass) == baseline_hash
|
||||
Reference in New Issue
Block a user