refactor(tests): decouple unittests from integration tests (#15473)

* refactor: move all tests to be under the integration_tests package

* refactor: decouple unit tests from integration tests - commands

* add unit_tests package

* fix celery_tests.py

* fix wrong FIXTURES_DIR value
This commit is contained in:
ofekisr
2021-07-01 18:03:07 +03:00
committed by GitHub
parent 55d0371b92
commit b5119b8dff
172 changed files with 456 additions and 262 deletions

View File

@@ -0,0 +1,31 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
load_birth_names_dashboard_with_slices_module_scope,
)
from .energy_dashboard import load_energy_table_with_slice
from .public_role import public_role_like_gamma, public_role_like_test_role
from .unicode_dashboard import (
load_unicode_dashboard_with_position,
load_unicode_dashboard_with_slice,
)
from .world_bank_dashboard import (
load_world_bank_dashboard_with_slices,
load_world_bank_dashboard_with_slices_module_scope,
)

View File

@@ -0,0 +1,218 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import string
from datetime import date, datetime
from random import choice, getrandbits, randint, random, uniform
from typing import Any, Dict, List, Optional
import pandas as pd
import pytest
from pandas import DataFrame
from sqlalchemy import DateTime, String, TIMESTAMP
from superset import ConnectorRegistry, db
from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.utils.core import get_example_database
from tests.integration_tests.dashboard_utils import create_table_for_dashboard
from tests.integration_tests.test_app import app
@pytest.fixture()
def load_birth_names_dashboard_with_slices():
    """Function-scoped fixture: build the birth_names dashboard, then tear it down."""
    dashboard_id, slice_ids = _load_data()
    yield
    with app.app_context():
        _cleanup(dashboard_id, slice_ids)
@pytest.fixture(scope="module")
def load_birth_names_dashboard_with_slices_module_scope():
    """Module-scoped variant: dashboard is created once per test module."""
    dashboard_id, slice_ids = _load_data()
    yield
    with app.app_context():
        _cleanup(dashboard_id, slice_ids)
def _load_data():
    """Create the ``birth_names`` table, its slices and a dashboard.

    Returns ``(dash_id_to_delete, slices_ids_to_delete)`` so the calling
    fixture can remove exactly the objects created here.
    """
    table_name = "birth_names"
    with app.app_context():
        database = get_example_database()
        df = _get_dataframe(database)
        # Presto has no native DATETIME column here, so fall back to a string column.
        dtype = {
            "ds": DateTime if database.backend != "presto" else String(255),
            "gender": String(16),
            "state": String(10),
            "name": String(255),
        }
        table = _create_table(
            df=df,
            table_name=table_name,
            database=database,
            dtype=dtype,
            fetch_values_predicate="123 = 123",
        )
        # NOTE(review): imported lazily — presumably to avoid an import cycle
        # with superset.examples at module load time; confirm.
        from superset.examples.birth_names import create_slices, create_dashboard

        slices, _ = create_slices(table, admin_owner=False)
        dash = create_dashboard(slices)
        slices_ids_to_delete = [slice.id for slice in slices]
        dash_id_to_delete = dash.id
        return dash_id_to_delete, slices_ids_to_delete
def _create_table(
    df: DataFrame,
    table_name: str,
    database: "Database",
    dtype: Dict[str, Any],
    fetch_values_predicate: Optional[str] = None,
):
    """Persist *df* as *table_name* and attach birth_names metadata and metrics.

    Returns the created table object (as produced by
    ``create_table_for_dashboard``).
    """
    table = create_table_for_dashboard(
        df=df,
        table_name=table_name,
        database=database,
        dtype=dtype,
        fetch_values_predicate=fetch_values_predicate,
    )
    # NOTE(review): lazy import — presumably avoids an import cycle; confirm.
    from superset.examples.birth_names import _add_table_metrics, _set_table_metadata

    _set_table_metadata(table, database)
    _add_table_metrics(table)
    db.session.commit()
    return table
def _cleanup(dash_id: int, slices_ids: List[int]) -> None:
    """Drop the birth_names table and delete its columns, metrics, dashboard, slices."""
    table_id = db.session.query(SqlaTable).filter_by(table_name="birth_names").one().id
    datasource = ConnectorRegistry.get_datasource("table", table_id, db.session)
    # Snapshot the relationship collections before deleting from the session.
    columns = list(datasource.columns)
    metrics = list(datasource.metrics)

    engine = get_example_database().get_sqla_engine()
    engine.execute("DROP TABLE IF EXISTS birth_names")

    for orphan in columns + metrics:
        db.session.delete(orphan)

    dash = db.session.query(Dashboard).filter_by(id=dash_id).first()
    db.session.delete(dash)
    for slice_id in slices_ids:
        db.session.query(Slice).filter_by(id=slice_id).delete()
    db.session.commit()
def _get_dataframe(database: Database) -> DataFrame:
    """Build the synthetic birth_names dataframe, stringifying ``ds`` for Presto."""
    frame = pd.DataFrame.from_dict(_get_birth_names_data())
    if database.backend == "presto":
        # Presto ingestion expects the timestamp as a formatted string.
        frame.ds = frame.ds.dt.strftime("%Y-%m-%d %H:%M:%S")
    return frame
def _get_birth_names_data() -> List[Dict[Any, Any]]:
    """Generate 60 years (1960-2019) x 20 random birth records per year."""
    names = generate_names()
    rows: List[Dict[Any, Any]] = []
    for year in range(1960, 2020):
        timestamp = datetime(year, 1, 1, 0, 0, 0)
        for _ in range(20):
            gender = "boy" if choice([True, False]) else "girl"
            count = randint(1, 100000)
            rows.append(
                {
                    "ds": timestamp,
                    "gender": gender,
                    "name": choice(names),
                    "num": count,
                    "state": choice(us_states),
                    "num_boys": count if gender == "boy" else 0,
                    "num_girls": count if gender == "girl" else 0,
                }
            )
    return rows
def generate_names() -> List[str]:
    """Return 250 random lowercase ASCII names, each 3-12 characters long."""
    return [
        "".join(choice(string.ascii_lowercase) for _ in range(randint(3, 12)))
        for _ in range(250)
    ]
# All 50 US state postal abbreviations (original fixture order) plus a
# catch-all "other" bucket.
us_states = (
    "AL AK AZ AR CA CO CT DE FL GA HI ID IL IN IA KS KY LA ME MD "
    "MA MI MN MS MO MT NE NV NH NJ NM NY NC ND OH OK OR PA RI SC "
    "SD TN TX UT VT VA WA WV WI WY".split()
    + ["other"]
)

View File

@@ -0,0 +1,38 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# PEM-encoded X.509 certificate used as fixture data for database "extra"/SSL
# settings tests. NOTE(review): appears to be a self-signed cert issued to
# preset.io with a 2020-2040 validity window — tests should never validate it
# against a real trust store; confirm.
ssl_certificate = """-----BEGIN CERTIFICATE-----
MIIDnDCCAoQCCQCrdpcNPCA/eDANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMC
VVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcMCVNhbiBNYXRlbzEPMA0G
A1UECgwGUHJlc2V0MRMwEQYDVQQLDApTa3Vua3dvcmtzMRIwEAYDVQQDDAlwcmVz
ZXQuaW8xHTAbBgkqhkiG9w0BCQEWDmluZm9AcHJlc2V0LmlvMB4XDTIwMDMyNjEw
NTE1NFoXDTQwMDMyNjEwNTE1NFowgY8xCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApD
YWxpZm9ybmlhMRIwEAYDVQQHDAlTYW4gTWF0ZW8xDzANBgNVBAoMBlByZXNldDET
MBEGA1UECwwKU2t1bmt3b3JrczESMBAGA1UEAwwJcHJlc2V0LmlvMR0wGwYJKoZI
hvcNAQkBFg5pbmZvQHByZXNldC5pbzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC
AQoCggEBAKNHQZcu2L/6HvZfzy4Hnm3POeztfO+NJ7OzppAcNlLbTAatUk1YoDbJ
5m5GUW8m7pVEHb76UL6Xxei9MoMVvHGuXqQeZZnNd+DySW/227wkOPYOCVSuDsWD
1EReG+pv/z8CDhdwmMTkDTZUDr0BUR/yc8qTCPdZoalj2muDl+k2J3LSCkelx4U/
2iYhoUQD+lzFS3k7ohAfaGc2aZOlwTITopXHSFfuZ7j9muBOYtU7NgpnCl6WgxYP
1+4ddBIauPTBY2gWfZC2FeOfYEqfsUUXRsw1ehEQf4uxxTKNJTfTuVbdgrTYx5QQ
jrM88WvWdyVnIM7u7/x9bawfGX/b/F0CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA
XYLLk3T5RWIagNa3DPrMI+SjRm4PAI/RsijtBV+9hrkCXOQ1mvlo/ORniaiemHvF
Kh6u6MTl014+f6Ytg/tx/OzuK2ffo9x44ZV/yqkbSmKD1pGftYNqCnBCN0uo1Gzb
HZ+bTozo+9raFN7OGPgbdBmpQT2c+LG5n+7REobHFb7VLeY2/7BKtxNBRXfIxn4X
+MIhpASwLH5X64a1f9LyuPNMyUvKgzDe7jRdX1JZ7uw/1T//OHGQth0jLiapa6FZ
GwgYUaruSZH51ZtxrJSXKSNBA7asPSBbyOmGptLsw2GTAsoBd5sUR4+hbuVo+1ai
XeA3AKTX/OdYWJvr5YIgeQ==
-----END CERTIFICATE-----"""

View File

@@ -0,0 +1,22 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Default value of a database's ``extra`` field, kept as a JSON *string*
# because the application stores this field serialized, not as a dict.
default_db_extra = """{
"metadata_params": {},
"engine_params": {},
"metadata_cache_timeout": {},
"schemas_allowed_for_csv_upload": []
}"""

View File

@@ -0,0 +1,158 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import date, datetime
from pandas import DataFrame, to_datetime
# Hand-crafted people/vehicle dataset for pandas post-processing tests.
# Nulls are sprinkled deliberately ("seconds", "bikes", "cars", "region",
# "country", "name") so aggregation code paths see missing values.
names_df = DataFrame(
    [
        {
            "dt": date(2020, 1, 2),
            "name": "John",
            "region": "EU",
            "country": "United Kingdom",
            "cars": 3,
            "bikes": 1,
            "seconds": 30,
        },
        {
            "dt": date(2020, 1, 2),
            "name": "Peter",
            "region": "EU",
            "country": "Sweden",
            "cars": 4,
            "bikes": 2,
            "seconds": 1,
        },
        {
            "dt": date(2020, 1, 3),
            "name": "Mary",
            "region": "EU",
            "country": "Finland",
            "cars": 5,
            "bikes": 3,
            "seconds": None,
        },
        {
            "dt": date(2020, 1, 3),
            "name": "Peter",
            "region": "Asia",
            "country": "India",
            "cars": 6,
            "bikes": 4,
            "seconds": 12,
        },
        {
            "dt": date(2020, 1, 4),
            "name": "John",
            "region": "EU",
            "country": "Portugal",
            "cars": 7,
            "bikes": None,
            "seconds": 75,
        },
        {
            "dt": date(2020, 1, 4),
            "name": "Peter",
            "region": "EU",
            "country": "Italy",
            "cars": None,
            "bikes": 5,
            "seconds": 600,
        },
        {
            "dt": date(2020, 1, 4),
            "name": "Mary",
            "region": None,
            "country": None,
            "cars": 9,
            "bikes": 6,
            "seconds": 2,
        },
        {
            "dt": date(2020, 1, 4),
            "name": None,
            "region": "Oceania",
            "country": "Australia",
            "cars": 10,
            "bikes": 7,
            "seconds": 99,
        },
        {
            "dt": date(2020, 1, 1),
            "name": "John",
            "region": "North America",
            "country": "USA",
            "cars": 1,
            "bikes": 8,
            "seconds": None,
        },
        {
            "dt": date(2020, 1, 1),
            "name": "Mary",
            "region": "Oceania",
            "country": "Fiji",
            "cars": 2,
            "bikes": 9,
            "seconds": 50,
        },
    ]
)
# 101-row categorical dataset: a constant column, three cyclic label columns,
# ascending/descending indexes, and a column that is null except every 5th row.
categories_df = DataFrame(
    {
        "constant": ["dummy"] * 101,
        "category": [f"cat{i%3}" for i in range(101)],
        "dept": [f"dept{i%5}" for i in range(101)],
        "name": [f"person{i}" for i in range(101)],
        "asc_idx": list(range(101)),
        "desc_idx": list(range(100, -1, -1)),
        "idx_nulls": [i if i % 5 == 0 else None for i in range(101)],
    }
)
# Four labelled points on a datetime index. NOTE(review): the dates are
# deliberately non-contiguous (Jan 3/4/6 missing) — presumably to exercise
# gap/resample handling in the consuming tests; confirm there.
timeseries_df = DataFrame(
    index=to_datetime(["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"]),
    data={"label": ["x", "y", "z", "q"], "y": [1.0, 2.0, 3.0, 4.0]},
)
# Two geo points with several encodings of location. NOTE(review): "geohash"
# and "geodetic" appear to encode the same lat/lon pairs as the numeric
# columns — confirm before relying on that in new tests.
lonlat_df = DataFrame(
    {
        "city": ["New York City", "Sydney"],
        "geohash": ["dr5regw3pg6f", "r3gx2u9qdevk"],
        "latitude": [40.71277496, -33.85598011],
        "longitude": [-74.00597306, 151.20666526],
        "altitude": [5.5, 0.012],
        "geodetic": [
            "40.71277496, -74.00597306, 5.5km",
            "-33.85598011, 151.20666526, 12m",
        ],
    }
)
# Yearly series with two value columns, keyed by the "__timestamp" column
# used by the forecast (prophet) post-processing operation.
prophet_df = DataFrame(
    {
        "__timestamp": [
            datetime(2018, 12, 31),
            datetime(2019, 12, 31),
            datetime(2020, 12, 31),
            datetime(2021, 12, 31),
        ],
        "a": [1.1, 1, 1.9, 3.15],
        "b": [4, 3, 4.1, 3.95],
    }
)

View File

@@ -0,0 +1,152 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Fixtures for test_datasource.py"""
# Serialized birth_names datasource payload used by test_datasource.py
# (see module docstring). Mixed None/""/real values in the column and metric
# entries look deliberate, to exercise optional-field handling.
datasource_post = {
    "id": None,
    "column_formats": {"ratio": ".2%"},
    "database": {"id": 1},
    "description": "Adding a DESCRip",
    "default_endpoint": "",
    "filter_select_enabled": True,
    "name": "birth_names",
    "table_name": "birth_names",
    "datasource_name": "birth_names",
    "type": "table",
    "schema": "",
    "offset": 66,
    "cache_timeout": 55,
    "sql": "",
    "columns": [
        {
            "id": 504,
            "column_name": "ds",
            "verbose_name": "",
            "description": None,
            "expression": "",
            "filterable": True,
            "groupby": True,
            "is_dttm": True,
            "type": "DATETIME",
        },
        {
            "id": 505,
            "column_name": "gender",
            "verbose_name": None,
            "description": None,
            "expression": "",
            "filterable": True,
            "groupby": True,
            "is_dttm": False,
            "type": "VARCHAR(16)",
        },
        {
            "id": 506,
            "column_name": "name",
            "verbose_name": None,
            "description": None,
            "expression": None,
            "filterable": True,
            "groupby": True,
            "is_dttm": None,
            "type": "VARCHAR(255)",
        },
        {
            "id": 508,
            "column_name": "state",
            "verbose_name": None,
            "description": None,
            "expression": None,
            "filterable": True,
            "groupby": True,
            "is_dttm": None,
            "type": "VARCHAR(10)",
        },
        {
            "id": 509,
            "column_name": "num_boys",
            "verbose_name": None,
            "description": None,
            "expression": None,
            "filterable": True,
            "groupby": True,
            "is_dttm": None,
            "type": "BIGINT(20)",
        },
        {
            "id": 510,
            "column_name": "num_girls",
            "verbose_name": None,
            "description": None,
            "expression": "",
            "filterable": False,
            "groupby": False,
            "is_dttm": False,
            "type": "BIGINT(20)",
        },
        {
            "id": 532,
            "column_name": "num",
            "verbose_name": None,
            "description": None,
            "expression": None,
            "filterable": True,
            "groupby": True,
            "is_dttm": None,
            "type": "BIGINT(20)",
        },
        {
            # Calculated column: SQL expression instead of a physical column.
            "id": 522,
            "column_name": "num_california",
            "verbose_name": None,
            "description": None,
            "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
            "filterable": False,
            "groupby": False,
            "is_dttm": False,
            "type": "NUMBER",
        },
    ],
    "metrics": [
        {
            "id": 824,
            "metric_name": "sum__num",
            "verbose_name": "Babies",
            "description": "",
            "expression": "SUM(num)",
            "warning_text": "",
            "d3format": "",
        },
        {
            "id": 836,
            "metric_name": "count",
            "verbose_name": "",
            "description": None,
            "expression": "count(1)",
            "warning_text": None,
            "d3format": None,
        },
        {
            "id": 843,
            "metric_name": "ratio",
            "verbose_name": "Ratio Boys/Girls",
            "description": "This represents the ratio of boys/girls",
            "expression": "sum(num_boys) / sum(num_girls)",
            "warning_text": "no warning",
            "d3format": ".2%",
        },
    ],
}

View File

@@ -0,0 +1,47 @@
{
"color_picker": {
"a": 1,
"b": 135,
"g": 122,
"r": 0
},
"datasource": "12__table",
"filters": [],
"having": "",
"js_columns": [
"color"
],
"js_datapoint_mutator": "d => {\n return {\n ...d,\n color: colors.hexToRGB(d.extraProps.color),\n }\n}",
"js_onclick_href": "",
"js_tooltip": "",
"mapbox_style": "mapbox://styles/mapbox/light-v9",
"reverse_long_lat": false,
"row_limit": 5000,
"since": "7 days ago",
"slice_id": 1013,
"time_grain_sqla": null,
"until": "now",
"geojson": "test_col",
"viewport": {
"altitude": 1.5,
"bearing": 0,
"height": 1094,
"latitude": 37.73671752604488,
"longitude": -122.18885402582598,
"maxLatitude": 85.05113,
"maxPitch": 60,
"maxZoom": 20,
"minLatitude": -85.05113,
"minPitch": 0,
"minZoom": 0,
"pitch": 0,
"width": 669,
"zoom": 9.51847667620428
},
"viz_type": "deck_geojson",
"where": "",
"granularity_sqla": null,
"autozoom": true,
"url_params": {},
"size": 100
}

View File

@@ -0,0 +1,49 @@
{
"color_picker": {
"a": 1,
"b": 135,
"g": 122,
"r": 0
},
"datasource": "12__table",
"filters": [],
"having": "",
"js_columns": [
"color"
],
"js_datapoint_mutator": "d => {\n return {\n ...d,\n color: colors.hexToRGB(d.extraProps.color),\n }\n}",
"js_onclick_href": "",
"js_tooltip": "",
"line_column": "path_json",
"line_type": "json",
"line_width": 150,
"mapbox_style": "mapbox://styles/mapbox/light-v9",
"reverse_long_lat": false,
"row_limit": 5000,
"since": "7 days ago",
"slice_id": 1013,
"time_grain_sqla": null,
"until": "now",
"viewport": {
"altitude": 1.5,
"bearing": 0,
"height": 1094,
"latitude": 37.73671752604488,
"longitude": -122.18885402582598,
"maxLatitude": 85.05113,
"maxPitch": 60,
"maxZoom": 20,
"minLatitude": -85.05113,
"minPitch": 0,
"minZoom": 0,
"pitch": 0,
"width": 669,
"zoom": 9.51847667620428
},
"viz_type": "deck_path",
"where": "",
"granularity_sqla": null,
"autozoom": true,
"url_params": {},
"size": 100
}

View File

@@ -0,0 +1,177 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import random
import textwrap
from typing import Dict, Set
import pandas as pd
import pytest
from pandas import DataFrame
from sqlalchemy import column, Float, String
from superset import db
from superset.connectors.sqla.models import SqlaTable, SqlMetric
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.utils.core import get_example_database
from tests.integration_tests.dashboard_utils import (
create_slice,
create_table_for_dashboard,
)
from tests.integration_tests.test_app import app
# NOTE(review): not referenced anywhere in this module — presumably kept for
# parity with superset.examples.helpers; confirm before removing.
misc_dash_slices: Set[str] = set()
@pytest.fixture()
def load_energy_table_with_slice():
    """Create the ``energy_usage`` table and its slices; drop them after the test."""
    table_name = "energy_usage"
    df = _get_dataframe()
    with app.app_context():
        _create_energy_table(df, table_name)
        # Test runs here; cleanup happens inside the same app context.
        yield
        _cleanup()
def _get_dataframe():
    """Return the synthetic energy rows as a DataFrame."""
    return pd.DataFrame.from_dict(_get_energy_data())
def _create_energy_table(df: DataFrame, table_name: str):
    """Persist *df* as *table_name*, ensure a ``sum__value`` metric, create slices."""
    database = get_example_database()
    table_description = "Energy consumption"
    schema = {"source": String(255), "target": String(255), "value": Float()}
    table = create_table_for_dashboard(
        df, table_name, database, schema, table_description
    )
    table.fetch_metadata()
    # Add the sum__value metric only once, quoting the column name for the
    # active database dialect via compile().
    if not any(col.metric_name == "sum__value" for col in table.metrics):
        col = str(column("value").compile(db.engine))
        table.metrics.append(
            SqlMetric(metric_name="sum__value", expression=f"SUM({col})")
        )
    db.session.merge(table)
    db.session.commit()
    # Re-fetch so the freshly committed metric is reflected on the object.
    table.fetch_metadata()
    for slice_data in _get_energy_slices():
        _create_and_commit_energy_slice(
            table,
            slice_data["slice_title"],
            slice_data["viz_type"],
            slice_data["params"],
        )
def _create_and_commit_energy_slice(
    table: SqlaTable, title: str, viz_type: str, param: Dict[str, str]
):
    """Create and commit a slice for *table*, replacing any same-named slice.

    Returns the newly committed ``Slice``.
    """
    # Named `slc` (not `slice`) so the builtin `slice` is not shadowed.
    slc = create_slice(title, viz_type, table, param)
    existing_slice = (
        db.session.query(Slice).filter_by(slice_name=slc.slice_name).first()
    )
    if existing_slice:
        db.session.delete(existing_slice)
    db.session.add(slc)
    db.session.commit()
    return slc
def _cleanup() -> None:
    """Drop the energy_usage table and delete its slices and sum__value metric."""
    engine = get_example_database().get_sqla_engine()
    engine.execute("DROP TABLE IF EXISTS energy_usage")
    for slice_data in _get_energy_slices():
        # Named `slc` (not `slice`) so the builtin `slice` is not shadowed.
        slc = (
            db.session.query(Slice)
            .filter_by(slice_name=slice_data["slice_title"])
            .first()
        )
        # .first() returns None if the slice was never created (partial setup
        # failure); Session.delete(None) would raise, so guard it.
        if slc:
            db.session.delete(slc)
    metric = (
        db.session.query(SqlMetric).filter_by(metric_name="sum__value").one_or_none()
    )
    if metric:
        db.session.delete(metric)
    db.session.commit()
def _get_energy_data():
data = []
for i in range(85):
data.append(
{
"source": f"energy_source{i}",
"target": f"energy_target{i}",
"value": random.uniform(0.1, 11.0),
}
)
return data
def _get_energy_slices():
    """Return the chart definitions (title, viz_type, params) built for energy_usage."""
    return [
        {
            "slice_title": "Energy Sankey",
            "viz_type": "sankey",
            "params": {
                "collapsed_fieldsets": "",
                "groupby": ["source", "target"],
                "metric": "sum__value",
                "row_limit": "5000",
                "slice_name": "Energy Sankey",
                "viz_type": "sankey",
            },
        },
        {
            "slice_title": "Energy Force Layout",
            "viz_type": "graph_chart",
            "params": {
                "source": "source",
                "target": "target",
                "edgeLength": 400,
                "repulsion": 1000,
                "layout": "force",
                "metric": "sum__value",
                "row_limit": "5000",
                "slice_name": "Force",
                "viz_type": "graph_chart",
            },
        },
        {
            "slice_title": "Heatmap",
            "viz_type": "heatmap",
            "params": {
                "all_columns_x": "source",
                "all_columns_y": "target",
                "canvas_image_rendering": "pixelated",
                "collapsed_fieldsets": "",
                "linear_color_scheme": "blue_white_yellow",
                "metric": "sum__value",
                "normalize_across": "heatmap",
                "slice_name": "Heatmap",
                "viz_type": "heatmap",
                "xscale_interval": "1",
                "yscale_interval": "1",
            },
        },
    ]

View File

@@ -0,0 +1,514 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=line-too-long
from typing import Any, Dict, List
# example V0 import/export format
# V0 (legacy) UI export of the birth_names dataset: a list with one table,
# identified on import by the "remote_id"/"database_name" baked into params.
dataset_ui_export: List[Dict[str, Any]] = [
    {
        "columns": [
            {
                "column_name": "num_california",
                "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
            },
            {"column_name": "ds", "is_dttm": True, "type": "DATETIME"},
            {"column_name": "state", "type": "VARCHAR(10)"},
            {"column_name": "gender", "type": "VARCHAR(16)"},
            {"column_name": "name", "type": "VARCHAR(255)"},
            {"column_name": "num_boys", "type": "BIGINT"},
            {"column_name": "num_girls", "type": "BIGINT"},
            {"column_name": "num", "type": "BIGINT"},
        ],
        "filter_select_enabled": True,
        "main_dttm_col": "ds",
        "metrics": [
            {
                "expression": "COUNT(*)",
                "metric_name": "count",
                "metric_type": "count",
                "verbose_name": "COUNT(*)",
            },
            {"expression": "SUM(num)", "metric_name": "sum__num"},
        ],
        "params": '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}',
        "table_name": "birth_names_2",
    }
]
# V0 CLI export: same table list as the UI export, nested under its database.
dataset_cli_export: Dict[str, Any] = {
    "databases": [
        {
            "allow_run_async": True,
            "database_name": "examples",
            "sqlalchemy_uri": "sqlite:////Users/beto/.superset/superset.db",
            "tables": dataset_ui_export,
        }
    ]
}
# V0 dashboard export: one dashboard ("Births 2") with a single
# big_number_total slice, plus the full serialized birth_names_2 datasource
# (eight columns, two metrics) it depends on.
dashboard_export: Dict[str, Any] = {
    "dashboards": [
        {
            "__Dashboard__": {
                "css": "",
                "dashboard_title": "Births 2",
                "description": None,
                "json_metadata": '{"timed_refresh_immune_slices": [], "expanded_slices": {}, "refresh_frequency": 0, "default_filters": "{}", "color_scheme": null, "remote_id": 1}',
                "position_json": '{"CHART--jvaBFZx78":{"children":[],"id":"CHART--jvaBFZx78","meta":{"chartId":83,"height":50,"sliceName":"Number of California Births","uuid":"c77bb4b3-09f4-4d9a-a9e2-66a627c64343","width":4},"parents":["ROOT_ID","GRID_ID","ROW-se_5H8KNiO"],"type":"CHART"},"DASHBOARD_VERSION_KEY":"v2","GRID_ID":{"children":["ROW-se_5H8KNiO"],"id":"GRID_ID","parents":["ROOT_ID"],"type":"GRID"},"HEADER_ID":{"id":"HEADER_ID","meta":{"text":"Births"},"type":"HEADER"},"ROOT_ID":{"children":["GRID_ID"],"id":"ROOT_ID","type":"ROOT"},"ROW-se_5H8KNiO":{"children":["CHART--jvaBFZx78"],"id":"ROW-se_5H8KNiO","meta":{"background":"BACKGROUND_TRANSPARENT"},"parents":["ROOT_ID","GRID_ID"],"type":"ROW"}}',
                "slices": [
                    {
                        "__Slice__": {
                            "cache_timeout": None,
                            "datasource_name": "birth_names_2",
                            "datasource_type": "table",
                            "id": 83,
                            "params": '{"adhoc_filters": [], "datasource": "3__table", "granularity_sqla": "ds", "header_font_size": 0.4, "metric": {"aggregate": "SUM", "column": {"column_name": "num_california", "expression": "CASE WHEN state = \'CA\' THEN num ELSE 0 END"}, "expressionType": "SIMPLE", "label": "SUM(num_california)"}, "slice_id": 83, "subheader_font_size": 0.15, "time_range": "100 years ago : now", "time_range_endpoints": ["unknown", "inclusive"], "url_params": {}, "viz_type": "big_number_total", "y_axis_format": "SMART_NUMBER", "remote_id": 83, "datasource_name": "birth_names_2", "schema": null, "database_name": "examples"}',
                            "slice_name": "Number of California Births",
                            "viz_type": "big_number_total",
                        }
                    }
                ],
                "slug": None,
            }
        }
    ],
    "datasources": [
        {
            "__SqlaTable__": {
                "cache_timeout": None,
                "columns": [
                    {
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "column_name": "ds",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "description": None,
                            "expression": None,
                            "filterable": True,
                            "groupby": True,
                            "id": 332,
                            "is_active": True,
                            "is_dttm": True,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": "DATETIME",
                            "uuid": "98e22f20-ed71-4483-b09d-31780ed1fc1b",
                            "verbose_name": None,
                        }
                    },
                    {
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "column_name": "gender",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "description": None,
                            "expression": None,
                            "filterable": True,
                            "groupby": True,
                            "id": 333,
                            "is_active": True,
                            "is_dttm": False,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": "VARCHAR(16)",
                            "uuid": "08e08f02-fb81-4461-bba6-c8c8dfef0c02",
                            "verbose_name": None,
                        }
                    },
                    {
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "column_name": "name",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "description": None,
                            "expression": None,
                            "filterable": True,
                            "groupby": True,
                            "id": 334,
                            "is_active": True,
                            "is_dttm": False,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": "VARCHAR(255)",
                            "uuid": "c67b14d9-fc4b-427d-a363-a53af015fb5e",
                            "verbose_name": None,
                        }
                    },
                    {
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "column_name": "num",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "description": None,
                            "expression": None,
                            "filterable": True,
                            "groupby": True,
                            "id": 335,
                            "is_active": True,
                            "is_dttm": False,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": "BIGINT",
                            "uuid": "69835b93-7169-4a2c-baa7-c1c92f21d10a",
                            "verbose_name": None,
                        }
                    },
                    {
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "column_name": "state",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "description": None,
                            "expression": None,
                            "filterable": True,
                            "groupby": True,
                            "id": 336,
                            "is_active": True,
                            "is_dttm": False,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": "VARCHAR(10)",
                            "uuid": "80003ad0-bdd0-48d3-ade3-8d1838e07d7a",
                            "verbose_name": None,
                        }
                    },
                    {
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "column_name": "num_boys",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "description": None,
                            "expression": None,
                            "filterable": True,
                            "groupby": True,
                            "id": 337,
                            "is_active": True,
                            "is_dttm": False,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": "BIGINT",
                            "uuid": "8373ed24-4d4e-4307-9eee-8deefeecbb57",
                            "verbose_name": None,
                        }
                    },
                    {
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "column_name": "num_girls",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "description": None,
                            "expression": None,
                            "filterable": True,
                            "groupby": True,
                            "id": 338,
                            "is_active": True,
                            "is_dttm": False,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": "BIGINT",
                            "uuid": "46f2de5f-c008-4024-a163-0b5c5f1d5580",
                            "verbose_name": None,
                        }
                    },
                    {
                        # Calculated column (expression, no physical type).
                        "__TableColumn__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:32"},
                            "column_name": "num_california",
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:32"},
                            "description": None,
                            "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                            "filterable": True,
                            "groupby": True,
                            "id": 434,
                            "is_active": True,
                            "is_dttm": False,
                            "python_date_format": None,
                            "table_id": 3,
                            "type": None,
                            "uuid": "35e32aa6-be2b-4086-9c78-4ea3351ec079",
                            "verbose_name": None,
                        }
                    },
                ],
                "database_id": 1000,
                "default_endpoint": None,
                "description": None,
                "extra": None,
                "fetch_values_predicate": None,
                "filter_select_enabled": True,
                "main_dttm_col": "ds",
                "metrics": [
                    {
                        "__SqlMetric__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "d3format": None,
                            "description": None,
                            "expression": "COUNT(*)",
                            "extra": None,
                            "id": 9,
                            "metric_name": "count",
                            "metric_type": "count",
                            "table_id": 3,
                            "uuid": "1042ef50-ebf9-4271-b44e-3aaa891f6c21",
                            "verbose_name": "COUNT(*)",
                            "warning_text": None,
                        }
                    },
                    {
                        "__SqlMetric__": {
                            "changed_by_fk": None,
                            "changed_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "created_by_fk": None,
                            "created_on": {"__datetime__": "2020-10-07T15:50:00"},
                            "d3format": None,
                            "description": None,
                            "expression": "SUM(num)",
                            "extra": None,
                            "id": 10,
                            "metric_name": "sum__num",
                            "metric_type": None,
                            "table_id": 3,
                            "uuid": "d807f208-e3c6-4b89-b790-41f521216ff6",
                            "verbose_name": None,
                            "warning_text": None,
                        }
                    },
                ],
                "offset": 0,
                "params": '{"remote_id": 3, "database_name": "examples", "import_time": 1604342885}',
                "schema": None,
                "sql": None,
                "table_name": "birth_names_2",
                "template_params": None,
            }
        }
    ],
}
# example V1 import/export format
def _v1_metadata_config(type_: str, timestamp: str) -> Dict[str, Any]:
    """Build a V1 import/export ``metadata.yaml`` payload for one object type.

    The five configs below were identical copy-pasted dicts differing only in
    ``type`` (and one timestamp); deriving them from a single helper keeps
    them consistent.
    """
    return {"version": "1.0.0", "type": type_, "timestamp": timestamp}


# Shared timestamp used by the four original (2020) fixture configs.
_V1_TIMESTAMP = "2020-11-04T21:27:44.423819+00:00"

database_metadata_config: Dict[str, Any] = _v1_metadata_config(
    "Database", _V1_TIMESTAMP
)
dataset_metadata_config: Dict[str, Any] = _v1_metadata_config(
    "SqlaTable", _V1_TIMESTAMP
)
chart_metadata_config: Dict[str, Any] = _v1_metadata_config("Slice", _V1_TIMESTAMP)
dashboard_metadata_config: Dict[str, Any] = _v1_metadata_config(
    "Dashboard", _V1_TIMESTAMP
)
saved_queries_metadata_config: Dict[str, Any] = _v1_metadata_config(
    "SavedQuery", "2021-03-30T20:37:54.791187+00:00"
)
# Sample V1 export payload for a Database asset; ``uuid`` is referenced by
# ``dataset_config["database_uuid"]`` below to link dataset -> database.
database_config: Dict[str, Any] = {
    "allow_csv_upload": True,
    "allow_ctas": True,
    "allow_cvas": True,
    "allow_run_async": False,
    "cache_timeout": None,
    "database_name": "imported_database",
    "expose_in_sqllab": True,
    "extra": {},
    "sqlalchemy_uri": "sqlite:///test.db",
    "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89",
    "version": "1.0.0",
}
# Sample V1 export payload for a SqlaTable (dataset) asset, with one metric
# and one column; ``database_uuid`` points at ``database_config`` above and
# ``uuid`` is referenced by ``chart_config["dataset_uuid"]`` below.
dataset_config: Dict[str, Any] = {
    "table_name": "imported_dataset",
    "main_dttm_col": None,
    "description": "This is a dataset that was exported",
    "default_endpoint": "",
    "offset": 66,
    "cache_timeout": 55,
    "schema": "",
    "sql": "",
    "params": None,
    "template_params": {},
    "filter_select_enabled": True,
    "fetch_values_predicate": None,
    "extra": "dttm > sysdate() -10 ",
    "metrics": [
        {
            "metric_name": "count",
            "verbose_name": "",
            "metric_type": None,
            "expression": "count(1)",
            "description": None,
            "d3format": None,
            "extra": {},
            "warning_text": None,
        },
    ],
    "columns": [
        {
            "column_name": "cnt",
            "verbose_name": "Count of something",
            "is_dttm": False,
            "is_active": None,
            "type": "NUMBER",
            "groupby": False,
            "filterable": True,
            "expression": "",
            "description": None,
            "python_date_format": None,
        },
    ],
    "version": "1.0.0",
    "uuid": "10808100-158b-42c4-842e-f32b99d88dfb",
    "database_uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89",
}
chart_config: Dict[str, Any] = {
"slice_name": "Deck Path",
"viz_type": "deck_path",
"params": {
"color_picker": {"a": 1, "b": 135, "g": 122, "r": 0},
"datasource": "12__table",
"js_columns": ["color"],
"js_data_mutator": r"data => data.map(d => ({\n ...d,\n color: colors.hexToRGB(d.extraProps.color)\n}));",
"js_onclick_href": "",
"js_tooltip": "",
"line_column": "path_json",
"line_type": "json",
"line_width": 150,
"mapbox_style": "mapbox://styles/mapbox/light-v9",
"reverse_long_lat": False,
"row_limit": 5000,
"slice_id": 43,
"time_grain_sqla": None,
"time_range": " : ",
"viewport": {
"altitude": 1.5,
"bearing": 0,
"height": 1094,
"latitude": 37.73671752604488,
"longitude": -122.18885402582598,
"maxLatitude": 85.05113,
"maxPitch": 60,
"maxZoom": 20,
"minLatitude": -85.05113,
"minPitch": 0,
"minZoom": 0,
"pitch": 0,
"width": 669,
"zoom": 9.51847667620428,
},
"viz_type": "deck_path",
},
"cache_timeout": None,
"uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
"version": "1.0.0",
"dataset_uuid": "10808100-158b-42c4-842e-f32b99d88dfb",
}
# Sample V1 export payload for a Dashboard asset. ``position`` is the layout
# tree (ROOT -> GRID -> ROW -> CHART); the CHART node's ``uuid`` matches
# ``chart_config`` above so the importer can wire the chart in.
dashboard_config = {
    "dashboard_title": "Test dash",
    "description": None,
    "css": "",
    "slug": None,
    "uuid": "c4b28c4e-a1fe-4cf8-a5ac-d6f11d6fdd51",
    "position": {
        "CHART-SVAlICPOSJ": {
            "children": [],
            "id": "CHART-SVAlICPOSJ",
            "meta": {
                "chartId": 83,
                "height": 50,
                "sliceName": "Number of California Births",
                "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
                "width": 4,
            },
            "parents": ["ROOT_ID", "GRID_ID", "ROW-dP_CHaK2q"],
            "type": "CHART",
        },
        "DASHBOARD_VERSION_KEY": "v2",
        "GRID_ID": {
            "children": ["ROW-dP_CHaK2q"],
            "id": "GRID_ID",
            "parents": ["ROOT_ID"],
            "type": "GRID",
        },
        "HEADER_ID": {
            "id": "HEADER_ID",
            "meta": {"text": "Test dash"},
            "type": "HEADER",
        },
        "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"},
        "ROW-dP_CHaK2q": {
            "children": ["CHART-SVAlICPOSJ"],
            "id": "ROW-dP_CHaK2q",
            "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"},
            "parents": ["ROOT_ID", "GRID_ID"],
            "type": "ROW",
        },
    },
    # ``remote_id``/``import_time`` exercise legacy import bookkeeping fields.
    "metadata": {
        "timed_refresh_immune_slices": [83],
        "filter_scopes": {"83": {"region": {"scope": ["ROOT_ID"], "immune": [83]}},},
        "expanded_slices": {"83": True},
        "refresh_frequency": 0,
        "default_filters": "{}",
        "color_scheme": None,
        "remote_id": 7,
        "import_time": 1604342885,
    },
    "version": "1.0.0",
}
# Sample V1 export payload for a SavedQuery asset; ``database_uuid`` points
# at ``database_config`` above.
saved_queries_config = {
    "schema": "public",
    "label": "Test Saved Query",
    "description": None,
    "sql": "-- Note: Unless you save your query, these tabs will NOT persist if you clear\nyour cookies or change browsers.\n\n\nSELECT * from birth_names",
    "uuid": "05b679b5-8eaf-452c-b874-a7a774cfa4e9",
    "version": "1.0.0",
    "database_uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89",
}

View File

@@ -0,0 +1,44 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from superset.extensions import db, security_manager
from tests.integration_tests.test_app import app
def _public_role_like(role_name: str):
    """Make the Public role mirror *role_name* for the duration of a test.

    Syncs role definitions with ``PUBLIC_ROLE_LIKE`` set to *role_name*,
    yields to the test, then strips all permissions from the Public role so
    later tests are unaffected. Shared by both fixtures below, which were
    previously copy-pasted bodies differing only in the role name.
    """
    with app.app_context():
        app.config["PUBLIC_ROLE_LIKE"] = role_name
        security_manager.sync_role_definitions()
        yield
        security_manager.get_public_role().permissions = []
        db.session.commit()


@pytest.fixture()
def public_role_like_gamma():
    """Public role temporarily cloned from the Gamma role."""
    yield from _public_role_like("Gamma")


@pytest.fixture()
def public_role_like_test_role():
    """Public role temporarily cloned from TestRole."""
    yield from _public_role_like("TestRole")

View File

@@ -0,0 +1,28 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
class Row(object):
    """Minimal iterable wrapper around a sequence of cell values.

    Used as a lightweight stand-in for a database result row in tests.
    """

    def __init__(self, values):
        # Stored as-is; iteration simply walks the underlying sequence.
        self.values = values

    def __name__(self):  # pylint: disable=no-self-use
        return "Row"

    def __iter__(self):
        return iter(self.values)

View File

@@ -0,0 +1,246 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import copy
from typing import Any, Dict, List
from superset.utils.core import AnnotationType, DTTM_ALIAS, TimeRangeEndpoint
from tests.integration_tests.base_tests import get_table_by_name
# Base QueryObject payload for the ``birth_names`` example dataset; the
# suffixed entries in ``QUERY_OBJECTS`` below layer overrides on top of this.
query_birth_names = {
    "extras": {
        "where": "",
        "time_range_endpoints": (
            TimeRangeEndpoint.INCLUSIVE,
            TimeRangeEndpoint.EXCLUSIVE,
        ),
        "time_grain_sqla": "P1D",
    },
    "groupby": ["name"],
    "metrics": [{"label": "sum__num"}],
    "orderby": [("sum__num", False)],
    "row_limit": 100,
    "granularity": "ds",
    "time_range": "100 years ago : now",
    "timeseries_limit": 0,
    "timeseries_limit_metric": None,
    "order_desc": True,
    "filters": [
        {"col": "gender", "op": "==", "val": "boy"},
        {"col": "num", "op": "IS NOT NULL"},
        {"col": "name", "op": "NOT IN", "val": ["<NULL>", '"abc"']},
    ],
    "having": "",
    "having_filters": [],
    "where": "",
}
# Named example queries, keyed by ``datasource[:test_case]``. Entries whose
# key carries a ``:suffix`` hold only the fields that differ from the parent
# datasource's base query; ``get_query_object`` merges them at lookup time.
QUERY_OBJECTS: Dict[str, Dict[str, object]] = {
    "birth_names": query_birth_names,
    # `:suffix` are overrides only
    "birth_names:include_time": {"groupby": [DTTM_ALIAS, "name"],},
    "birth_names:orderby_dup_alias": {
        "metrics": [
            {
                "expressionType": "SIMPLE",
                "column": {"column_name": "num_girls", "type": "BIGINT(20)"},
                "aggregate": "SUM",
                "label": "num_girls",
            },
            {
                "expressionType": "SIMPLE",
                "column": {"column_name": "num_boys", "type": "BIGINT(20)"},
                "aggregate": "SUM",
                "label": "num_boys",
            },
        ],
        "orderby": [
            [
                {
                    "expressionType": "SIMPLE",
                    "column": {"column_name": "num_girls", "type": "BIGINT(20)"},
                    "aggregate": "SUM",
                    # the same underlying expression, but different label
                    "label": "SUM(num_girls)",
                },
                False,
            ],
            # reference the ambiguous alias in SIMPLE metric
            [
                {
                    "expressionType": "SIMPLE",
                    "column": {"column_name": "num_boys", "type": "BIGINT(20)"},
                    "aggregate": "AVG",
                    "label": "AVG(num_boys)",
                },
                False,
            ],
            # reference the ambiguous alias in CUSTOM SQL metric
            [
                {
                    "expressionType": "SQL",
                    "sqlExpression": "MAX(CASE WHEN num_boys > 0 THEN 1 ELSE 0 END)",
                    "label": "MAX(CASE WHEN...",
                },
                True,
            ],
        ],
    },
    "birth_names:only_orderby_has_metric": {"metrics": [],},
}
# One example annotation-layer payload per AnnotationType, as produced by the
# Explore UI (formula, event, interval, and time-series layers).
ANNOTATION_LAYERS = {
    AnnotationType.FORMULA: {
        "annotationType": "FORMULA",
        "color": "#ff7f44",
        "hideLine": False,
        "name": "my formula",
        "opacity": "",
        "overrides": {"time_range": None},
        "show": True,
        "showMarkers": False,
        "sourceType": "",
        "style": "solid",
        "value": "3+x",
        "width": 5,
    },
    AnnotationType.EVENT: {
        "name": "my event",
        "annotationType": "EVENT",
        "sourceType": "NATIVE",
        "color": "#e04355",
        "opacity": "",
        "style": "solid",
        "width": 5,
        "showMarkers": False,
        "hideLine": False,
        "value": 1,
        "overrides": {"time_range": None},
        "show": True,
        "titleColumn": "",
        "descriptionColumns": [],
        "timeColumn": "",
        "intervalEndColumn": "",
    },
    AnnotationType.INTERVAL: {
        "name": "my interval",
        "annotationType": "INTERVAL",
        "sourceType": "NATIVE",
        "color": "#e04355",
        "opacity": "",
        "style": "solid",
        "width": 1,
        "showMarkers": False,
        "hideLine": False,
        "value": 1,
        "overrides": {"time_range": None},
        "show": True,
        "titleColumn": "",
        "descriptionColumns": [],
        "timeColumn": "",
        "intervalEndColumn": "",
    },
    AnnotationType.TIME_SERIES: {
        "annotationType": "TIME_SERIES",
        "color": None,
        "descriptionColumns": [],
        "hideLine": False,
        "intervalEndColumn": "",
        "name": "my line",
        "opacity": "",
        "overrides": {"time_range": None},
        "show": True,
        "showMarkers": False,
        "sourceType": "line",
        "style": "dashed",
        "timeColumn": "",
        "titleColumn": "",
        "value": 837,
        "width": 5,
    },
}
# Post-processing pipelines keyed by base datasource name. Note the keys here
# are NOT suffixed like ``QUERY_OBJECTS``' — only ``"birth_names"`` exists.
POSTPROCESSING_OPERATIONS = {
    "birth_names": [
        {
            "operation": "aggregate",
            "options": {
                "groupby": ["gender"],
                "aggregates": {
                    "q1": {
                        "operator": "percentile",
                        "column": "sum__num",
                        "options": {"q": 25},
                    },
                    "median": {"operator": "median", "column": "sum__num",},
                },
            },
        },
        {"operation": "sort", "options": {"columns": {"q1": False, "gender": True},},},
    ]
}
def get_query_object(
    query_name: str, add_postprocessing_operations: bool
) -> Dict[str, Any]:
    """Return a deep-copied QueryObject dict for the named example query.

    ``query_name`` has the shape ``datasource[:test_case]``; when a suffix is
    present, the suffixed entry in ``QUERY_OBJECTS`` holds overrides that are
    layered on top of the parent datasource's base query.
    """
    if query_name not in QUERY_OBJECTS:
        raise Exception(f"QueryObject fixture not defined for datasource: {query_name}")
    merged = QUERY_OBJECTS[query_name]
    if ":" in query_name:
        # Suffixed entries are overrides only — merge over the parent query.
        parent_name = query_name.split(":", 1)[0]
        merged = {**QUERY_OBJECTS[parent_name], **merged}
    query_object = copy.deepcopy(merged)
    if add_postprocessing_operations:
        query_object["post_processing"] = _get_postprocessing_operation(query_name)
    return query_object
def _get_postprocessing_operation(query_name: str) -> List[Dict[str, Any]]:
    """Return a deep copy of the post-processing pipeline for *query_name*.

    Bug fix: the guard previously tested membership in ``QUERY_OBJECTS`` while
    indexing ``POSTPROCESSING_OPERATIONS``, so a query that existed in the
    former but not the latter (e.g. ``"birth_names:include_time"``) slipped
    past the friendly error and crashed with a bare ``KeyError``.
    """
    if query_name not in POSTPROCESSING_OPERATIONS:
        raise Exception(
            f"Post-processing fixture not defined for datasource: {query_name}"
        )
    return copy.deepcopy(POSTPROCESSING_OPERATIONS[query_name])
def get_query_context(
    query_name: str, add_postprocessing_operations: bool = False,
) -> Dict[str, Any]:
    """
    Create a request payload for retrieving a QueryContext object via the
    `api/v1/chart/data` endpoint. By default returns a payload corresponding to one
    generated by the "Boy Name Cloud" chart in the examples.

    The datasource id/type are resolved by looking up the table named by the
    part of `query_name` before the optional `:` suffix.

    :param query_name: name of an example query, which is always in the format
           of `datasource_name[:test_case_name]`, where `:test_case_name` is optional.
    :param add_postprocessing_operations: Add post-processing operations to QueryObject
    :return: Request payload
    """
    table_name = query_name.split(":")[0]
    table = get_table_by_name(table_name)
    return {
        "datasource": {"id": table.id, "type": table.type},
        "queries": [get_query_object(query_name, add_postprocessing_operations)],
    }

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.4 KiB

View File

@@ -0,0 +1,3 @@
t1,t2,t3__sum
c11,c12,c13
c21,c22,c23
1 t1 t2 t3__sum
2 c11 c12 c13
3 c21 c22 c23

View File

@@ -0,0 +1,110 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pandas as pd
import pytest
from pandas import DataFrame
from sqlalchemy import String
from superset import db
from superset.connectors.sqla.models import SqlaTable
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.utils.core import get_example_database
from tests.integration_tests.dashboard_utils import (
create_dashboard,
create_slice,
create_table_for_dashboard,
)
from tests.integration_tests.test_app import app
@pytest.fixture()
def load_unicode_dashboard_with_slice():
    """Dashboard backed by ``unicode_test`` with a "Unicode Cloud" slice."""
    slice_name = "Unicode Cloud"
    df = _get_dataframe()
    with app.app_context():
        dash = _create_unicode_dashboard(df, "unicode_test", slice_name, None)
        yield
        _cleanup(dash, slice_name)


@pytest.fixture()
def load_unicode_dashboard_with_position():
    """Same dashboard as above, but created with an explicit (empty) position."""
    slice_name = "Unicode Cloud"
    df = _get_dataframe()
    with app.app_context():
        dash = _create_unicode_dashboard(df, "unicode_test", slice_name, "{}")
        yield
        _cleanup(dash, slice_name)
def _get_dataframe():
data = _get_unicode_data()
return pd.DataFrame.from_dict(data)
def _get_unicode_data():
return [
{"phrase": "Под"},
{"phrase": "řšž"},
{"phrase": "視野無限廣"},
{"phrase": "微風"},
{"phrase": "中国智造"},
{"phrase": "æøå"},
{"phrase": "ëœéè"},
{"phrase": "いろはにほ"},
]
def _create_unicode_dashboard(
    df: DataFrame, table_name: str, slice_title: str, position: str
) -> Dashboard:
    """Create the unicode test table and a dashboard (optionally with a slice).

    Bug fix: the original did ``if slice_title: slice = ...`` and then always
    passed ``[slice]`` — with a falsy title the local was never bound, so the
    list silently captured the *builtin* ``slice`` type. Now an empty slice
    list is passed instead, and the builtin is no longer shadowed.
    """
    database = get_example_database()
    dtype = {
        "phrase": String(500),
    }
    table = create_table_for_dashboard(df, table_name, database, dtype)
    table.fetch_metadata()
    slices = []
    if slice_title:
        slices.append(_create_and_commit_unicode_slice(table, slice_title))
    return create_dashboard("unicode-test", "Unicode Test", position, slices)
def _create_and_commit_unicode_slice(table: SqlaTable, title: str) -> Slice:
    """Create a word-cloud slice on *table*, replacing any same-named slice.

    Renamed the local from ``slice`` to ``slc`` — the original shadowed the
    builtin ``slice`` type.
    """
    slc = create_slice(title, "word_cloud", table, {})
    existing = db.session.query(Slice).filter_by(slice_name=slc.slice_name).one_or_none()
    if existing:
        db.session.delete(existing)
    db.session.add(slc)
    db.session.commit()
    return slc
def _cleanup(dash: Dashboard, slice_name: str) -> None:
    """Drop the ``unicode_test`` table and delete the dashboard and its slice.

    Bug fix: ``one_or_none()`` may return ``None`` when the slice is already
    gone; the original passed that straight to ``db.session.delete`` (which
    raises). Also renamed the local away from the builtin ``slice``.
    """
    engine = get_example_database().get_sqla_engine()
    engine.execute("DROP TABLE IF EXISTS unicode_test")
    db.session.delete(dash)
    if slice_name:
        slc = db.session.query(Slice).filter_by(slice_name=slice_name).one_or_none()
        if slc:
            db.session.delete(slc)
    db.session.commit()

View File

@@ -0,0 +1,487 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import string
from random import choice, randint, random, uniform
from typing import Any, Dict, List
import pandas as pd
import pytest
from pandas import DataFrame
from sqlalchemy import DateTime, String, TIMESTAMP
from superset import db
from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.utils.core import get_example_database
from tests.integration_tests.dashboard_utils import (
create_dashboard,
create_table_for_dashboard,
)
from tests.integration_tests.test_app import app
@pytest.fixture()
def load_world_bank_dashboard_with_slices():
    """Function-scoped World Bank dashboard; torn down after each test."""
    dash_id, slice_ids = _load_data()
    yield
    with app.app_context():
        _cleanup(dash_id, slice_ids)


@pytest.fixture(scope="module")
def load_world_bank_dashboard_with_slices_module_scope():
    """Module-scoped variant: the dashboard is shared by a whole test module."""
    dash_id, slice_ids = _load_data()
    yield
    with app.app_context():
        _cleanup(dash_id, slice_ids)
def _load_data():
    """Create the ``wb_health_population`` table, its slices, and a dashboard.

    Returns ``(dashboard_id, [slice_id, ...])`` so the caller can clean up.
    """
    with app.app_context():
        database = get_example_database()
        df = _get_dataframe(database)
        # ``year`` is written as a formatted string on presto
        # (see _get_dataframe), hence the String column there.
        column_types = {
            "year": String(255) if database.backend == "presto" else DateTime,
            "country_code": String(3),
            "country_name": String(255),
            "region": String(255),
        }
        table = create_table_for_dashboard(
            df, "wb_health_population", database, column_types
        )
        created_slices = _create_world_bank_slices(table)
        dashboard = _create_world_bank_dashboard(table, created_slices)
        return dashboard.id, [slc.id for slc in created_slices]
def _create_world_bank_slices(table: SqlaTable) -> List[Slice]:
    """Build the example World Bank slices for *table* and persist them."""
    # NOTE: kept as a function-local import, matching the original module.
    from superset.examples.world_bank import create_slices

    created = create_slices(table)
    _commit_slices(created)
    return created
def _commit_slices(slices: List[Slice]) -> None:
    """Insert each slice, first deleting any existing slice with the same name.

    Renamed the loop variable from ``slice`` to ``slc`` — the original
    shadowed the builtin ``slice`` type.
    """
    for slc in slices:
        existing = (
            db.session.query(Slice).filter_by(slice_name=slc.slice_name).one_or_none()
        )
        if existing:
            db.session.delete(existing)
        db.session.add(slc)
        # Commit per slice, preserving the original's transactional behavior.
        db.session.commit()
def _create_world_bank_dashboard(table: SqlaTable, slices: List[Slice]) -> Dashboard:
    """Create the "World Bank's Data" dashboard positioned over *slices*."""
    # NOTE: kept as function-local imports, matching the original module.
    from superset.examples.helpers import update_slice_ids
    from superset.examples.world_bank import dashboard_positions

    positions = dashboard_positions
    update_slice_ids(positions, slices)
    table.fetch_metadata()
    dashboard = create_dashboard(
        "world_health", "World Bank's Data", json.dumps(positions), slices
    )
    dashboard.json_metadata = '{"mock_key": "mock_value"}'
    db.session.commit()
    return dashboard
def _cleanup(dash_id: int, slices_ids: List[int]) -> None:
    """Drop the fixture table and delete the dashboard and slices by id.

    Bug fix: ``first()`` returns ``None`` when the dashboard is already gone,
    and the original passed that straight to ``db.session.delete`` (which
    raises); guard before deleting.
    """
    engine = get_example_database().get_sqla_engine()
    engine.execute("DROP TABLE IF EXISTS wb_health_population")
    dash = db.session.query(Dashboard).filter_by(id=dash_id).first()
    if dash:
        db.session.delete(dash)
    for slice_id in slices_ids:
        db.session.query(Slice).filter_by(id=slice_id).delete()
    db.session.commit()
def _get_dataframe(database: Database) -> DataFrame:
    """Build the random World Bank health DataFrame for *database*.

    Bug fix: the presto branch used the malformed strftime format
    ``"%Y-%m-%d %H:%M%:%S"`` (stray ``%`` before the seconds directive);
    corrected to ``"%H:%M:%S"``. The duplicated ``pd.to_datetime`` call in
    both branches is also hoisted out of the conditional.
    """
    df = pd.DataFrame.from_dict(_get_world_bank_data())
    df.year = pd.to_datetime(df.year)
    if database.backend == "presto":
        # Presto gets ``year`` as a preformatted string (see the String
        # column type chosen in _load_data).
        df.year = df.year.dt.strftime("%Y-%m-%d %H:%M:%S")
    return df
def _get_world_bank_data() -> List[Dict[Any, Any]]:
data = []
for _ in range(100):
data.append(
{
"country_name": "".join(
choice(string.ascii_uppercase + string.ascii_lowercase + " ")
for _ in range(randint(3, 10))
),
"country_code": "".join(
choice(string.ascii_uppercase + string.ascii_lowercase)
for _ in range(3)
),
"region": "".join(
choice(string.ascii_uppercase + string.ascii_lowercase)
for _ in range(randint(3, 10))
),
"year": "-".join(
[str(randint(1900, 2020)), str(randint(1, 12)), str(randint(1, 28))]
),
"NY_GNP_PCAP_CD": get_random_float_or_none(0, 100, 0.3),
"SE_ADT_1524_LT_FM_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_ADT_1524_LT_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_ADT_1524_LT_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_ADT_LITR_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_ADT_LITR_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_ADT_LITR_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_ENR_ORPH": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_CMPT_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_CMPT_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_CMPT_ZS": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_ENRR": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_ENRR_FE": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_ENRR_MA": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_NENR": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_NENR_FE": get_random_float_or_none(0, 100, 0.3),
"SE_PRM_NENR_MA": get_random_float_or_none(0, 100, 0.3),
"SE_SEC_ENRR": get_random_float_or_none(0, 100, 0.3),
"SE_SEC_ENRR_FE": get_random_float_or_none(0, 100, 0.3),
"SE_SEC_ENRR_MA": get_random_float_or_none(0, 100, 0.3),
"SE_SEC_NENR": get_random_float_or_none(0, 100, 0.3),
"SE_SEC_NENR_FE": get_random_float_or_none(0, 100, 0.3),
"SE_SEC_NENR_MA": get_random_float_or_none(0, 100, 0.3),
"SE_TER_ENRR": get_random_float_or_none(0, 100, 0.3),
"SE_TER_ENRR_FE": get_random_float_or_none(0, 100, 0.3),
"SE_XPD_TOTL_GD_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_ANM_CHLD_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_ANM_NPRG_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_CON_1524_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_CON_1524_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_CON_AIDS_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_CON_AIDS_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_DTH_COMM_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_DTH_IMRT": get_random_float_or_none(0, 100, 0.3),
"SH_DTH_INJR_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_DTH_MORT": get_random_float_or_none(0, 100, 0.3),
"SH_DTH_NCOM_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_DTH_NMRT": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_AIDS": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_AIDS_DH": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_AIDS_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_AIDS_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_MORT": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_MORT_FE": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_MORT_MA": get_random_float_or_none(0, 100, 0.3),
"SH_DYN_NMRT": get_random_float_or_none(0, 100, 0.3),
"SH_FPL_SATI_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_H2O_SAFE_RU_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_H2O_SAFE_UR_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_H2O_SAFE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_0014": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_1524_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_1524_KW_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_1524_KW_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_1524_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_ARTC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_KNOW_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_KNOW_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_ORPH": get_random_float_or_none(0, 100, 0.3),
"SH_HIV_TOTL": get_random_float_or_none(0, 100, 0.3),
"SH_IMM_HEPB": get_random_float_or_none(0, 100, 0.3),
"SH_IMM_HIB3": get_random_float_or_none(0, 100, 0.3),
"SH_IMM_IBCG": get_random_float_or_none(0, 100, 0.3),
"SH_IMM_IDPT": get_random_float_or_none(0, 100, 0.3),
"SH_IMM_MEAS": get_random_float_or_none(0, 100, 0.3),
"SH_IMM_POL3": get_random_float_or_none(0, 100, 0.3),
"SH_MED_BEDS_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_MED_CMHW_P3": get_random_float_or_none(0, 100, 0.3),
"SH_MED_NUMW_P3": get_random_float_or_none(0, 100, 0.3),
"SH_MED_PHYS_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_MLR_NETS_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_MLR_PREG_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_MLR_SPF2_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_MLR_TRET_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_MMR_DTHS": get_random_float_or_none(0, 100, 0.3),
"SH_MMR_LEVE": get_random_float_or_none(0, 100, 0.3),
"SH_MMR_RISK": get_random_float_or_none(0, 100, 0.3),
"SH_MMR_RISK_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_MMR_WAGE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_PRG_ANEM": get_random_float_or_none(0, 100, 0.3),
"SH_PRG_ARTC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_PRG_SYPH_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_PRV_SMOK_FE": get_random_float_or_none(0, 100, 0.3),
"SH_PRV_SMOK_MA": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ACSN": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ACSN_RU": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ACSN_UR": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ANV4_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ANVC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ARIC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_BFED_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_BRTC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_BRTW_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_DIAB_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_IYCF_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_MALN_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_MALN_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_MALN_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_MALR": get_random_float_or_none(0, 100, 0.3),
"SH_STA_MMRT": get_random_float_or_none(0, 100, 0.3),
"SH_STA_MMRT_NE": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ORCF_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_ORTH": get_random_float_or_none(0, 100, 0.3),
"SH_STA_OW15_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_OW15_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_OW15_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_OWGH_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_OWGH_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_OWGH_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_PNVC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_STNT_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_STNT_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_STNT_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_WAST_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_WAST_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_STA_WAST_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_SVR_WAST_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_SVR_WAST_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_SVR_WAST_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_TBS_CURE_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_TBS_DTEC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_TBS_INCD": get_random_float_or_none(0, 100, 0.3),
"SH_TBS_MORT": get_random_float_or_none(0, 100, 0.3),
"SH_TBS_PREV": get_random_float_or_none(0, 100, 0.3),
"SH_VAC_TTNS_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_EXTR_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_OOPC_TO_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_OOPC_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_PCAP": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_PCAP_PP_KD": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_PRIV": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_PRIV_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_PUBL": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_PUBL_GX_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_PUBL_ZS": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_TOTL_CD": get_random_float_or_none(0, 100, 0.3),
"SH_XPD_TOTL_ZS": get_random_float_or_none(0, 100, 0.3),
"SI_POV_NAHC": get_random_float_or_none(0, 100, 0.3),
"SI_POV_RUHC": get_random_float_or_none(0, 100, 0.3),
"SI_POV_URHC": get_random_float_or_none(0, 100, 0.3),
"SL_EMP_INSV_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SL_TLF_TOTL_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SL_TLF_TOTL_IN": get_random_float_or_none(0, 100, 0.3),
"SL_UEM_TOTL_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SL_UEM_TOTL_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SL_UEM_TOTL_ZS": get_random_float_or_none(0, 100, 0.3),
"SM_POP_NETM": get_random_float_or_none(0, 100, 0.3),
"SN_ITK_DEFC": get_random_float_or_none(0, 100, 0.3),
"SN_ITK_DEFC_ZS": get_random_float_or_none(0, 100, 0.3),
"SN_ITK_SALT_ZS": get_random_float_or_none(0, 100, 0.3),
"SN_ITK_VITA_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_ADO_TFRT": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_AMRT_FE": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_AMRT_MA": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_CBRT_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_CDRT_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_CONU_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_IMRT_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_IMRT_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_IMRT_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_LE00_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_LE00_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_LE00_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_SMAM_FE": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_SMAM_MA": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_TFRT_IN": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_TO65_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_TO65_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_DYN_WFRT": get_random_float_or_none(0, 100, 0.3),
"SP_HOU_FEMA_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_MTR_1519_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0004_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0004_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0004_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0004_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0014_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0014_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0014_TO": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0014_TO_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0509_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0509_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0509_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_0509_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1014_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1014_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1014_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1014_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1519_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1519_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1519_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1519_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1564_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1564_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1564_TO": get_random_float_or_none(0, 100, 0.3),
"SP_POP_1564_TO_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2024_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2024_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2024_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2024_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2529_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2529_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2529_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_2529_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3034_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3034_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3034_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3034_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3539_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3539_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3539_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_3539_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4044_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4044_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4044_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4044_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4549_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4549_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4549_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_4549_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5054_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5054_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5054_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5054_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5559_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5559_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5559_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_5559_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6064_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6064_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6064_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6064_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6569_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6569_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6569_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_6569_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_65UP_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_65UP_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_65UP_TO": get_random_float_or_none(0, 100, 0.3),
"SP_POP_65UP_TO_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7074_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7074_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7074_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7074_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7579_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7579_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7579_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_7579_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_80UP_FE": get_random_float_or_none(0, 100, 0.3),
"SP_POP_80UP_FE_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_80UP_MA": get_random_float_or_none(0, 100, 0.3),
"SP_POP_80UP_MA_5Y": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG00_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG00_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG01_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG01_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG02_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG02_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG03_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG03_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG04_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG04_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG05_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG05_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG06_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG06_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG07_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG07_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG08_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG08_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG09_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG09_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG10_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG10_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG11_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG11_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG12_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG12_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG13_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG13_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG14_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG14_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG15_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG15_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG16_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG16_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG17_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG17_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG18_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG18_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG19_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG19_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG20_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG20_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG21_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG21_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG22_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG22_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG23_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG23_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG24_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG24_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG25_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_AG25_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_BRTH_MF": get_random_float_or_none(0, 100, 0.3),
"SP_POP_DPND": get_random_float_or_none(0, 100, 0.3),
"SP_POP_DPND_OL": get_random_float_or_none(0, 100, 0.3),
"SP_POP_DPND_YG": get_random_float_or_none(0, 100, 0.3),
"SP_POP_GROW": get_random_float_or_none(0, 100, 0.3),
"SP_POP_TOTL": get_random_float_or_none(0, 100, 0.3),
"SP_POP_TOTL_FE_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_TOTL_FE_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_POP_TOTL_MA_IN": get_random_float_or_none(0, 100, 0.3),
"SP_POP_TOTL_MA_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_REG_BRTH_RU_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_REG_BRTH_UR_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_REG_BRTH_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_REG_DTHS_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_RUR_TOTL": get_random_float_or_none(0, 100, 0.3),
"SP_RUR_TOTL_ZG": get_random_float_or_none(0, 100, 0.3),
"SP_RUR_TOTL_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_URB_GROW": get_random_float_or_none(0, 100, 0.3),
"SP_URB_TOTL": get_random_float_or_none(0, 100, 0.3),
"SP_URB_TOTL_IN_ZS": get_random_float_or_none(0, 100, 0.3),
"SP_UWT_TFRT": get_random_float_or_none(0, 100, 0.3),
}
)
return data
def get_random_float_or_none(min_value, max_value, none_probability):
    """Return a uniform random float in [min_value, max_value], or None.

    With probability ``none_probability`` (expected in [0.0, 1.0]) the
    function returns ``None`` instead of a number — used to simulate
    sparse / missing metric values in generated fixture rows.

    :param min_value: lower bound of the uniform range
    :param max_value: upper bound of the uniform range
    :param none_probability: chance of returning ``None`` instead of a value
    :return: a float in the given range, or ``None``
    """
    # random() is uniform on [0, 1), so this branch fires with exactly
    # the requested probability; no `else` needed after the early return.
    if random() < none_probability:
        return None
    return uniform(min_value, max_value)