mirror of
https://github.com/apache/superset.git
synced 2026-04-28 12:34:23 +00:00
Compare commits
4 Commits
6.0
...
v2021.10.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c4ef1a1db4 | ||
|
|
4155788b89 | ||
|
|
fcc124d9f4 | ||
|
|
61356efd64 |
3
setup.py
3
setup.py
@@ -118,10 +118,11 @@ setup(
|
||||
"pybigquery>=0.4.10",
|
||||
"google-cloud-bigquery>=2.4.0",
|
||||
],
|
||||
"clickhouse": ["clickhouse-sqlalchemy>= 0.1.4, <0.2"],
|
||||
"clickhouse": ["clickhouse-sqlalchemy>=0.1.4, <0.2"],
|
||||
"cockroachdb": ["cockroachdb>=0.3.5, <0.4"],
|
||||
"cors": ["flask-cors>=2.0.0"],
|
||||
"crate": ["crate[sqlalchemy]>=0.26.0, <0.27"],
|
||||
"databricks": ["databricks-dbapi[sqlalchemy]>=0.5.0, <0.6"],
|
||||
"db2": ["ibm-db-sa>=0.3.5, <0.4"],
|
||||
"dremio": ["sqlalchemy-dremio>=1.1.5, <1.2"],
|
||||
"drill": ["sqlalchemy-drill==0.1.dev"],
|
||||
|
||||
23
superset/db_engine_specs/databricks.py
Normal file
23
superset/db_engine_specs/databricks.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from superset.db_engine_specs.hive import HiveEngineSpec
|
||||
|
||||
|
||||
class DatabricksHiveEngineSpec(HiveEngineSpec):
|
||||
engine = "databricks"
|
||||
engine_name = "Databricks Hive"
|
||||
driver = "pyhive"
|
||||
@@ -148,8 +148,8 @@ class ReportSchedulePostSchema(Schema):
|
||||
sql = fields.String(
|
||||
description=sql_description, example="SELECT value FROM time_series_table"
|
||||
)
|
||||
chart = fields.Integer(required=False)
|
||||
dashboard = fields.Integer(required=False)
|
||||
chart = fields.Integer(required=False, allow_none=True)
|
||||
dashboard = fields.Integer(required=False, allow_none=True)
|
||||
database = fields.Integer(required=False)
|
||||
owners = fields.List(fields.Integer(description=owners_description))
|
||||
validator_type = fields.String(
|
||||
|
||||
@@ -496,13 +496,16 @@ class TestReportSchedulesApi(SupersetTestCase):
|
||||
db.session.delete(created_model)
|
||||
db.session.commit()
|
||||
|
||||
@pytest.mark.usefixtures("create_report_schedules")
|
||||
@pytest.mark.usefixtures(
|
||||
"load_birth_names_dashboard_with_slices", "create_report_schedules"
|
||||
)
|
||||
def test_create_report_schedule_schema(self):
|
||||
"""
|
||||
ReportSchedule Api: Test create report schedule schema check
|
||||
"""
|
||||
self.login(username="admin")
|
||||
chart = db.session.query(Slice).first()
|
||||
dashboard = db.session.query(Dashboard).first()
|
||||
example_db = get_example_database()
|
||||
|
||||
# Check that a report does not have a database reference
|
||||
@@ -590,6 +593,56 @@ class TestReportSchedulesApi(SupersetTestCase):
|
||||
rv = self.client.post(uri, json=report_schedule_data)
|
||||
assert rv.status_code == 400
|
||||
|
||||
# Test that report can be created with null dashboard
|
||||
report_schedule_data = {
|
||||
"type": ReportScheduleType.ALERT,
|
||||
"name": "new4",
|
||||
"description": "description",
|
||||
"crontab": "0 9 * * *",
|
||||
"recipients": [
|
||||
{
|
||||
"type": ReportRecipientType.EMAIL,
|
||||
"recipient_config_json": {"target": "target@superset.org"},
|
||||
},
|
||||
{
|
||||
"type": ReportRecipientType.SLACK,
|
||||
"recipient_config_json": {"target": "channel"},
|
||||
},
|
||||
],
|
||||
"working_timeout": 3600,
|
||||
"chart": chart.id,
|
||||
"dashboard": None,
|
||||
"database": example_db.id,
|
||||
}
|
||||
uri = "api/v1/report/"
|
||||
rv = self.client.post(uri, json=report_schedule_data)
|
||||
assert rv.status_code == 201
|
||||
|
||||
# Test that report can be created with null chart
|
||||
report_schedule_data = {
|
||||
"type": ReportScheduleType.ALERT,
|
||||
"name": "new5",
|
||||
"description": "description",
|
||||
"crontab": "0 9 * * *",
|
||||
"recipients": [
|
||||
{
|
||||
"type": ReportRecipientType.EMAIL,
|
||||
"recipient_config_json": {"target": "target@superset.org"},
|
||||
},
|
||||
{
|
||||
"type": ReportRecipientType.SLACK,
|
||||
"recipient_config_json": {"target": "channel"},
|
||||
},
|
||||
],
|
||||
"working_timeout": 3600,
|
||||
"chart": None,
|
||||
"dashboard": dashboard.id,
|
||||
"database": example_db.id,
|
||||
}
|
||||
uri = "api/v1/report/"
|
||||
rv = self.client.post(uri, json=report_schedule_data)
|
||||
assert rv.status_code == 201
|
||||
|
||||
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
||||
def test_create_report_schedule_chart_dash_validation(self):
|
||||
"""
|
||||
|
||||
Reference in New Issue
Block a user