mirror of
https://github.com/apache/superset.git
synced 2026-04-19 08:04:53 +00:00
feat: new reports scheduler (#11711)
* feat(reports): scheduler and delivery system * working version * improvements and fix grace_period * add tests and fix bugs * fix report API test * test MySQL test fail * delete-orphans * fix MySQL tests * address comments * lint
This commit is contained in:
committed by
GitHub
parent
501b9d47c5
commit
f27ebc4be5
@@ -40,6 +40,7 @@ from superset.models.reports import (
|
||||
)
|
||||
|
||||
from tests.base_tests import SupersetTestCase
|
||||
from tests.reports.utils import insert_report_schedule
|
||||
from superset.utils.core import get_example_database
|
||||
|
||||
|
||||
@@ -47,48 +48,6 @@ REPORTS_COUNT = 10
|
||||
|
||||
|
||||
class TestReportSchedulesApi(SupersetTestCase):
|
||||
def insert_report_schedule(
|
||||
self,
|
||||
type: str,
|
||||
name: str,
|
||||
crontab: str,
|
||||
sql: Optional[str] = None,
|
||||
description: Optional[str] = None,
|
||||
chart: Optional[Slice] = None,
|
||||
dashboard: Optional[Dashboard] = None,
|
||||
database: Optional[Database] = None,
|
||||
owners: Optional[List[User]] = None,
|
||||
validator_type: Optional[str] = None,
|
||||
validator_config_json: Optional[str] = None,
|
||||
log_retention: Optional[int] = None,
|
||||
grace_period: Optional[int] = None,
|
||||
recipients: Optional[List[ReportRecipients]] = None,
|
||||
logs: Optional[List[ReportExecutionLog]] = None,
|
||||
) -> ReportSchedule:
|
||||
owners = owners or []
|
||||
recipients = recipients or []
|
||||
logs = logs or []
|
||||
report_schedule = ReportSchedule(
|
||||
type=type,
|
||||
name=name,
|
||||
crontab=crontab,
|
||||
sql=sql,
|
||||
description=description,
|
||||
chart=chart,
|
||||
dashboard=dashboard,
|
||||
database=database,
|
||||
owners=owners,
|
||||
validator_type=validator_type,
|
||||
validator_config_json=validator_config_json,
|
||||
log_retention=log_retention,
|
||||
grace_period=grace_period,
|
||||
recipients=recipients,
|
||||
logs=logs,
|
||||
)
|
||||
db.session.add(report_schedule)
|
||||
db.session.commit()
|
||||
return report_schedule
|
||||
|
||||
@pytest.fixture()
|
||||
def create_report_schedules(self):
|
||||
with self.create_app().app_context():
|
||||
@@ -116,7 +75,7 @@ class TestReportSchedulesApi(SupersetTestCase):
|
||||
)
|
||||
)
|
||||
report_schedules.append(
|
||||
self.insert_report_schedule(
|
||||
insert_report_schedule(
|
||||
type=ReportScheduleType.ALERT,
|
||||
name=f"name{cx}",
|
||||
crontab=f"*/{cx} * * * *",
|
||||
@@ -169,10 +128,6 @@ class TestReportSchedulesApi(SupersetTestCase):
|
||||
"last_value_row_json": report_schedule.last_value_row_json,
|
||||
"log_retention": report_schedule.log_retention,
|
||||
"name": report_schedule.name,
|
||||
"owners": [
|
||||
{"first_name": "admin", "id": 1, "last_name": "user"},
|
||||
{"first_name": "alpha", "id": 5, "last_name": "user"},
|
||||
],
|
||||
"recipients": [
|
||||
{
|
||||
"id": report_schedule.recipients[0].id,
|
||||
@@ -184,7 +139,16 @@ class TestReportSchedulesApi(SupersetTestCase):
|
||||
"validator_config_json": report_schedule.validator_config_json,
|
||||
"validator_type": report_schedule.validator_type,
|
||||
}
|
||||
assert data["result"] == expected_result
|
||||
for key in expected_result:
|
||||
assert data["result"][key] == expected_result[key]
|
||||
# needed because order may vary
|
||||
assert {"first_name": "admin", "id": 1, "last_name": "user"} in data["result"][
|
||||
"owners"
|
||||
]
|
||||
assert {"first_name": "alpha", "id": 5, "last_name": "user"} in data["result"][
|
||||
"owners"
|
||||
]
|
||||
assert len(data["result"]["owners"]) == 2
|
||||
|
||||
def test_info_report_schedule(self):
|
||||
"""
|
||||
|
||||
531
tests/reports/commands_tests.py
Normal file
531
tests/reports/commands_tests.py
Normal file
@@ -0,0 +1,531 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import json
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from contextlib2 import contextmanager
|
||||
from freezegun import freeze_time
|
||||
from sqlalchemy.sql import func
|
||||
|
||||
from superset import db
|
||||
from superset.models.core import Database
|
||||
from superset.models.dashboard import Dashboard
|
||||
from superset.models.reports import (
|
||||
ReportExecutionLog,
|
||||
ReportLogState,
|
||||
ReportRecipients,
|
||||
ReportRecipientType,
|
||||
ReportSchedule,
|
||||
ReportScheduleType,
|
||||
ReportScheduleValidatorType,
|
||||
)
|
||||
from superset.models.slice import Slice
|
||||
from superset.reports.commands.exceptions import (
|
||||
AlertQueryMultipleColumnsError,
|
||||
AlertQueryMultipleRowsError,
|
||||
ReportScheduleNotFoundError,
|
||||
ReportScheduleNotificationError,
|
||||
ReportSchedulePreviousWorkingError,
|
||||
)
|
||||
from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand
|
||||
from superset.utils.core import get_example_database
|
||||
from tests.reports.utils import insert_report_schedule
|
||||
from tests.test_app import app
|
||||
from tests.utils import read_fixture
|
||||
|
||||
|
||||
def get_target_from_report_schedule(report_schedule) -> List[str]:
    """Return the configured delivery target of every recipient on a schedule.

    Each recipient stores its settings as a JSON blob in
    ``recipient_config_json``; the ``"target"`` key holds the email address
    or Slack channel.
    """
    parsed_configs = (
        json.loads(recipient.recipient_config_json)
        for recipient in report_schedule.recipients
    )
    return [config["target"] for config in parsed_configs]
|
||||
|
||||
|
||||
def assert_log(state: str, error_message: Optional[str] = None):
    """Assert that exactly one execution log exists with the given outcome.

    Commits the session first so pending log rows written by the command
    under test are flushed and visible to the query.
    """
    db.session.commit()
    all_logs = db.session.query(ReportExecutionLog).all()
    assert len(all_logs) == 1
    only_log = all_logs[0]
    assert only_log.error_message == error_message
    assert only_log.state == state
|
||||
|
||||
|
||||
def create_report_notification(
    email_target: Optional[str] = None,
    slack_channel: Optional[str] = None,
    chart: Optional[Slice] = None,
    dashboard: Optional[Dashboard] = None,
    database: Optional[Database] = None,
    sql: Optional[str] = None,
    report_type: Optional[str] = None,
    validator_type: Optional[str] = None,
    validator_config_json: Optional[str] = None,
) -> ReportSchedule:
    """Insert and return a ReportSchedule with a single recipient.

    The recipient is a Slack one when ``slack_channel`` is given, otherwise
    an email one. ``report_type`` defaults to ``ReportScheduleType.REPORT``.

    Fixes: removed useless ``f``-string prefixes on placeholder-free
    literals (F541) and deduplicated the two identical ReportRecipients
    constructions that differed only in the recipient type.
    """
    report_type = report_type or ReportScheduleType.REPORT
    # Whichever target was supplied; the recipient type below decides how
    # it is interpreted (Slack channel vs. email address).
    target = email_target or slack_channel
    recipient_type = (
        ReportRecipientType.SLACK if slack_channel else ReportRecipientType.EMAIL
    )
    recipient = ReportRecipients(
        type=recipient_type,
        recipient_config_json=json.dumps({"target": target}),
    )

    report_schedule = insert_report_schedule(
        type=report_type,
        name="report",
        crontab="0 9 * * *",
        description="Daily report",
        sql=sql,
        chart=chart,
        dashboard=dashboard,
        database=database,
        recipients=[recipient],
        validator_type=validator_type,
        validator_config_json=validator_config_json,
    )
    return report_schedule
|
||||
|
||||
|
||||
@pytest.fixture()
def create_report_email_chart():
    """Yield an email report schedule bound to the first chart in the DB.

    Fix: ``@pytest.yield_fixture`` has been deprecated since pytest 3.0 and
    was removed in pytest 6.2; plain ``@pytest.fixture`` supports ``yield``.
    """
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            email_target="target@email.com", chart=chart
        )
        yield report_schedule

        # Teardown: remove the schedule so subsequent tests start clean.
        db.session.delete(report_schedule)
        db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture()
def create_report_email_dashboard():
    """Yield an email report schedule bound to the first dashboard in the DB.

    Fix: ``@pytest.yield_fixture`` has been deprecated since pytest 3.0 and
    was removed in pytest 6.2; plain ``@pytest.fixture`` supports ``yield``.
    """
    with app.app_context():
        dashboard = db.session.query(Dashboard).first()
        report_schedule = create_report_notification(
            email_target="target@email.com", dashboard=dashboard
        )
        yield report_schedule

        # Teardown: remove the schedule so subsequent tests start clean.
        db.session.delete(report_schedule)
        db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture()
def create_report_slack_chart():
    """Yield a Slack report schedule bound to the first chart in the DB.

    Fix: ``@pytest.yield_fixture`` has been deprecated since pytest 3.0 and
    was removed in pytest 6.2; plain ``@pytest.fixture`` supports ``yield``.
    """
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            slack_channel="slack_channel", chart=chart
        )
        yield report_schedule

        # Teardown: remove the schedule so subsequent tests start clean.
        db.session.delete(report_schedule)
        db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture()
def create_report_slack_chart_working():
    """Yield a Slack report schedule whose last state is WORKING.

    Used to exercise the "previous run still working" guard in the execute
    command. Fix: ``@pytest.yield_fixture`` has been deprecated since
    pytest 3.0 and was removed in pytest 6.2.
    """
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            slack_channel="slack_channel", chart=chart
        )
        # Simulate an in-flight run left behind by a previous execution.
        report_schedule.last_state = ReportLogState.WORKING
        db.session.commit()
        yield report_schedule

        # Teardown: remove the schedule so subsequent tests start clean.
        db.session.delete(report_schedule)
        db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture(
    params=["alert1", "alert2", "alert3", "alert4", "alert5", "alert6", "alert7"]
)
def create_alert_email_chart(request):
    """Parametrized fixture: an ALERT schedule expected to trigger.

    Each param pairs a SQL probe with a validator configuration that
    evaluates true against the probe's result (operators >, >=, <, <=, !=,
    a NOT NULL check, and a Jinja-templated query). Fix:
    ``@pytest.yield_fixture`` has been deprecated since pytest 3.0 and was
    removed in pytest 6.2.
    """
    param_config = {
        "alert1": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">", "threshold": 9}',
        },
        "alert2": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">=", "threshold": 10}',
        },
        "alert3": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 11}',
        },
        "alert4": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<=", "threshold": 10}',
        },
        "alert5": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "!=", "threshold": 11}',
        },
        "alert6": {
            "sql": "SELECT 'something' as metric",
            "validator_type": ReportScheduleValidatorType.NOT_NULL,
            "validator_config_json": "{}",
        },
        "alert7": {
            # Jinja template — exercises query templating in alerts.
            "sql": "SELECT {{ 5 + 5 }} as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "!=", "threshold": 11}',
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()

        report_schedule = create_report_notification(
            email_target="target@email.com",
            chart=chart,
            report_type=ReportScheduleType.ALERT,
            database=example_database,
            sql=param_config[request.param]["sql"],
            validator_type=param_config[request.param]["validator_type"],
            validator_config_json=param_config[request.param]["validator_config_json"],
        )
        yield report_schedule

        # Teardown: remove the schedule so subsequent tests start clean.
        db.session.delete(report_schedule)
        db.session.commit()
|
||||
|
||||
|
||||
@contextmanager
def create_test_table_context(database: Database):
    """Create and populate ``test_table`` on *database* for the duration
    of the context, yielding the current session.

    Fixes: the DROP is now in a ``finally`` block, so the table is cleaned
    up even when the ``with`` body raises (previously a failing test leaked
    the table into every later test); the engine is fetched once instead of
    on every statement.
    """
    engine = database.get_sqla_engine()
    engine.execute("CREATE TABLE test_table AS SELECT 1 as first, 2 as second")
    engine.execute("INSERT INTO test_table (first, second) VALUES (1, 2)")
    engine.execute("INSERT INTO test_table (first, second) VALUES (3, 4)")

    try:
        yield db.session
    finally:
        engine.execute("DROP TABLE test_table")
|
||||
|
||||
|
||||
@pytest.fixture(
    params=["alert1", "alert2", "alert3", "alert4", "alert5", "alert6"]
)
def create_no_alert_email_chart(request):
    """Parametrized fixture: an ALERT schedule expected NOT to trigger.

    Each param pairs a SQL probe with a validator configuration that
    evaluates false against the probe's result. Fix:
    ``@pytest.yield_fixture`` has been deprecated since pytest 3.0 and was
    removed in pytest 6.2.
    """
    param_config = {
        "alert1": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
        "alert2": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">=", "threshold": 11}',
        },
        # NOTE(review): "alert3" duplicates "alert1" exactly — one of the
        # two was probably meant to cover a different operator; confirm
        # with the original author before changing.
        "alert3": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
        "alert4": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<=", "threshold": 9}',
        },
        "alert5": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "!=", "threshold": 10}',
        },
        "alert6": {
            # Returns no rows, so the NOT NULL validator does not fire.
            "sql": "SELECT first from test_table where first=0",
            "validator_type": ReportScheduleValidatorType.NOT_NULL,
            "validator_config_json": "{}",
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):

            report_schedule = create_report_notification(
                email_target="target@email.com",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=param_config[request.param]["sql"],
                validator_type=param_config[request.param]["validator_type"],
                validator_config_json=param_config[request.param][
                    "validator_config_json"
                ],
            )
            yield report_schedule

            # Teardown: remove the schedule so subsequent tests start clean.
            db.session.delete(report_schedule)
            db.session.commit()
|
||||
|
||||
|
||||
@pytest.fixture(params=["alert1", "alert2"])
def create_mul_alert_email_chart(request):
    """Parametrized fixture: an ALERT schedule whose probe returns multiple
    rows ("alert1") or multiple columns ("alert2"), which the execute
    command must reject.

    Fix: ``@pytest.yield_fixture`` has been deprecated since pytest 3.0 and
    was removed in pytest 6.2.
    """
    param_config = {
        "alert1": {
            "sql": "SELECT first from test_table",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
        "alert2": {
            "sql": "SELECT first, second from test_table",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):

            report_schedule = create_report_notification(
                email_target="target@email.com",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=param_config[request.param]["sql"],
                validator_type=param_config[request.param]["validator_type"],
                validator_config_json=param_config[request.param][
                    "validator_config_json"
                ],
            )
            yield report_schedule

            # needed for MySQL: delete execution logs explicitly before the
            # schedule itself, otherwise the FK constraint blocks the delete.
            logs = (
                db.session.query(ReportExecutionLog)
                .filter(ReportExecutionLog.report_schedule == report_schedule)
                .all()
            )
            for log in logs:
                db.session.delete(log)
            db.session.commit()
            db.session.delete(report_schedule)
            db.session.commit()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_report_email_chart")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.compute_and_cache")
def test_email_chart_report_schedule(
    screenshot_mock, email_mock, create_report_email_chart
):
    """
    ExecuteReport Command: Test chart email report schedule

    Patches SMTP delivery and chart screenshot computation, runs the
    command at a frozen timestamp, then verifies the email target, the
    inline screenshot payload and the SUCCESS execution log.
    """
    # setup screenshot mock — the command gets this fixture PNG instead of
    # rendering a real chart
    screenshot = read_fixture("sample.png")
    screenshot_mock.return_value = screenshot

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            create_report_email_chart.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(
            create_report_email_chart
        )
        # Assert the email smtp address (first positional arg of send_email_smtp)
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot (keyed by a generated image name)
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == screenshot
        # Assert logs are correct
        assert_log(ReportLogState.SUCCESS)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_report_email_dashboard")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.DashboardScreenshot.compute_and_cache")
def test_email_dashboard_report_schedule(
    screenshot_mock, email_mock, create_report_email_dashboard
):
    """
    ExecuteReport Command: Test dashboard email report schedule

    Same flow as the chart variant but patching DashboardScreenshot:
    verifies the email target, the inline screenshot payload and the
    SUCCESS execution log.
    """
    # setup screenshot mock — the command gets this fixture PNG instead of
    # rendering a real dashboard
    screenshot = read_fixture("sample.png")
    screenshot_mock.return_value = screenshot

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            create_report_email_dashboard.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(
            create_report_email_dashboard
        )
        # Assert the email smtp address (first positional arg of send_email_smtp)
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot (keyed by a generated image name)
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == screenshot
        # Assert logs are correct
        assert_log(ReportLogState.SUCCESS)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_report_slack_chart")
@patch("superset.reports.notifications.slack.WebClient.files_upload")
@patch("superset.utils.screenshots.ChartScreenshot.compute_and_cache")
def test_slack_chart_report_schedule(
    screenshot_mock, file_upload_mock, create_report_slack_chart
):
    """
    ExecuteReport Command: Test chart slack report schedule

    Patches the Slack file upload and chart screenshot computation, runs
    the command at a frozen timestamp, then verifies the channel, the
    uploaded screenshot and the SUCCESS execution log.
    """
    # setup screenshot mock — the command gets this fixture PNG instead of
    # rendering a real chart
    screenshot = read_fixture("sample.png")
    screenshot_mock.return_value = screenshot

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            create_report_slack_chart.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(
            create_report_slack_chart
        )
        # files_upload is called with keyword args: channel and file payload
        assert file_upload_mock.call_args[1]["channels"] == notification_targets[0]
        assert file_upload_mock.call_args[1]["file"] == screenshot

        # Assert logs are correct
        assert_log(ReportLogState.SUCCESS)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_report_slack_chart")
def test_report_schedule_not_found(create_report_slack_chart):
    """
    ExecuteReport Command: Test report schedule not found
    """
    # One past the current maximum id is guaranteed not to exist.
    missing_id = db.session.query(func.max(ReportSchedule.id)).scalar() + 1
    with pytest.raises(ReportScheduleNotFoundError):
        AsyncExecuteReportScheduleCommand(missing_id, datetime.utcnow()).run()
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_report_slack_chart_working")
def test_report_schedule_working(create_report_slack_chart_working):
    """
    ExecuteReport Command: Test report schedule still working

    The fixture leaves the schedule in WORKING state; a second execution
    must refuse to run, log an ERROR with the grace-period message, and
    leave the schedule's state untouched.
    """
    # setup screenshot mock
    with pytest.raises(ReportSchedulePreviousWorkingError):
        AsyncExecuteReportScheduleCommand(
            create_report_slack_chart_working.id, datetime.utcnow()
        ).run()

    assert_log(
        ReportLogState.ERROR, error_message=ReportSchedulePreviousWorkingError.message
    )
    # The previous (still-working) run's state must not be overwritten.
    assert create_report_slack_chart_working.last_state == ReportLogState.WORKING
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_report_email_dashboard")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.DashboardScreenshot.compute_and_cache")
def test_email_dashboard_report_fails(
    screenshot_mock, email_mock, create_report_email_dashboard
):
    """
    ExecuteReport Command: Test dashboard email report schedule notification fails

    The SMTP mock raises, so the command must surface a
    ReportScheduleNotificationError and record the SMTP error message in
    the execution log.
    """
    # setup screenshot mock
    from smtplib import SMTPException

    screenshot = read_fixture("sample.png")
    screenshot_mock.return_value = screenshot
    # Simulate SMTP delivery failure on send.
    email_mock.side_effect = SMTPException("Could not connect to SMTP XPTO")

    with pytest.raises(ReportScheduleNotificationError):
        AsyncExecuteReportScheduleCommand(
            create_report_email_dashboard.id, datetime.utcnow()
        ).run()

    assert_log(ReportLogState.ERROR, error_message="Could not connect to SMTP XPTO")
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_alert_email_chart")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.compute_and_cache")
def test_slack_chart_alert(screenshot_mock, email_mock, create_alert_email_chart):
    """
    ExecuteReport Command: Test chart email alert

    NOTE(review): despite the function name, this test uses the email
    fixture and asserts on send_email_smtp — the docstring previously said
    "slack"; consider renaming the test to match.
    """
    # setup screenshot mock — the command gets this fixture PNG instead of
    # rendering a real chart
    screenshot = read_fixture("sample.png")
    screenshot_mock.return_value = screenshot

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            create_alert_email_chart.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(create_alert_email_chart)
        # Assert the email smtp address (first positional arg of send_email_smtp)
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot (keyed by a generated image name)
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == screenshot
        # Assert logs are correct
        assert_log(ReportLogState.SUCCESS)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_no_alert_email_chart")
def test_email_chart_no_alert(create_no_alert_email_chart):
    """
    ExecuteReport Command: Test chart email no alert
    """
    with freeze_time("2020-01-01T00:00:00Z"):
        command = AsyncExecuteReportScheduleCommand(
            create_no_alert_email_chart.id, datetime.utcnow()
        )
        command.run()
    # A non-triggering alert records a NOOP log entry.
    assert_log(ReportLogState.NOOP)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("create_mul_alert_email_chart")
def test_email_mul_alert(create_mul_alert_email_chart):
    """
    ExecuteReport Command: Test chart email multiple rows
    """
    # Either error is acceptable: the fixture params cover a multi-row and
    # a multi-column probe.
    expected_errors = (AlertQueryMultipleRowsError, AlertQueryMultipleColumnsError)
    with freeze_time("2020-01-01T00:00:00Z"), pytest.raises(expected_errors):
        AsyncExecuteReportScheduleCommand(
            create_mul_alert_email_chart.id, datetime.utcnow()
        ).run()
|
||||
68
tests/reports/utils.py
Normal file
68
tests/reports/utils.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from typing import List, Optional
|
||||
|
||||
from flask_appbuilder.security.sqla.models import User
|
||||
|
||||
from superset import db
|
||||
from superset.models.core import Database
|
||||
from superset.models.dashboard import Dashboard
|
||||
from superset.models.reports import ReportExecutionLog, ReportRecipients, ReportSchedule
|
||||
from superset.models.slice import Slice
|
||||
|
||||
|
||||
def insert_report_schedule(
    type: str,
    name: str,
    crontab: str,
    sql: Optional[str] = None,
    description: Optional[str] = None,
    chart: Optional[Slice] = None,
    dashboard: Optional[Dashboard] = None,
    database: Optional[Database] = None,
    owners: Optional[List[User]] = None,
    validator_type: Optional[str] = None,
    validator_config_json: Optional[str] = None,
    log_retention: Optional[int] = None,
    grace_period: Optional[int] = None,
    recipients: Optional[List[ReportRecipients]] = None,
    logs: Optional[List[ReportExecutionLog]] = None,
) -> ReportSchedule:
    """Create, persist and return a ReportSchedule with the given fields.

    ``owners``, ``recipients`` and ``logs`` fall back to empty lists so the
    relationship collections are never None. The row is committed before
    being returned.
    """
    report_schedule = ReportSchedule(
        type=type,
        name=name,
        crontab=crontab,
        sql=sql,
        description=description,
        chart=chart,
        dashboard=dashboard,
        database=database,
        owners=owners or [],
        validator_type=validator_type,
        validator_config_json=validator_config_json,
        log_retention=log_retention,
        grace_period=grace_period,
        recipients=recipients or [],
        logs=logs or [],
    )
    db.session.add(report_schedule)
    db.session.commit()
    return report_schedule
|
||||
Reference in New Issue
Block a user