# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # isort:skip_file """Unit tests for Superset""" from datetime import datetime, timedelta from typing import Any from unittest.mock import patch import pytz import pytest import prison from parameterized import parameterized from sqlalchemy.sql import func from superset import db, security_manager from superset.models.core import Database from superset.models.slice import Slice from superset.models.dashboard import Dashboard from superset.reports.models import ( ReportSchedule, ReportCreationMethod, ReportRecipients, ReportExecutionLog, ReportScheduleType, ReportRecipientType, ReportState, ) from superset.utils.database import get_example_database from superset.utils import json from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.conftest import with_feature_flags from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, # noqa: F401 load_birth_names_data, # noqa: F401 ) from tests.integration_tests.fixtures.dashboard_with_tabs import ( load_mutltiple_tabs_dashboard, # noqa: F401 ) from tests.integration_tests.reports.utils import 
insert_report_schedule REPORTS_COUNT = 10 REPORTS_ROLE_NAME = "reports_role" REPORTS_GAMMA_USER = "reports_gamma" class TestReportSchedulesApi(SupersetTestCase): @pytest.fixture def gamma_user_with_alerts_role(self): with self.create_app().app_context(): user = self.create_user( REPORTS_GAMMA_USER, "general", "Gamma", email=f"{REPORTS_GAMMA_USER}@superset.org", ) security_manager.add_role(REPORTS_ROLE_NAME) read_perm = security_manager.find_permission_view_menu( "can_read", "ReportSchedule", ) write_perm = security_manager.find_permission_view_menu( "can_write", "ReportSchedule", ) reports_role = security_manager.find_role(REPORTS_ROLE_NAME) security_manager.add_permission_role(reports_role, read_perm) security_manager.add_permission_role(reports_role, write_perm) user.roles.append(reports_role) yield user # rollback changes (assuming cascade delete) db.session.delete(reports_role) db.session.delete(user) db.session.commit() @pytest.fixture def create_working_admin_report_schedule(self): with self.create_app().app_context(): admin_user = self.get_user("admin") chart = db.session.query(Slice).first() example_db = get_example_database() report_schedule = insert_report_schedule( type=ReportScheduleType.ALERT, name="name_admin_working", crontab="* * * * *", sql="SELECT value from table", description="Report working", chart=chart, database=example_db, owners=[admin_user], last_state=ReportState.WORKING, ) yield db.session.delete(report_schedule) db.session.commit() @pytest.fixture def create_working_gamma_report_schedule(self, gamma_user_with_alerts_role): with self.create_app().app_context(): chart = db.session.query(Slice).first() example_db = get_example_database() report_schedule = insert_report_schedule( type=ReportScheduleType.ALERT, name="name_gamma_working", crontab="* * * * *", sql="SELECT value from table", description="Report working", chart=chart, database=example_db, owners=[gamma_user_with_alerts_role], last_state=ReportState.WORKING, ) yield 
db.session.delete(report_schedule) db.session.commit() @pytest.fixture def create_working_shared_report_schedule(self, gamma_user_with_alerts_role): with self.create_app().app_context(): admin_user = self.get_user("admin") alpha_user = self.get_user("alpha") chart = db.session.query(Slice).first() example_db = get_example_database() report_schedule = insert_report_schedule( type=ReportScheduleType.ALERT, name="name_shared_working", crontab="* * * * *", sql="SELECT value from table", description="Report working", chart=chart, database=example_db, owners=[admin_user, alpha_user, gamma_user_with_alerts_role], last_state=ReportState.WORKING, ) yield db.session.delete(report_schedule) db.session.commit() @pytest.fixture def create_report_schedules(self): with self.create_app().app_context(): report_schedules = [] admin_user = self.get_user("admin") alpha_user = self.get_user("alpha") chart = db.session.query(Slice).first() example_db = get_example_database() for cx in range(REPORTS_COUNT): recipients = [] logs = [] for cy in range(cx): config_json = {"target": f"target{cy}@email.com"} recipients.append( ReportRecipients( type=ReportRecipientType.EMAIL, recipient_config_json=json.dumps(config_json), ) ) logs.append( ReportExecutionLog( scheduled_dttm=datetime(2020, 1, 1), state=ReportState.ERROR, error_message=f"Error {cy}", ) ) report_schedules.append( insert_report_schedule( type=ReportScheduleType.ALERT, name=f"name{cx}", crontab=f"*/{cx} * * * *", sql=f"SELECT value from table{cx}", # noqa: S608 description=f"Some description {cx}", chart=chart, database=example_db, owners=[admin_user, alpha_user], recipients=recipients, logs=logs, ) ) yield report_schedules report_schedules = db.session.query(ReportSchedule).all() # rollback changes (assuming cascade delete) for report_schedule in report_schedules: db.session.delete(report_schedule) db.session.commit() @pytest.fixture def create_alpha_users(self): with self.create_app().app_context(): users = [ self.create_user( 
                    "alpha1", "password", "Alpha", email="alpha1@superset.org"
                ),
                self.create_user(
                    "alpha2", "password", "Alpha", email="alpha2@superset.org"
                ),
            ]
            yield users
            # rollback changes (assuming cascade delete)
            for user in users:
                db.session.delete(user)
            db.session.commit()

    @with_feature_flags(ALERT_REPORTS=False)
    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_report_schedule_disabled(self):
        """
        ReportSchedule Api: Test get report schedule 404s when feature is disabled
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .first()
        )
        self.login(ADMIN_USERNAME)
        uri = f"api/v1/report/{report_schedule.id}"
        # With ALERT_REPORTS disabled the API blueprint is not registered,
        # so even an existing schedule id must come back as 404.
        rv = self.client.get(uri)
        assert rv.status_code == 404

    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_report_schedule(self):
        """
        ReportSchedule Api: Test get report schedule
        """
        # "name1" is one of the schedules seeded by the create_report_schedules
        # fixture (names are f"name{cx}" for cx in range(REPORTS_COUNT)).
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .first()
        )
        self.login(ADMIN_USERNAME)
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.get_assert_metric(uri, "get")
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 200
        # Expected payload is rebuilt field-by-field from the ORM object so the
        # assertions below track whatever values the fixture actually persisted.
        expected_result = {
            "active": report_schedule.active,
            "chart": {
                "id": report_schedule.chart.id,
                "slice_name": report_schedule.chart.slice_name,
                "viz_type": report_schedule.chart.viz_type,
            },
            "context_markdown": report_schedule.context_markdown,
            "crontab": report_schedule.crontab,
            "dashboard": None,
            "database": {
                "id": report_schedule.database.id,
                "database_name": report_schedule.database.database_name,
            },
            "description": report_schedule.description,
            "grace_period": report_schedule.grace_period,
            "id": report_schedule.id,
            "last_eval_dttm": report_schedule.last_eval_dttm,
            "last_state": report_schedule.last_state,
            "last_value": report_schedule.last_value,
            "last_value_row_json": report_schedule.last_value_row_json,
            "log_retention": report_schedule.log_retention,
            "name": report_schedule.name,
            "recipients": [
                {
                    "id": report_schedule.recipients[0].id,
                    "recipient_config_json": '{"target": "target0@email.com"}',
                    "type": "Email",
                }
            ],
            "timezone": report_schedule.timezone,
            "type": report_schedule.type,
            "validator_config_json": report_schedule.validator_config_json,
            "validator_type": report_schedule.validator_type,
        }
        for key in expected_result:
            assert data["result"][key] == expected_result[key]
        # needed because order may vary
        # NOTE(review): the hardcoded ids 1 and 5 assume the default seeding of
        # the admin/alpha test users — TODO confirm against the test harness.
        assert {
            "email": "admin@fab.org",
            "first_name": "admin",
            "id": 1,
            "last_name": "user",
        } in data["result"]["owners"]
        assert {
            "email": "alpha@fab.org",
            "first_name": "alpha",
            "id": 5,
            "last_name": "user",
        } in data["result"]["owners"]
        assert len(data["result"]["owners"]) == 2

    def test_info_report_schedule(self):
        """
        ReportSchedule API: Test info
        """
        self.login(ADMIN_USERNAME)
        uri = "api/v1/report/_info"  # noqa: F541
        rv = self.get_assert_metric(uri, "info")
        assert rv.status_code == 200

    def test_info_security_report(self):
        """
        ReportSchedule API: Test info security
        """
        self.login(ADMIN_USERNAME)
        params = {"keys": ["permissions"]}
        uri = f"api/v1/report/_info?q={prison.dumps(params)}"
        rv = self.get_assert_metric(uri, "info")
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 200
        # Admin holds both ReportSchedule permissions and nothing else.
        assert "can_read" in data["permissions"]
        assert "can_write" in data["permissions"]
        assert len(data["permissions"]) == 2

    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_report_schedule_not_found(self):
        """
        ReportSchedule Api: Test get report schedule not found
        """
        # max id + 1 is guaranteed not to exist.
        max_id = db.session.query(func.max(ReportSchedule.id)).scalar()
        self.login(ADMIN_USERNAME)
        uri = f"api/v1/report/{max_id + 1}"
        rv = self.get_assert_metric(uri, "get")
        assert rv.status_code == 404

    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_list_report_schedule(self):
        """
        ReportSchedule Api: Test get list report schedules
        """
        self.login(ADMIN_USERNAME)
        uri = "api/v1/report/"  # noqa: F541
        rv = self.get_assert_metric(uri, "get_list")
        # Sorted list of keys the list endpoint must expose per item.
        expected_fields = [
            "active",
            "changed_by",
            "changed_on",
"changed_on_delta_humanized", "chart_id", "created_by", "created_on", "creation_method", "crontab", "crontab_humanized", "dashboard_id", "description", "extra", "id", "last_eval_dttm", "last_state", "name", "owners", "recipients", "timezone", "type", ] assert rv.status_code == 200 data = json.loads(rv.data.decode("utf-8")) assert data["count"] == REPORTS_COUNT data_keys = sorted(list(data["result"][0].keys())) # noqa: C414 assert expected_fields == data_keys # Assert nested fields expected_owners_fields = ["email", "first_name", "id", "last_name"] data_keys = sorted(list(data["result"][0]["owners"][0].keys())) # noqa: C414 assert expected_owners_fields == data_keys expected_recipients_fields = ["id", "type"] data_keys = sorted(list(data["result"][1]["recipients"][0].keys())) # noqa: C414 assert expected_recipients_fields == data_keys @parameterized.expand( [ ( "admin", { "name_admin_working", "name_gamma_working", "name_shared_working", }, ), ( "alpha", { "name_admin_working", "name_gamma_working", "name_shared_working", }, ), ( REPORTS_GAMMA_USER, { "name_gamma_working", "name_shared_working", }, ), ], ) @pytest.mark.usefixtures( "create_working_admin_report_schedule", "create_working_gamma_report_schedule", "create_working_shared_report_schedule", "gamma_user_with_alerts_role", ) def test_get_list_report_schedule_perms(self, username, report_names): """ ReportSchedule Api: Test get list report schedules for different roles """ self.login(username) uri = "api/v1/report/" # noqa: F541 rv = self.get_assert_metric(uri, "get_list") assert rv.status_code == 200 data = json.loads(rv.data.decode("utf-8")) assert {report["name"] for report in data["result"]} == report_names def test_get_list_report_schedule_gamma(self): """ ReportSchedule Api: Test get list report schedules for regular gamma user """ self.login(GAMMA_USERNAME) uri = "api/v1/report/" # noqa: F541 rv = self.client.get(uri) assert rv.status_code == 403 @pytest.mark.usefixtures("create_report_schedules") def 
test_get_list_report_schedule_sorting(self): """ ReportSchedule Api: Test sorting on get list report schedules """ self.login(ADMIN_USERNAME) uri = "api/v1/report/" order_columns = [ "active", "created_by.first_name", "changed_by.first_name", "changed_on", "changed_on_delta_humanized", "created_on", "crontab", "description", "last_eval_dttm", "name", "type", "crontab_humanized", ] for order_column in order_columns: arguments = {"order_column": order_column, "order_direction": "asc"} uri = f"api/v1/report/?q={prison.dumps(arguments)}" rv = self.get_assert_metric(uri, "get_list") assert rv.status_code == 200 @pytest.mark.usefixtures("create_report_schedules") def test_get_list_report_schedule_filter_name(self): """ ReportSchedule Api: Test filter name on get list report schedules """ self.login(ADMIN_USERNAME) # Test normal contains filter arguments = { "columns": ["name"], "filters": [{"col": "name", "opr": "ct", "value": "2"}], } uri = f"api/v1/report/?q={prison.dumps(arguments)}" rv = self.get_assert_metric(uri, "get_list") expected_result = { "name": "name2", } assert rv.status_code == 200 data = json.loads(rv.data.decode("utf-8")) assert data["count"] == 1 assert data["result"][0] == expected_result @pytest.mark.usefixtures("create_report_schedules") def test_get_list_report_schedule_filter_custom(self): """ ReportSchedule Api: Test custom filter on get list report schedules """ self.login(ADMIN_USERNAME) # Test custom all text filter arguments = { "columns": ["name"], "filters": [{"col": "name", "opr": "report_all_text", "value": "table3"}], } uri = f"api/v1/report/?q={prison.dumps(arguments)}" rv = self.get_assert_metric(uri, "get_list") expected_result = { "name": "name3", } assert rv.status_code == 200 data = json.loads(rv.data.decode("utf-8")) assert data["count"] == 1 assert data["result"][0] == expected_result @pytest.mark.usefixtures("create_report_schedules") def test_get_list_report_schedule_filter_active(self): """ ReportSchedule Api: Test active 
        filter on get list report schedules
        """
        self.login(ADMIN_USERNAME)
        arguments = {
            "columns": ["name"],
            "filters": [{"col": "active", "opr": "eq", "value": True}],
        }
        uri = f"api/v1/report/?q={prison.dumps(arguments)}"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
        data = json.loads(rv.data.decode("utf-8"))
        # All fixture-created schedules are active, so none are filtered out.
        assert data["count"] == REPORTS_COUNT

    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_list_report_schedule_filter_type(self):
        """
        ReportSchedule Api: Test type filter on get list report schedules
        """
        self.login(ADMIN_USERNAME)
        # The fixture creates only ALERT-type schedules, so filtering on ALERT
        # returns everything ...
        arguments = {
            "columns": ["name"],
            "filters": [
                {"col": "type", "opr": "eq", "value": ReportScheduleType.ALERT}
            ],
        }
        uri = f"api/v1/report/?q={prison.dumps(arguments)}"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
        data = json.loads(rv.data.decode("utf-8"))
        assert data["count"] == REPORTS_COUNT

        # Test type filter
        # ... and filtering on REPORT returns nothing.
        arguments = {
            "columns": ["name"],
            "filters": [
                {"col": "type", "opr": "eq", "value": ReportScheduleType.REPORT}
            ],
        }
        uri = f"api/v1/report/?q={prison.dumps(arguments)}"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
        data = json.loads(rv.data.decode("utf-8"))
        assert data["count"] == 0

    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_related_report_schedule(self):
        """
        ReportSchedule Api: Test get related report schedule
        """
        self.login(ADMIN_USERNAME)
        # Each relation endpoint must resolve; only the status code is checked.
        related_columns = ["created_by", "chart", "dashboard", "database"]
        for related_column in related_columns:
            uri = f"api/v1/report/related/{related_column}"
            rv = self.client.get(uri)
            assert rv.status_code == 200

    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_create_report_schedule(self):
        """
        ReportSchedule Api: Test create report schedule
        """
        self.login(ADMIN_USERNAME)
        chart = db.session.query(Slice).first()
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
"crontab": "0 9 * * *", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "grace_period": 14400, "working_timeout": 3600, "chart": chart.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 201 created_model = db.session.query(ReportSchedule).get(data.get("id")) assert created_model is not None assert created_model.name == report_schedule_data["name"] assert created_model.grace_period == report_schedule_data["grace_period"] assert created_model.working_timeout == report_schedule_data["working_timeout"] assert created_model.description == report_schedule_data["description"] assert created_model.crontab == report_schedule_data["crontab"] assert created_model.chart.id == report_schedule_data["chart"] assert created_model.database.id == report_schedule_data["database"] assert created_model.creation_method == report_schedule_data["creation_method"] # Rollback changes db.session.delete(created_model) db.session.commit() @pytest.mark.usefixtures("create_report_schedules") def test_create_report_schedule_uniqueness(self): """ ReportSchedule Api: Test create report schedule uniqueness """ self.login(ADMIN_USERNAME) chart = db.session.query(Slice).first() example_db = get_example_database() report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "name3", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "chart": chart.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 422 data = json.loads(rv.data.decode("utf-8")) assert data == {"message": {"name": ['An alert named 
"name3" already exists']}} # Check that uniqueness is composed by name and type report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name3", "description": "description", "crontab": "0 9 * * *", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "chart": chart.id, } uri = "api/v1/report/" rv = self.client.post(uri, json=report_schedule_data) assert rv.status_code == 201 data = json.loads(rv.data.decode("utf-8")) # Rollback changes created_model = db.session.query(ReportSchedule).get(data.get("id")) db.session.delete(created_model) db.session.commit() @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_schedules" ) def test_create_report_schedule_schema(self): """ ReportSchedule Api: Test create report schedule schema check """ self.login(ADMIN_USERNAME) chart = db.session.query(Slice).first() dashboard = db.session.query(Dashboard).first() example_db = get_example_database() # Check that a report does not have a database reference report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name3", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "chart": chart.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 400 # Test that report can be created with null grace period report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new3", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "working_timeout": 3600, "chart": chart.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 201 # Test that 
grace period and working timeout cannot be < 1 report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new3", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "working_timeout": -10, "chart": chart.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 400 report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new3", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "grace_period": -10, "working_timeout": 3600, "chart": chart.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 400 # Test that report can be created with null dashboard report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new4", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "working_timeout": 3600, "chart": chart.id, "dashboard": None, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 201 # Test that report can be created with null chart report_schedule_data = { "type": ReportScheduleType.ALERT, "name": 
"new5", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "working_timeout": 3600, "chart": None, "dashboard": dashboard.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 201 # Test that report cannot be created with null timezone report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new5", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "working_timeout": 3600, "timezone": None, "dashboard": dashboard.id, "database": example_db.id, } rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 400 data = json.loads(rv.data.decode("utf-8")) assert data == {"message": {"timezone": ["Field may not be null."]}} # Test that report cannot be created with an invalid timezone report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new5", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "working_timeout": 3600, "timezone": "this is not a timezone", "dashboard": dashboard.id, "database": example_db.id, } rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 400 data = json.loads(rv.data.decode("utf-8")) 
assert data == { "message": { "timezone": [f"Must be one of: {', '.join(pytz.all_timezones)}."] } } # Test that report should reflect the timezone value passed in report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new6", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "recipients": [ { "type": ReportRecipientType.EMAIL, "recipient_config_json": {"target": "target@superset.org"}, }, { "type": ReportRecipientType.SLACK, "recipient_config_json": {"target": "channel"}, }, ], "working_timeout": 3600, "timezone": "America/Los_Angeles", "dashboard": dashboard.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert data["result"]["timezone"] == "America/Los_Angeles" assert rv.status_code == 201 @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_schedules" ) def test_unsaved_report_schedule_schema(self): """ ReportSchedule Api: Test create report schedule with unsaved chart """ self.login(ADMIN_USERNAME) db.session.query(Slice).first() # noqa: F841 db.session.query(Dashboard).first() # noqa: F841 get_example_database() # noqa: F841 report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name3", "description": "description", "creation_method": ReportCreationMethod.CHARTS, "crontab": "0 9 * * *", "chart": 0, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 422 assert ( data["message"]["chart"] == "Please save your chart first, then try creating a new email report." 
) @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_schedules" ) def test_no_dashboard_report_schedule_schema(self): """ ReportSchedule Api: Test create report schedule with no dashboard id """ self.login(ADMIN_USERNAME) db.session.query(Slice).first() # noqa: F841 db.session.query(Dashboard).first() # noqa: F841 get_example_database() # noqa: F841 report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name3", "description": "description", "creation_method": ReportCreationMethod.DASHBOARDS, "crontab": "0 9 * * *", } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 422 assert ( data["message"]["dashboard"] == "Please save your dashboard first, then try creating a new email report." ) @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_schedules" ) def test_create_multiple_creation_method_report_schedule_charts(self): """ ReportSchedule Api: Test create multiple reports with the same creation method """ self.login(ADMIN_USERNAME) chart = db.session.query(Slice).first() db.session.query(Dashboard).first() # noqa: F841 get_example_database() # noqa: F841 report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name4", "description": "description", "creation_method": ReportCreationMethod.CHARTS, "crontab": "0 9 * * *", "working_timeout": 3600, "chart": chart.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 201 # this second time it should receive an error because the chart has an attached report # noqa: E501 # with the same creation method from the same user. 
report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name5", "description": "description", "creation_method": ReportCreationMethod.CHARTS, "crontab": "0 9 * * *", "working_timeout": 3600, "chart": chart.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 409 assert data == { "errors": [ { "message": "Resource already has an attached report.", "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { "issue_codes": [ { "code": 1010, "message": "Issue 1010 - Superset encountered an error while running a command.", # noqa: E501 } ] }, } ] } @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_schedules" ) def test_create_multiple_creation_method_report_schedule_dashboards(self): """ ReportSchedule Api: Test create multiple reports with the same creation method """ self.login(ADMIN_USERNAME) db.session.query(Slice).first() # noqa: F841 dashboard = db.session.query(Dashboard).first() get_example_database() # noqa: F841 report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name4", "description": "description", "creation_method": ReportCreationMethod.DASHBOARDS, "crontab": "0 9 * * *", "working_timeout": 3600, "dashboard": dashboard.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 201 # this second time it should receive an error because the dashboard has an attached report # noqa: E501 # with the same creation method from the same user. 
report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "name5", "description": "description", "creation_method": ReportCreationMethod.DASHBOARDS, "crontab": "0 9 * * *", "working_timeout": 3600, "dashboard": dashboard.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 409 assert data == { "errors": [ { "message": "Resource already has an attached report.", "error_type": "GENERIC_COMMAND_ERROR", "level": "warning", "extra": { "issue_codes": [ { "code": 1010, "message": "Issue 1010 - Superset encountered an error while running a command.", # noqa: E501 } ] }, } ] } @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_create_report_schedule_chart_dash_validation(self): """ ReportSchedule Api: Test create report schedule chart and dashboard validation """ self.login(ADMIN_USERNAME) # Test we can submit a chart or a dashboard not both chart = db.session.query(Slice).first() dashboard = db.session.query(Dashboard).first() example_db = get_example_database() report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new3", "description": "description", "crontab": "0 9 * * *", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "chart": chart.id, "dashboard": dashboard.id, "database": example_db.id, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") assert rv.status_code == 422 data = json.loads(rv.data.decode("utf-8")) assert data == {"message": {"chart": "Choose a chart or dashboard not both"}} @pytest.mark.usefixtures("create_report_schedules") def test_create_report_schedule_chart_db_validation(self): """ ReportSchedule Api: Test create report schedule chart and database validation """ self.login(ADMIN_USERNAME) # Test database required for alerts chart = db.session.query(Slice).first() report_schedule_data = { "type": ReportScheduleType.ALERT, "name": "new3", "description": 
            "description",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "crontab": "0 9 * * *",
            "chart": chart.id,
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        # An alert payload without a database reference is unprocessable
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"database": "Database is required for alerts"}}

    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_create_report_schedule_relations_exist(self):
        """
        ReportSchedule Api: Test create report schedule relations (chart, dash, db)
        exist
        """
        self.login(ADMIN_USERNAME)

        # Test chart and database do not exist: max(id) + 1 can never
        # reference an existing row
        chart_max_id = db.session.query(func.max(Slice.id)).scalar()
        database_max_id = db.session.query(func.max(Database.id)).scalar()
        examples_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "crontab": "0 9 * * *",
            "chart": chart_max_id + 1,
            "database": database_max_id + 1,
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {
                "chart": "Chart does not exist",
                "database": "Database does not exist",
            }
        }

        # Test dashboard does not exist
        dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
            "crontab": "0 9 * * *",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "dashboard": dashboard_max_id + 1,
            "database": examples_db.id,
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"dashboard": "Dashboard does not exist"}}

    # @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    # TODO (AAfghahi): I am going to enable this when the report schedule feature is fully finished  # noqa: E501
    # def test_create_report_schedule_no_creation_method(self):
    #     """
    #     ReportSchedule Api: Test create report schedule
    #     """
    #     self.login(ADMIN_USERNAME)
    #     chart = db.session.query(Slice).first()
    #     example_db = get_example_database()
    #     report_schedule_data = {
    #         "type": ReportScheduleType.ALERT,
    #         "name": "new3",
    #         "description": "description",
    #         "crontab": "0 9 * * *",
    #         "recipients": [
    #             {
    #                 "type": ReportRecipientType.EMAIL,
    #                 "recipient_config_json": {"target": "target@superset.org"},
    #             },
    #             {
    #                 "type": ReportRecipientType.SLACK,
    #                 "recipient_config_json": {"target": "channel"},
    #             },
    #         ],
    #         "grace_period": 14400,
    #         "working_timeout": 3600,
    #         "chart": chart.id,
    #         "database": example_db.id,
    #     }
    #     uri = "api/v1/report/"
    #     rv = self.client.post(uri, json=report_schedule_data)
    #     response = json.loads(rv.data.decode("utf-8"))
    #     assert response == {
    #         "message": {"creation_method": ["Missing data for required field."]}
    #     }
    #     assert rv.status_code == 400

    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_create_report_schedule_invalid_creation_method(self):
        """
        ReportSchedule API: Test create report schedule with an unknown
        creation method is rejected by schema validation
        """
        self.login(ADMIN_USERNAME)
        chart = db.session.query(Slice).first()
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
            "creation_method": "BAD_CREATION_METHOD",
            "crontab": "0 9 * * *",
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": "target@superset.org"},
                },
                {
                    "type": ReportRecipientType.SLACK,
                    "recipient_config_json": {"target": "channel"},
                },
            ],
            "grace_period": 14400,
            "working_timeout": 3600,
            "chart": chart.id,
            "database": example_db.id,
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        response = json.loads(rv.data.decode("utf-8"))
        assert response == {
            "message": {
                "creation_method": [
                    "Must be one of: charts, dashboards, alerts_reports."
                ]
            }
        }
        assert rv.status_code == 400

    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_create_report_schedule_valid_schedule(self):
        """
        ReportSchedule API: Test create report schedule when a minimum interval
        is set in config.
        """
        self.login(ADMIN_USERNAME)
        chart = db.session.query(Slice).first()
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "Alert with a valid frequency",
            "description": "description",
            "creation_method": "alerts_reports",
            "crontab": "5,10 9 * * *",
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": "target@superset.org"},
                },
                {
                    "type": ReportRecipientType.SLACK,
                    "recipient_config_json": {"target": "channel"},
                },
            ],
            "grace_period": 14400,
            "working_timeout": 3600,
            "chart": chart.id,
            "database": example_db.id,
        }
        # The crontab above fires 5 minutes apart, which satisfies both
        # configured minimum intervals (2 min for alerts, 5 min for reports)
        with patch.dict(
            "flask.current_app.config",
            {
                "ALERT_MINIMUM_INTERVAL": int(timedelta(minutes=2).total_seconds()),
                "REPORT_MINIMUM_INTERVAL": int(timedelta(minutes=5).total_seconds()),
            },
        ):
            uri = "api/v1/report/"
            rv = self.post_assert_metric(uri, report_schedule_data, "post")
            assert rv.status_code == 201

            # Same crontab as a Report (no database) is accepted too
            report_schedule_data["type"] = ReportScheduleType.REPORT
            report_schedule_data["name"] = "Report with a valid frequency"
            del report_schedule_data["database"]
            rv = self.post_assert_metric(uri, report_schedule_data, "post")
            assert rv.status_code == 201

    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_create_report_schedule_invalid_schedule(self):
        """
        ReportSchedule API: Test create report schedule when a minimum interval
        is set in config and the scheduled frequency exceeds it.
        """
        self.login(ADMIN_USERNAME)
        chart = db.session.query(Slice).first()
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "Invalid Frequency",
            "description": "description",
            "creation_method": "alerts_reports",
            "crontab": "5,10 9 * * *",
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": "target@superset.org"},
                },
                {
                    "type": ReportRecipientType.SLACK,
                    "recipient_config_json": {"target": "channel"},
                },
            ],
            "grace_period": 14400,
            "working_timeout": 3600,
            "chart": chart.id,
            "database": example_db.id,
        }
        # The crontab above fires 5 minutes apart, below both configured
        # minimums, so both the alert and the report variants must be rejected
        with patch.dict(
            "flask.current_app.config",
            {
                "ALERT_MINIMUM_INTERVAL": int(timedelta(minutes=6).total_seconds()),
                "REPORT_MINIMUM_INTERVAL": int(timedelta(minutes=8).total_seconds()),
            },
        ):
            uri = "api/v1/report/"
            rv = self.post_assert_metric(uri, report_schedule_data, "post")
            response = json.loads(rv.data.decode("utf-8"))
            assert response == {
                "message": {
                    "crontab": (
                        "Alert schedule frequency exceeding limit. "
                        "Please configure a schedule with a minimum interval of 6 minutes per execution."  # noqa: E501
                    )
                }
            }
            assert rv.status_code == 422

            report_schedule_data["type"] = ReportScheduleType.REPORT
            del report_schedule_data["database"]
            rv = self.post_assert_metric(uri, report_schedule_data, "post")
            response = json.loads(rv.data.decode("utf-8"))
            assert response == {
                "message": {
                    "crontab": (
                        "Report schedule frequency exceeding limit. "
                        "Please configure a schedule with a minimum interval of 8 minutes per execution."  # noqa: E501
                    )
                }
            }
            assert rv.status_code == 422

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_valid_schedule(self) -> None:
        """
        ReportSchedule API: Test update report schedule when a minimum interval
        is set in config.
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        assert report_schedule.type == ReportScheduleType.ALERT
        previous_cron = report_schedule.crontab
        update_payload: dict[str, Any] = {
            "crontab": "5,10 * * * *",
        }

        with patch.dict(
            "flask.current_app.config",
            {
                "ALERT_MINIMUM_INTERVAL": int(timedelta(minutes=5).total_seconds()),
                "REPORT_MINIMUM_INTERVAL": int(timedelta(minutes=3).total_seconds()),
            },
        ):
            # Test alert minimum interval
            uri = f"api/v1/report/{report_schedule.id}"
            rv = self.put_assert_metric(uri, update_payload, "put")
            assert rv.status_code == 200

            # Test report minimum interval
            update_payload["crontab"] = "5,8 * * * *"
            update_payload["type"] = ReportScheduleType.REPORT
            update_payload["database"] = None
            uri = f"api/v1/report/{report_schedule.id}"
            rv = self.put_assert_metric(uri, update_payload, "put")
            assert rv.status_code == 200

        with patch.dict(
            "flask.current_app.config",
            {
                "ALERT_MINIMUM_INTERVAL": 0,
                "REPORT_MINIMUM_INTERVAL": 0,
            },
        ):
            # Undo changes so other tests see the fixture's original state
            update_payload["crontab"] = previous_cron
            update_payload["type"] = ReportScheduleType.ALERT
            update_payload["database"] = get_example_database().id
            uri = f"api/v1/report/{report_schedule.id}"
            rv = self.put_assert_metric(uri, update_payload, "put")
            assert rv.status_code == 200

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_invalid_schedule(self) -> None:
        """
        ReportSchedule API: Test update report schedule when a minimum interval
        is set in config and the scheduled frequency exceeds it.
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        assert report_schedule.type == ReportScheduleType.ALERT
        update_payload: dict[str, Any] = {
            "crontab": "5,10 * * * *",
        }

        with patch.dict(
            "flask.current_app.config",
            {
                "ALERT_MINIMUM_INTERVAL": int(timedelta(minutes=6).total_seconds()),
                "REPORT_MINIMUM_INTERVAL": int(timedelta(minutes=4).total_seconds()),
            },
        ):
            # Exceed alert minimum interval
            uri = f"api/v1/report/{report_schedule.id}"
            rv = self.put_assert_metric(uri, update_payload, "put")
            assert rv.status_code == 422
            response = json.loads(rv.data.decode("utf-8"))
            assert response == {
                "message": {
                    "crontab": (
                        "Alert schedule frequency exceeding limit. "
                        "Please configure a schedule with a minimum interval of 6 minutes per execution."  # noqa: E501
                    )
                }
            }

            # Exceed report minimum interval
            update_payload["crontab"] = "5,8 * * * *"
            update_payload["type"] = ReportScheduleType.REPORT
            update_payload["database"] = None
            uri = f"api/v1/report/{report_schedule.id}"
            rv = self.put_assert_metric(uri, update_payload, "put")
            assert rv.status_code == 422
            response = json.loads(rv.data.decode("utf-8"))
            assert response == {
                "message": {
                    "crontab": (
                        "Report schedule frequency exceeding limit. "
                        "Please configure a schedule with a minimum interval of 4 minutes per execution."  # noqa: E501
                    )
                }
            }

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule(self):
        """
        ReportSchedule Api: Test update report schedule
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )

        self.login(ADMIN_USERNAME)
        chart = db.session.query(Slice).first()
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "changed",
            "description": "description",
            "crontab": "0 10 * * *",
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": "target@superset.org"},
                }
            ],
            "chart": chart.id,
            "database": example_db.id,
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 200
        # Verify the persisted model reflects every updated field
        updated_model = db.session.query(ReportSchedule).get(report_schedule.id)
        assert updated_model is not None
        assert updated_model.name == report_schedule_data["name"]
        assert updated_model.description == report_schedule_data["description"]
        assert len(updated_model.recipients) == 1
        assert updated_model.crontab == report_schedule_data["crontab"]
        assert updated_model.chart_id == report_schedule_data["chart"]
        assert updated_model.database_id == report_schedule_data["database"]

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_clear_recipients(self):
        """
        ReportSchedule API: clear recipients on empty list
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        assert len(report_schedule.recipients) == 2
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "recipients": [],
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 200
        # Expire so the next attribute access reloads recipients from the DB
        db.session.expire(report_schedule)
        assert report_schedule.recipients == []

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_empty_email_target(self):
        """
        ReportSchedule API: Test update with empty email target returns 400
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": ""},
                }
            ],
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 400

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_invalid_email(self):
        """
        ReportSchedule API: Test update with invalid email returns 400
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": "notanemail"},
                }
            ],
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 400

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_invalid_cc_email(self):
        """
        ReportSchedule API: Test update with invalid ccTarget
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {
                        "target": "valid@example.com",
                        "ccTarget": "bademail",
                    },
                }
            ],
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 400

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_invalid_bcc_email(self):
        """
        ReportSchedule API: Test update with invalid bccTarget
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {
                        "target": "valid@example.com",
                        "bccTarget": "bademail",
                    },
                }
            ],
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 400

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_slack_empty_target_allowed(self):
        """
        ReportSchedule API: Test that Slack recipients skip email validation
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "recipients": [
                {
                    "type": ReportRecipientType.SLACK,
                    "recipient_config_json": {"target": ""},
                }
            ],
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 200

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_valid_email_with_cc_bcc(self):
        """
        ReportSchedule API: Test update with valid email fields
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {
                        "target": "valid@example.com",
                        "ccTarget": "cc@example.com",
                        "bccTarget": "bcc@example.com",
                    },
                }
            ],
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 200

    @pytest.mark.usefixtures("create_working_shared_report_schedule")
    def test_update_report_schedule_state_working(self):
        """
        ReportSchedule Api: Test update state in a working report
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name_shared_working")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        report_schedule_data = {"active": False}
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri,
                                    report_schedule_data, "put")
        assert rv.status_code == 200
        # Deactivating a working report resets its last_state to NOOP
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name_shared_working")
            .one_or_none()
        )
        assert report_schedule.last_state == ReportState.NOOP

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_uniqueness(self):
        """
        ReportSchedule Api: Test update report schedule uniqueness
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        # "name3" is already taken by a sibling fixture schedule
        report_schedule_data = {"name": "name3", "description": "changed_description"}
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 422
        assert data == {"message": {"name": ['An alert named "name3" already exists']}}

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_not_found(self):
        """
        ReportSchedule Api: Test update report schedule not found
        """
        max_id = db.session.query(func.max(ReportSchedule.id)).scalar()

        self.login(ADMIN_USERNAME)
        report_schedule_data = {"name": "changed"}
        uri = f"api/v1/report/{max_id + 1}"
        rv = self.client.put(uri, json=report_schedule_data)
        assert rv.status_code == 404

    @pytest.mark.usefixtures(
        "load_birth_names_dashboard_with_slices", "create_report_schedules"
    )
    def test_update_report_schedule_chart_dash_validation(self):
        """
        ReportSchedule Api: Test update report schedule chart and dashboard validation
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        # Test we can submit a chart or a dashboard not both
        chart = db.session.query(Slice).first()
        dashboard = db.session.query(Dashboard).first()
        example_db = get_example_database()
        report_schedule_data = {
            "chart": chart.id,
            "dashboard": dashboard.id,
            "database": example_db.id,
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"chart": "Choose a chart or dashboard not both"}}

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_database_not_allowed_on_report(self):
        """
        ReportSchedule API: Test update report schedule rejects database on
        Report type
        """
        self.login(ADMIN_USERNAME)
        example_db = get_example_database()
        # Create a Report-type schedule (name1 is an Alert, so create one)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )

        # Change to Report type first (clearing database)
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(
            uri,
            {"type": ReportScheduleType.REPORT, "database": None},
            "put",
        )
        assert rv.status_code == 200

        # Test 1: Report + database (no type in payload) → 422
        rv = self.put_assert_metric(uri, {"database": example_db.id}, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {"database": "Database reference is not allowed on a report"}
        }

        # Test 2: Report + database + explicit type=Report → 422
        rv = self.put_assert_metric(
            uri,
            {"type": ReportScheduleType.REPORT, "database": example_db.id},
            "put",
        )
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {"database": "Database reference is not allowed on a report"}
        }

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_nonexistent_database_returns_not_allowed(self):
        """
        ReportSchedule API: Test Report + nonexistent DB returns 'not allowed',
        not 'does not exist' — type invariant takes precedence.
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        uri = f"api/v1/report/{report_schedule.id}"

        # Transition to Report type first
        rv = self.put_assert_metric(
            uri,
            {"type": ReportScheduleType.REPORT, "database": None},
            "put",
        )
        assert rv.status_code == 200

        # Report + nonexistent DB → 422 "not allowed" (not "does not exist")
        database_max_id = db.session.query(func.max(Database.id)).scalar()
        rv = self.put_assert_metric(uri, {"database": database_max_id + 1}, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {"database": "Database reference is not allowed on a report"}
        }

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_alert_schedule_database_allowed(self):
        """
        ReportSchedule API: Test update alert schedule accepts database
        """
        self.login(ADMIN_USERNAME)
        example_db = get_example_database()
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        assert report_schedule.type == ReportScheduleType.ALERT

        # Test 3: Alert + database (no type in payload) → 200
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, {"database": example_db.id}, "put")
        assert rv.status_code == 200

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_type_transitions(self):
        """
        ReportSchedule API: Test type transitions with database validation
        """
        self.login(ADMIN_USERNAME)
        example_db = get_example_database()
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name3")
            .one_or_none()
        )
        assert report_schedule.type == ReportScheduleType.ALERT
        assert report_schedule.database_id is not None
        uri = f"api/v1/report/{report_schedule.id}"

        # Test 4: Alert + database update (same type) → 200
        rv = self.put_assert_metric(
            uri,
            {"database": example_db.id},
            "put",
        )
        assert rv.status_code == 200

        # Test 5: Alert → Report + database → 422
        rv = self.put_assert_metric(
            uri,
            {
                "type": ReportScheduleType.REPORT,
                "database": example_db.id,
            },
            "put",
        )
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {"database": "Database reference is not allowed on a report"}
        }

        # Test 6: Alert → Report without clearing database → 422
        rv = self.put_assert_metric(uri, {"type": ReportScheduleType.REPORT}, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {"database": "Database reference is not allowed on a report"}
        }

        # Test 7: Alert → Report with database: null (explicit clear) → 200
        rv = self.put_assert_metric(
            uri,
            {"type": ReportScheduleType.REPORT, "database": None},
            "put",
        )
        assert rv.status_code == 200

        # Now schedule is a Report with no database.
        # Test 8: Report → Alert without providing database → 422
        rv = self.put_assert_metric(
            uri,
            {"type": ReportScheduleType.ALERT},
            "put",
        )
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"database": "Database is required for alerts"}}

        # Test 9: Report → Alert with database → 200 (valid transition)
        rv = self.put_assert_metric(
            uri,
            {"type": ReportScheduleType.ALERT, "database": example_db.id},
            "put",
        )
        assert rv.status_code == 200

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_alert_schedule_database_null_rejected(self):
        """
        ReportSchedule API: Test alert schedule rejects null database
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        assert report_schedule.type == ReportScheduleType.ALERT
        uri = f"api/v1/report/{report_schedule.id}"

        # Test 8: Alert + database: null → 422
        rv = self.put_assert_metric(uri, {"database": None}, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"database": "Database is required for alerts"}}

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_422_does_not_mutate(self):
        """
        ReportSchedule API: Test that a rejected PUT does not mutate the model
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        assert report_schedule.type == ReportScheduleType.ALERT
        original_type = report_schedule.type
        original_database_id = report_schedule.database_id
        assert original_database_id is not None
        uri = f"api/v1/report/{report_schedule.id}"

        # Alert→Report without clearing database → 422
        rv = self.put_assert_metric(uri, {"type": ReportScheduleType.REPORT}, "put")
        assert rv.status_code == 422

        # Re-query and verify no mutation
        db.session.expire(report_schedule)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.id == report_schedule.id)
            .one_or_none()
        )
        assert report_schedule.type == original_type
        assert report_schedule.database_id == original_database_id

    @pytest.mark.usefixtures(
        "load_birth_names_dashboard_with_slices", "create_report_schedules"
    )
    def test_create_report_schedule_database_not_allowed(self):
        """
        ReportSchedule API: Test POST rejects database on Report type at
        schema level
        """
        self.login(ADMIN_USERNAME)
        chart = db.session.query(Slice).first()
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.REPORT,
            "name": "report_with_db",
            "description": "should fail",
            "crontab": "0 9 * * *",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "chart": chart.id,
            "database": example_db.id,
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        # Rejected by the marshmallow schema (400), before command validation
        assert rv.status_code == 400
        data = json.loads(rv.data.decode("utf-8"))
        assert "database" in data.get("message", {})

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_to_alert_nonexistent_database(self):
        """
        ReportSchedule API: Test Report→Alert with nonexistent database
        returns 422
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name4")
            .one_or_none()
        )
        assert report_schedule.type == ReportScheduleType.ALERT
        uri = f"api/v1/report/{report_schedule.id}"

        # First transition to Report (clearing database)
        rv = self.put_assert_metric(
            uri,
            {"type": ReportScheduleType.REPORT, "database": None},
            "put",
        )
        assert rv.status_code == 200

        # Now transition back to Alert with nonexistent database
        database_max_id = db.session.query(func.max(Database.id)).scalar()
        rv = self.put_assert_metric(
            uri,
            {
                "type": ReportScheduleType.ALERT,
                "database": database_max_id + 1,
            },
            "put",
        )
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"database": "Database does not exist"}}

    @pytest.mark.usefixtures(
        "load_birth_names_dashboard_with_slices", "create_report_schedules"
    )
    def test_update_report_schedule_relations_exist(self):
        """
        ReportSchedule Api: Test update report schedule relations
        (chart, dash, db) exist
        """
        self.login(ADMIN_USERNAME)
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        # Test chart and database do not exist
        chart_max_id = db.session.query(func.max(Slice.id)).scalar()
        database_max_id = db.session.query(func.max(Database.id)).scalar()
        examples_db = get_example_database()
        report_schedule_data = {
            "type":
                ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
            "crontab": "0 9 * * *",
            "chart": chart_max_id + 1,
            "database": database_max_id + 1,
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {
                "chart": "Chart does not exist",
                "database": "Database does not exist",
            }
        }

        # Test dashboard does not exist
        dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
            "crontab": "0 9 * * *",
            "dashboard": dashboard_max_id + 1,
            "database": examples_db.id,
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"dashboard": "Dashboard does not exist"}}

    @pytest.mark.usefixtures("create_report_schedules")
    @pytest.mark.usefixtures("create_alpha_users")
    def test_update_report_not_owned(self):
        """
        ReportSchedule API: Test update report not owned
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(username="alpha2", password="password")  # noqa: S106
        report_schedule_data = {
            "active": False,
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 403

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_preserve_ownership(self):
        """
        ReportSchedule API: Test update report preserves owner list (if un-changed)
        """
        self.login(username="admin")
        existing_report = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        current_owners = existing_report.owners
        report_schedule_data = {
            "description": "Updated description",
        }
        uri = f"api/v1/report/{existing_report.id}"
        self.put_assert_metric(uri, report_schedule_data, "put")  # noqa: F841
        updated_report = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        # Owners were not in the payload, so they must remain untouched
        assert set(updated_report.owners) == set(current_owners)

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_clear_owner_list(self):
        """
        ReportSchedule API: Test update report admin can clear ownership config
        """
        self.login(username="admin")
        existing_report = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        report_schedule_data = {
            "owners": [],
        }
        uri = f"api/v1/report/{existing_report.id}"
        self.put_assert_metric(uri, report_schedule_data, "put")  # noqa: F841
        updated_report = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        assert updated_report.owners == []

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_populate_owner(self):
        """
        ReportSchedule API: Test update admin can update report with
        no owners to a different owner
        """
        gamma = self.get_user("gamma")
        self.login(username="admin")

        # Modify an existing report to make remove all owners
        existing_report = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        report_update_data = {
            "owners": [],
        }
        uri = f"api/v1/report/{existing_report.id}"
        self.put_assert_metric(uri, report_update_data, "put")
        updated_report = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        assert updated_report.owners == []

        # Populate the field
        report_update_data = {
            "owners": [gamma.id],
        }
        uri = f"api/v1/report/{updated_report.id}"
        self.put_assert_metric(uri, report_update_data, "put")  # noqa: F841
        updated_report = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        assert updated_report.owners == [gamma]

    @pytest.mark.usefixtures("create_report_schedules")
    def test_delete_report_schedule(self):
        """
        ReportSchedule Api: Test delete report schedule
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .one_or_none()
        )
        self.login(ADMIN_USERNAME)
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.delete_assert_metric(uri, "delete")
        assert rv.status_code == 200
        deleted_report_schedule = db.session.query(ReportSchedule).get(
            report_schedule.id
        )
        assert deleted_report_schedule is None
        # Deleting a schedule must also remove its recipients ...
        deleted_recipients = (
            db.session.query(ReportRecipients)
            .filter(ReportRecipients.report_schedule_id == report_schedule.id)
            .all()
        )
        assert deleted_recipients == []
        # ... and its execution logs
        deleted_logs = (
            db.session.query(ReportExecutionLog)
            .filter(ReportExecutionLog.report_schedule_id == report_schedule.id)
            .all()
        )
        assert deleted_logs == []

    @pytest.mark.usefixtures("create_report_schedules")
    def test_delete_report_schedule_not_found(self):
        """
        ReportSchedule Api: Test delete report schedule not found
        """
        max_id = db.session.query(func.max(ReportSchedule.id)).scalar()
        self.login(ADMIN_USERNAME)
        uri = f"api/v1/report/{max_id + 1}"
        rv = self.delete_assert_metric(uri, "delete")
        assert rv.status_code == 404

    @pytest.mark.usefixtures("create_report_schedules")
    @pytest.mark.usefixtures("create_alpha_users")
    def test_delete_report_not_owned(self):
        """
        ReportSchedule API: Test delete try not owned
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        self.login(username="alpha2", password="password")  # noqa: S106
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.delete_assert_metric(uri, "delete")
        assert rv.status_code == 403

    @pytest.mark.usefixtures("create_report_schedules")
    def test_bulk_delete_report_schedule(self):
        """
        ReportSchedule Api: Test bulk delete report schedules
        """
        query_report_schedules = db.session.query(ReportSchedule)
        report_schedules = query_report_schedules.all()
        report_schedules_ids = [
            report_schedule.id for report_schedule in report_schedules
        ]
        self.login(ADMIN_USERNAME)
        uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}"
        rv = self.delete_assert_metric(uri, "bulk_delete")
        assert rv.status_code == 200
        deleted_report_schedules = query_report_schedules.all()
        assert deleted_report_schedules == []
        response = json.loads(rv.data.decode("utf-8"))
        expected_response = {
            "message": f"Deleted {len(report_schedules_ids)} report schedules"
        }
        assert response == expected_response

    @pytest.mark.usefixtures("create_report_schedules")
    def test_bulk_delete_report_schedule_not_found(self):
        """
        ReportSchedule Api: Test bulk delete report schedule not found
""" report_schedules = db.session.query(ReportSchedule).all() report_schedules_ids = [ report_schedule.id for report_schedule in report_schedules ] max_id = db.session.query(func.max(ReportSchedule.id)).scalar() report_schedules_ids.append(max_id + 1) self.login(ADMIN_USERNAME) uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}" rv = self.delete_assert_metric(uri, "bulk_delete") assert rv.status_code == 404 @pytest.mark.usefixtures("create_report_schedules") @pytest.mark.usefixtures("create_alpha_users") def test_bulk_delete_report_not_owned(self): """ ReportSchedule API: Test bulk delete try not owned """ report_schedule = ( db.session.query(ReportSchedule) .filter(ReportSchedule.name == "name2") .one_or_none() ) report_schedules_ids = [report_schedule.id] self.login(username="alpha2", password="password") # noqa: S106 uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}" rv = self.delete_assert_metric(uri, "bulk_delete") assert rv.status_code == 403 @pytest.mark.usefixtures("create_report_schedules") def test_get_list_report_schedule_logs(self): """ ReportSchedule Api: Test get list report schedules logs """ report_schedule = ( db.session.query(ReportSchedule) .filter(ReportSchedule.name == "name3") .one_or_none() ) self.login(ADMIN_USERNAME) uri = f"api/v1/report/{report_schedule.id}/log/" rv = self.client.get(uri) assert rv.status_code == 200 data = json.loads(rv.data.decode("utf-8")) assert data["count"] == 3 @pytest.mark.usefixtures("create_report_schedules") def test_get_list_report_schedule_logs_sorting(self): """ ReportSchedule Api: Test get list report schedules logs """ report_schedule = ( db.session.query(ReportSchedule) .filter(ReportSchedule.name == "name3") .one_or_none() ) self.login(ADMIN_USERNAME) uri = f"api/v1/report/{report_schedule.id}/log/" order_columns = [ "state", "value", "error_message", "end_dttm", "start_dttm", "scheduled_dttm", ] for order_column in order_columns: arguments = {"order_column": order_column, 
"order_direction": "asc"} uri = f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(arguments)}" rv = self.get_assert_metric(uri, "get_list") if rv.status_code == 400: raise Exception(json.loads(rv.data.decode("utf-8"))) assert rv.status_code == 200 @pytest.mark.usefixtures("create_report_schedules") def test_get_list_report_schedule_logs_filters(self): """ ReportSchedule Api: Test get list report schedules log filters """ report_schedule = ( db.session.query(ReportSchedule) .filter(ReportSchedule.name == "name3") .one_or_none() ) self.login(ADMIN_USERNAME) arguments = { "columns": ["name"], "filters": [{"col": "state", "opr": "eq", "value": ReportState.SUCCESS}], } uri = f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(arguments)}" rv = self.get_assert_metric(uri, "get_list") assert rv.status_code == 200 data = json.loads(rv.data.decode("utf-8")) assert data["count"] == 0 @pytest.mark.usefixtures("create_report_schedules") def test_report_schedule_logs_no_mutations(self): """ ReportSchedule Api: Test assert there's no way to alter logs """ report_schedule = ( db.session.query(ReportSchedule) .filter(ReportSchedule.name == "name3") .one_or_none() ) data = {"state": ReportState.ERROR, "error_message": "New error changed"} self.login(ADMIN_USERNAME) uri = f"api/v1/report/{report_schedule.id}/log/" rv = self.client.post(uri, json=data) assert rv.status_code == 405 uri = f"api/v1/report/{report_schedule.id}/log/{report_schedule.logs[0].id}" rv = self.client.put(uri, json=data) assert rv.status_code == 405 rv = self.client.delete(uri) assert rv.status_code == 405 @with_feature_flags(ALERT_REPORT_TABS=True) @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_schedules" ) def test_create_report_schedule_with_invalid_anchors(self): """ ReportSchedule Api: Test get report schedule 404s when feature is disabled """ report_schedule = db.session.query(Dashboard).first() get_example_database() # noqa: F841 anchors = 
["TAB-AsMaxdYL_t", "TAB-YT6eNksV-", "TAB-l_9I0aNYZ"] report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "random_name1", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "working_timeout": 3600, "dashboard": report_schedule.id, "extra": {"dashboard": {"anchor": json.dumps(anchors)}}, } self.login(ADMIN_USERNAME) uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 422 assert "message" in data assert "extra" in data["message"] assert all(anchor in data["message"]["extra"][0] for anchor in anchors) is True @with_feature_flags(ALERT_REPORT_TABS=True) @pytest.mark.usefixtures("load_mutltiple_tabs_dashboard", "create_report_schedules") def test_create_report_schedule_with_multiple_anchors(self): """ ReportSchedule Api: Test report schedule with all tabs """ report_dashboard = ( db.session.query(Dashboard) .filter(Dashboard.slug == "multi_tabs_test") .first() ) get_example_database() # noqa: F841 self.login(ADMIN_USERNAME) tabs_uri = f"/api/v1/dashboard/{report_dashboard.id}/tabs" rv = self.client.get(tabs_uri) data = json.loads(rv.data.decode("utf-8")) tabs_keys = list(data.get("result").get("all_tabs").keys()) extra_json = {"dashboard": {"anchor": json.dumps(tabs_keys)}} report_schedule_data = { "type": ReportScheduleType.REPORT, "name": "random_name2", "description": "description", "creation_method": ReportCreationMethod.ALERTS_REPORTS, "crontab": "0 9 * * *", "working_timeout": 3600, "dashboard": report_dashboard.id, "extra": extra_json, } uri = "api/v1/report/" rv = self.post_assert_metric(uri, report_schedule_data, "post") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 201 report_schedule = ( db.session.query(ReportSchedule) .filter(ReportSchedule.dashboard_id == report_dashboard.id) .first() ) assert json.loads(report_schedule.extra_json) == extra_json 
    @pytest.mark.usefixtures("create_report_schedules")
    def test_create_report_schedule_with_garbage_native_filters(self):
        """
        ReportSchedule API: POST with nativeFilters containing garbage data returns 422
        """
        dashboard = db.session.query(Dashboard).first()
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "type": ReportScheduleType.REPORT,
            "name": "garbage_native_filters_test",
            "description": "description",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "crontab": "0 9 * * *",
            "working_timeout": 3600,
            "dashboard": dashboard.id,
            # Payload entries carry none of the expected nativeFilter keys.
            "extra": {"dashboard": {"nativeFilters": [{"garbage": True}]}},
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert "message" in data
        assert "extra" in data["message"]

    @pytest.mark.usefixtures("create_report_schedules")
    def test_create_report_schedule_with_missing_native_filter_keys(self):
        """
        ReportSchedule API: POST with nativeFilters missing required keys returns 422
        """
        dashboard = db.session.query(Dashboard).first()
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "type": ReportScheduleType.REPORT,
            "name": "missing_keys_native_filters_test",
            "description": "description",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "crontab": "0 9 * * *",
            "working_timeout": 3600,
            "dashboard": dashboard.id,
            # Only nativeFilterId is supplied; the remaining required keys
            # are intentionally absent.
            "extra": {
                "dashboard": {
                    "nativeFilters": [{"nativeFilterId": "NATIVE_FILTER-abc"}]
                }
            },
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert "message" in data
        assert "extra" in data["message"]

    @pytest.mark.usefixtures("create_report_schedules")
    def test_create_report_schedule_with_nonexistent_native_filter_id(self):
        """
        ReportSchedule API: POST with nativeFilterId not on dashboard returns 422
        """
        dashboard = db.session.query(Dashboard).first()
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "type": ReportScheduleType.REPORT,
            "name": "nonexistent_filter_id_test",
            "description": "description",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "crontab": "0 9 * * *",
            "working_timeout": 3600,
            "dashboard": dashboard.id,
            # A structurally valid entry whose id matches no filter configured
            # on the target dashboard.
            "extra": {
                "dashboard": {
                    "nativeFilters": [
                        {
                            "nativeFilterId": "NATIVE_FILTER-does-not-exist",
                            "filterType": "filter_select",
                            "columnName": "col",
                            "filterValues": ["a"],
                        }
                    ]
                }
            },
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert "message" in data
        assert "extra" in data["message"]

    def test_create_report_schedule_with_valid_native_filter_empty_values(self):
        """
        ReportSchedule API: POST with valid nativeFilterId and empty filterValues returns 201
        """
        # Create a dashboard with a native filter in json_metadata
        filter_id = "NATIVE_FILTER-valid123"
        dashboard = Dashboard()
        dashboard.dashboard_title = "dash_with_native_filter"
        dashboard.slug = "dash_with_native_filter"
        dashboard.json_metadata = json.dumps(
            {"native_filter_configuration": [{"id": filter_id, "name": "Test Filter"}]}
        )
        db.session.add(dashboard)
        db.session.commit()

        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "type": ReportScheduleType.REPORT,
            "name": "valid_native_filter_empty_values",
            "description": "description",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "crontab": "0 9 * * *",
            "working_timeout": 3600,
            "dashboard": dashboard.id,
            # Empty filterValues are accepted as long as the filter id exists
            # on the dashboard.
            "extra": {
                "dashboard": {
                    "nativeFilters": [
                        {
                            "nativeFilterId": filter_id,
                            "filterType": "filter_select",
                            "columnName": "col",
                            "filterValues": [],
                        }
                    ]
                }
            },
        }
        uri = "api/v1/report/"
        rv = self.post_assert_metric(uri, report_schedule_data, "post")
        assert rv.status_code == 201
        # Clean up the rows created by this test (no fixture manages them).
        created_id = json.loads(rv.data.decode("utf-8")).get("id")
        created_model = db.session.query(ReportSchedule).get(created_id)
        db.session.delete(created_model)
        db.session.delete(dashboard)
        db.session.commit()
    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_with_garbage_native_filters(self):
        """
        ReportSchedule API: PUT with nativeFilters containing garbage data returns 422
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        dashboard = db.session.query(Dashboard).first()
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "type": ReportScheduleType.REPORT,
            "name": "name2",
            "crontab": "0 10 * * *",
            "dashboard": dashboard.id,
            # Entries carry none of the expected nativeFilter keys.
            "extra": {"dashboard": {"nativeFilters": [{"garbage": True}]}},
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert "message" in data
        assert "extra" in data["message"]

    @pytest.mark.usefixtures("create_report_schedules")
    def test_update_report_schedule_with_stale_native_filter_id(self):
        """
        ReportSchedule API: PUT with nativeFilterId no longer on dashboard returns 422
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name2")
            .one_or_none()
        )
        # Dashboard with no native filters configured
        dashboard = db.session.query(Dashboard).first()
        self.login(ADMIN_USERNAME)
        report_schedule_data = {
            "type": ReportScheduleType.REPORT,
            "name": "name2",
            "crontab": "0 10 * * *",
            "dashboard": dashboard.id,
            # Structurally valid entry referencing a filter id that is not
            # present on the dashboard above.
            "extra": {
                "dashboard": {
                    "nativeFilters": [
                        {
                            "nativeFilterId": "NATIVE_FILTER-stale",
                            "filterType": "filter_select",
                            "columnName": "col",
                            "filterValues": ["val"],
                        }
                    ]
                }
            },
        }
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.put_assert_metric(uri, report_schedule_data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert "message" in data
        assert "extra" in data["message"]

    @patch("superset.commands.dashboard.update.send_email_smtp")
    def test_dashboard_update_deletes_native_filter_deactivates_reports(
        self, mock_send_email: Any
    ):
        """
        Dashboard API: removing a native filter deactivates referencing reports
        and emails each owner
        """
        filter_id = "NATIVE_FILTER-todelete"
        # Create dashboard with that filter
        dashboard = Dashboard()
        dashboard.dashboard_title = "dash_filter_delete"
        dashboard.slug = "dash_filter_delete"
        dashboard.json_metadata = json.dumps(
            {"native_filter_configuration": [{"id": filter_id, "name": "To Delete"}]}
        )
        db.session.add(dashboard)
        # flush (not commit) so dashboard.id is available for the report below
        db.session.flush()

        admin = self.get_user("admin")
        # Create report referencing that filter
        report = insert_report_schedule(
            type=ReportScheduleType.REPORT,
            name="report_with_filter",
            crontab="0 9 * * *",
            owners=[admin],
            dashboard=dashboard,
            extra={
                "dashboard": {
                    "nativeFilters": [
                        {
                            "nativeFilterId": filter_id,
                            "filterType": "filter_select",
                            "columnName": "col",
                            "filterValues": [],
                        }
                    ]
                }
            },
        )
        db.session.commit()

        self.login(ADMIN_USERNAME)
        # Update dashboard removing the native filter
        uri = f"api/v1/dashboard/{dashboard.id}"
        rv = self.put_assert_metric(
            uri,
            {"json_metadata": json.dumps({"native_filter_configuration": []})},
            "put",
        )
        assert rv.status_code == 200

        # Reload the report from the DB to observe the side effect.
        db.session.refresh(report)
        assert report.active is False
        assert mock_send_email.called

        db.session.delete(report)
        db.session.delete(dashboard)
        db.session.commit()

    @patch("superset.commands.dashboard.update.send_email_smtp")
    def test_dashboard_update_unrelated_filter_removal_no_side_effects(
        self, mock_send_email: Any
    ):
        """
        Dashboard API: removing a filter not referenced by any report has no side effects
        """
        filter_id = "NATIVE_FILTER-unreferenced"
        dashboard = Dashboard()
        dashboard.dashboard_title = "dash_no_reports"
        dashboard.slug = "dash_no_reports"
        dashboard.json_metadata = json.dumps(
            {"native_filter_configuration": [{"id": filter_id, "name": "Unused"}]}
        )
        db.session.add(dashboard)
        db.session.commit()

        self.login(ADMIN_USERNAME)
        uri = f"api/v1/dashboard/{dashboard.id}"
        rv = self.put_assert_metric(
            uri,
            {"json_metadata": json.dumps({"native_filter_configuration": []})},
            "put",
        )
        assert rv.status_code == 200
        # No report referenced the filter, so no notification was sent.
        assert not mock_send_email.called

        db.session.delete(dashboard)
        db.session.commit()

    @patch("superset.commands.dashboard.update.send_email_smtp")
    def test_dashboard_update_deleted_filter_multiple_reports_notifies_all_owners(
        self, mock_send_email: Any
    ):
        """
        Dashboard API: removing a filter referenced by multiple reports
        deactivates all of them and emails each owner once per report
        """
        filter_id = "NATIVE_FILTER-shared"
        dashboard = Dashboard()
        dashboard.dashboard_title = "dash_shared_filter"
        dashboard.slug = "dash_shared_filter"
        dashboard.json_metadata = json.dumps(
            {"native_filter_configuration": [{"id": filter_id, "name": "Shared"}]}
        )
        db.session.add(dashboard)
        # flush (not commit) so dashboard.id is available for the reports below
        db.session.flush()

        admin = self.get_user("admin")
        # Both reports reference the same native filter.
        native_filter_extra = {
            "dashboard": {
                "nativeFilters": [
                    {
                        "nativeFilterId": filter_id,
                        "filterType": "filter_select",
                        "columnName": "col",
                        "filterValues": [],
                    }
                ]
            }
        }
        report_a = insert_report_schedule(
            type=ReportScheduleType.REPORT,
            name="report_shared_filter_a",
            crontab="0 9 * * *",
            owners=[admin],
            dashboard=dashboard,
            extra=native_filter_extra,
        )
        report_b = insert_report_schedule(
            type=ReportScheduleType.REPORT,
            name="report_shared_filter_b",
            crontab="0 10 * * *",
            owners=[admin],
            dashboard=dashboard,
            extra=native_filter_extra,
        )
        db.session.commit()

        self.login(ADMIN_USERNAME)
        uri = f"api/v1/dashboard/{dashboard.id}"
        rv = self.put_assert_metric(
            uri,
            {"json_metadata": json.dumps({"native_filter_configuration": []})},
            "put",
        )
        assert rv.status_code == 200

        # Both referencing reports must be deactivated.
        db.session.refresh(report_a)
        db.session.refresh(report_b)
        assert report_a.active is False
        assert report_b.active is False
        # One email call per report (admin owns both)
        assert mock_send_email.call_count == 2

        db.session.delete(report_a)
        db.session.delete(report_b)
        db.session.delete(dashboard)
        db.session.commit()