diff --git a/superset/reports/commands/alert.py b/superset/reports/commands/alert.py
index ccf6d436cd..600f849224 100644
--- a/superset/reports/commands/alert.py
+++ b/superset/reports/commands/alert.py
@@ -36,6 +36,9 @@ from superset.reports.commands.exceptions import (
 
 logger = logging.getLogger(__name__)
 
+ALERT_SQL_LIMIT = 2
+# All sql statements have an applied LIMIT,
+# to avoid heavy loads done by a user mistake
 OPERATOR_FUNCTIONS = {">=": ge, ">": gt, "<=": le, "<": lt, "==": eq, "!=": ne}
 
 
@@ -117,7 +120,10 @@ class AlertCommand(BaseCommand):
         )
         rendered_sql = sql_template.process_template(self._report_schedule.sql)
         try:
-            df = self._report_schedule.database.get_df(rendered_sql)
+            limited_rendered_sql = self._report_schedule.database.apply_limit_to_sql(
+                rendered_sql, ALERT_SQL_LIMIT
+            )
+            df = self._report_schedule.database.get_df(limited_rendered_sql)
         except Exception as ex:
             raise AlertQueryError(message=str(ex))
 
diff --git a/tests/reports/commands_tests.py b/tests/reports/commands_tests.py
index c61cf21fca..3fefaf1909 100644
--- a/tests/reports/commands_tests.py
+++ b/tests/reports/commands_tests.py
@@ -17,7 +17,7 @@ import json
 from datetime import datetime, timedelta
 from typing import List, Optional
-from unittest.mock import patch
+from unittest.mock import Mock, patch
 
 import pytest
 from contextlib2 import contextmanager
@@ -49,10 +49,17 @@ from superset.reports.commands.exceptions import (
 )
 from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand
 from superset.utils.core import get_example_database
+from tests.fixtures.world_bank_dashboard import (
+    load_world_bank_dashboard_with_slices_module_scope,
+)
 from tests.reports.utils import insert_report_schedule
 from tests.test_app import app
 from tests.utils import read_fixture
 
+pytestmark = pytest.mark.usefixtures(
+    "load_world_bank_dashboard_with_slices_module_scope"
+)
+
 
 def get_target_from_report_schedule(report_schedule) -> List[str]:
     return [
@@ -129,6 +136,22 @@ def cleanup_report_schedule(report_schedule: ReportSchedule) -> None:
     db.session.commit()
 
 
+@contextmanager
+def create_test_table_context(database: Database):
+    database.get_sqla_engine().execute(
+        "CREATE TABLE test_table AS SELECT 1 as first, 2 as second"
+    )
+    database.get_sqla_engine().execute(
+        "INSERT INTO test_table (first, second) VALUES (1, 2)"
+    )
+    database.get_sqla_engine().execute(
+        "INSERT INTO test_table (first, second) VALUES (3, 4)"
+    )
+
+    yield db.session
+    database.get_sqla_engine().execute("DROP TABLE test_table")
+
+
 @pytest.yield_fixture()
 def create_report_email_chart():
     with app.app_context():
@@ -233,7 +256,7 @@ def create_alert_slack_chart_grace():
 
 
 @pytest.yield_fixture(
-    params=["alert1", "alert2", "alert3", "alert4", "alert5", "alert6", "alert7"]
+    params=["alert1", "alert2", "alert3", "alert4", "alert5", "alert6", "alert7",]
 )
 def create_alert_email_chart(request):
     param_config = {
@@ -276,35 +299,22 @@ def create_alert_email_chart(request):
     with app.app_context():
         chart = db.session.query(Slice).first()
         example_database = get_example_database()
+        with create_test_table_context(example_database):
 
-        report_schedule = create_report_notification(
-            email_target="target@email.com",
-            chart=chart,
-            report_type=ReportScheduleType.ALERT,
-            database=example_database,
-            sql=param_config[request.param]["sql"],
-            validator_type=param_config[request.param]["validator_type"],
-            validator_config_json=param_config[request.param]["validator_config_json"],
-        )
-        yield report_schedule
+            report_schedule = create_report_notification(
+                email_target="target@email.com",
+                chart=chart,
+                report_type=ReportScheduleType.ALERT,
+                database=example_database,
+                sql=param_config[request.param]["sql"],
+                validator_type=param_config[request.param]["validator_type"],
+                validator_config_json=param_config[request.param][
+                    "validator_config_json"
+                ],
+            )
+            yield report_schedule
 
-        cleanup_report_schedule(report_schedule)
-
-
-@contextmanager
-def create_test_table_context(database: Database):
-    database.get_sqla_engine().execute(
-        "CREATE TABLE test_table AS SELECT 1 as first, 2 as second"
-    )
-    database.get_sqla_engine().execute(
-        "INSERT INTO test_table (first, second) VALUES (1, 2)"
-    )
-    database.get_sqla_engine().execute(
-        "INSERT INTO test_table (first, second) VALUES (3, 4)"
-    )
-
-    yield db.session
-    database.get_sqla_engine().execute("DROP TABLE test_table")
+            cleanup_report_schedule(report_schedule)
 
 
 @pytest.yield_fixture(
@@ -373,12 +383,12 @@ def create_no_alert_email_chart(request):
 def create_mul_alert_email_chart(request):
     param_config = {
         "alert1": {
-            "sql": "SELECT first from test_table",
+            "sql": "SELECT first, second from test_table",
             "validator_type": ReportScheduleValidatorType.OPERATOR,
             "validator_config_json": '{"op": "<", "threshold": 10}',
         },
         "alert2": {
-            "sql": "SELECT first, second from test_table",
+            "sql": "SELECT first from test_table",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
             "validator_config_json": '{"op": "<", "threshold": 10}',
         },
@@ -626,6 +636,28 @@ def test_report_schedule_success_grace_end(create_alert_slack_chart_grace):
     assert create_alert_slack_chart_grace.last_state == ReportState.NOOP
 
 
+@pytest.mark.usefixtures("create_alert_email_chart")
+@patch("superset.reports.notifications.email.send_email_smtp")
+@patch("superset.utils.screenshots.ChartScreenshot.compute_and_cache")
+def test_alert_limit_is_applied(screenshot_mock, email_mock, create_alert_email_chart):
+    """
+    ExecuteReport Command: Test that all alerts apply a SQL limit to stmts
+    """
+
+    with patch.object(
+        create_alert_email_chart.database.db_engine_spec, "execute", return_value=None
+    ) as execute_mock:
+        with patch.object(
+            create_alert_email_chart.database.db_engine_spec,
+            "fetch_data",
+            return_value=None,
+        ) as fetch_data_mock:
+            AsyncExecuteReportScheduleCommand(
+                create_alert_email_chart.id, datetime.utcnow()
+            ).run()
+            assert "LIMIT 2" in execute_mock.call_args[0][1]
+
+
 @pytest.mark.usefixtures("create_report_email_dashboard")
 @patch("superset.reports.notifications.email.send_email_smtp")
 @patch("superset.utils.screenshots.DashboardScreenshot.compute_and_cache")
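Note, as a sketch outside the patch itself: the change routes the rendered alert SQL through Database.apply_limit_to_sql before calling get_df, so the query an alert executes is capped at ALERT_SQL_LIMIT rows; that is why the new test only needs to assert that "LIMIT 2" appears in the SQL handed to db_engine_spec.execute. The snippet below is a minimal, self-contained illustration of that idea. The function naive_apply_limit_to_sql is a hypothetical stand-in, not Superset's implementation, whose actual limiting syntax is produced per database dialect and may differ.

# Illustrative stand-in only, so the example runs without a Superset install.
ALERT_SQL_LIMIT = 2  # mirrors the constant added in superset/reports/commands/alert.py


def naive_apply_limit_to_sql(sql: str, limit: int) -> str:
    # Wrap the statement in a subquery and cap the number of returned rows.
    return f"SELECT * FROM ({sql}) AS limited_qry LIMIT {limit}"


rendered_sql = "SELECT first FROM test_table"
limited_rendered_sql = naive_apply_limit_to_sql(rendered_sql, ALERT_SQL_LIMIT)

# Same style of check the new test performs against the executed SQL.
assert "LIMIT 2" in limited_rendered_sql
print(limited_rendered_sql)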