2020-11-25 03:50:30 -05:00
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
2021-07-21 19:53:45 -04:00
from contextlib import contextmanager
2020-12-09 13:19:07 -05:00
from datetime import datetime , timedelta
2022-01-11 13:48:50 -05:00
from typing import Any , Dict , List , Optional
2021-03-16 12:06:29 -04:00
from unittest . mock import Mock , patch
2021-03-30 11:46:16 -04:00
from uuid import uuid4
2020-11-25 03:50:30 -05:00
import pytest
2021-02-24 16:31:31 -05:00
from flask_sqlalchemy import BaseQuery
2020-11-25 03:50:30 -05:00
from freezegun import freeze_time
from sqlalchemy . sql import func
2021-04-12 11:51:32 -04:00
from superset import db , security_manager
2020-11-25 03:50:30 -05:00
from superset . models . core import Database
from superset . models . dashboard import Dashboard
from superset . models . reports import (
2021-04-15 17:07:49 -04:00
ReportDataFormat ,
2020-11-25 03:50:30 -05:00
ReportExecutionLog ,
ReportRecipients ,
ReportRecipientType ,
ReportSchedule ,
ReportScheduleType ,
ReportScheduleValidatorType ,
2020-12-09 13:19:07 -05:00
ReportState ,
2020-11-25 03:50:30 -05:00
)
from superset . models . slice import Slice
from superset . reports . commands . exceptions import (
2020-12-21 14:07:30 -05:00
AlertQueryError ,
AlertQueryInvalidTypeError ,
2020-11-25 03:50:30 -05:00
AlertQueryMultipleColumnsError ,
AlertQueryMultipleRowsError ,
2021-04-15 17:07:49 -04:00
ReportScheduleCsvFailedError ,
ReportScheduleCsvTimeout ,
2020-11-25 03:50:30 -05:00
ReportScheduleNotFoundError ,
ReportScheduleNotificationError ,
ReportSchedulePreviousWorkingError ,
2021-03-08 09:21:18 -05:00
ReportScheduleScreenshotFailedError ,
ReportScheduleScreenshotTimeout ,
2020-12-09 13:19:07 -05:00
ReportScheduleWorkingTimeoutError ,
2020-11-25 03:50:30 -05:00
)
from superset . reports . commands . execute import AsyncExecuteReportScheduleCommand
2021-04-12 16:18:17 -04:00
from superset . reports . commands . log_prune import AsyncPruneReportScheduleLogCommand
2022-01-16 01:32:50 -05:00
from superset . utils . database import get_example_database
2021-07-01 11:03:07 -04:00
from tests . integration_tests . fixtures . birth_names_dashboard import (
load_birth_names_dashboard_with_slices ,
2021-12-16 19:11:47 -05:00
load_birth_names_data ,
2021-07-01 11:03:07 -04:00
)
2022-01-11 13:48:50 -05:00
from tests . integration_tests . fixtures . tabbed_dashboard import tabbed_dashboard
2021-07-01 11:03:07 -04:00
from tests . integration_tests . fixtures . world_bank_dashboard import (
2021-02-17 13:03:35 -05:00
load_world_bank_dashboard_with_slices_module_scope ,
2021-12-16 19:11:47 -05:00
load_world_bank_data ,
2021-02-17 13:03:35 -05:00
)
2021-07-01 11:03:07 -04:00
from tests . integration_tests . reports . utils import insert_report_schedule
from tests . integration_tests . test_app import app
from tests . integration_tests . utils import read_fixture
2020-11-25 03:50:30 -05:00
2021-02-17 13:03:35 -05:00
# Every test in this module needs the World Bank dashboard loaded.
pytestmark = pytest.mark.usefixtures(
    "load_world_bank_dashboard_with_slices_module_scope"
)

TEST_ID = str(uuid4())  # execution id handed to AsyncExecuteReportScheduleCommand
CSV_FILE = read_fixture("trends.csv")  # canned CSV payload for data reports
SCREENSHOT_FILE = read_fixture("sample.png")  # canned PNG payload for screenshots
OWNER_EMAIL = "admin@fab.org"  # email of the admin user set as report owner
def get_target_from_report_schedule(report_schedule: ReportSchedule) -> List[str]:
    """Return the configured ``target`` of every recipient on the schedule."""
    targets = []
    for recipient in report_schedule.recipients:
        config = json.loads(recipient.recipient_config_json)
        targets.append(config["target"])
    return targets
def get_error_logs_query(report_schedule: ReportSchedule) -> BaseQuery:
    """Build a query for the schedule's ERROR execution logs, newest first."""
    return (
        db.session.query(ReportExecutionLog)
        .filter(
            ReportExecutionLog.report_schedule == report_schedule,
            ReportExecutionLog.state == ReportState.ERROR,
        )
        .order_by(ReportExecutionLog.end_dttm.desc())
    )
def get_notification_error_sent_count(report_schedule: ReportSchedule) -> int:
    """Count ERROR logs recording that the error notification was delivered."""
    error_logs = get_error_logs_query(report_schedule).all()
    return sum(
        1
        for log in error_logs
        if log.error_message == "Notification sent with error"
    )
def assert_log(state: str, error_message: Optional[str] = None):
    """
    Assert the run left the expected trail of ReportExecutionLog rows.

    Every execution writes a WORKING log plus a final-state log; on ERROR a
    third log is written for the error notification email.
    """
    db.session.commit()
    logs = db.session.query(ReportExecutionLog).all()

    if state == ReportState.ERROR:
        # On error we send an email
        assert len(logs) == 3
    else:
        assert len(logs) == 2
    log_states = [log.state for log in logs]
    assert ReportState.WORKING in log_states
    assert state in log_states
    assert error_message in [log.error_message for log in logs]
def create_report_notification(
    email_target: Optional[str] = None,
    slack_channel: Optional[str] = None,
    chart: Optional[Slice] = None,
    dashboard: Optional[Dashboard] = None,
    database: Optional[Database] = None,
    sql: Optional[str] = None,
    report_type: Optional[str] = None,
    validator_type: Optional[str] = None,
    validator_config_json: Optional[str] = None,
    grace_period: Optional[int] = None,
    report_format: Optional[ReportDataFormat] = None,
    name: Optional[str] = None,
    extra: Optional[Dict[str, Any]] = None,
    force_screenshot: bool = False,
) -> ReportSchedule:
    """
    Insert a ReportSchedule with a single recipient for use in tests.

    Exactly one of ``email_target``/``slack_channel`` is expected: a Slack
    recipient is created when ``slack_channel`` is given, otherwise an email
    recipient.  Defaults to a daily REPORT owned by the admin user.
    """
    report_type = report_type or ReportScheduleType.REPORT
    target = email_target or slack_channel
    config_json = {"target": target}
    # Look up the admin user to attach as owner of the schedule.
    owner = (
        db.session.query(security_manager.user_model)
        .filter_by(email=OWNER_EMAIL)
        .one_or_none()
    )

    if slack_channel:
        recipient = ReportRecipients(
            type=ReportRecipientType.SLACK,
            recipient_config_json=json.dumps(config_json),
        )
    else:
        recipient = ReportRecipients(
            type=ReportRecipientType.EMAIL,
            recipient_config_json=json.dumps(config_json),
        )

    if name is None:
        name = "report_with_csv" if report_format else "report"

    report_schedule = insert_report_schedule(
        type=report_type,
        name=name,
        crontab="0 9 * * *",
        description="Daily report",
        sql=sql,
        chart=chart,
        dashboard=dashboard,
        database=database,
        recipients=[recipient],
        owners=[owner],
        validator_type=validator_type,
        validator_config_json=validator_config_json,
        grace_period=grace_period,
        report_format=report_format or ReportDataFormat.VISUALIZATION,
        extra=extra,
        force_screenshot=force_screenshot,
    )
    return report_schedule
def cleanup_report_schedule(report_schedule: ReportSchedule) -> None:
    """Delete the schedule together with its logs and recipients (children first)."""
    db.session.query(ReportExecutionLog).filter(
        ReportExecutionLog.report_schedule == report_schedule
    ).delete()
    db.session.query(ReportRecipients).filter(
        ReportRecipients.report_schedule == report_schedule
    ).delete()
    db.session.delete(report_schedule)
    db.session.commit()
@contextmanager
def create_test_table_context(database: Database):
    """
    Create and populate ``test_table`` for the duration of the context.

    The table is dropped even when the body raises, so a failing test cannot
    leak ``test_table`` into subsequent tests sharing the example database.
    """
    database.get_sqla_engine().execute(
        "CREATE TABLE test_table AS SELECT 1 as first, 2 as second"
    )
    database.get_sqla_engine().execute(
        "INSERT INTO test_table (first, second) VALUES (1, 2)"
    )
    database.get_sqla_engine().execute(
        "INSERT INTO test_table (first, second) VALUES (3, 4)"
    )
    try:
        yield db.session
    finally:
        # Guarantee cleanup on failure as well as success.
        database.get_sqla_engine().execute("DROP TABLE test_table")
@pytest.fixture()
def create_report_email_chart():
    """Email report schedule pointing at the first chart in the database."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            email_target="target@email.com", chart=chart
        )
        yield report_schedule

        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_chart_force_screenshot():
    """Email chart report with ``force_screenshot`` enabled (cache-bypassing)."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            email_target="target@email.com", chart=chart, force_screenshot=True
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_chart_with_csv():
    """Email chart report in CSV (DATA) format; chart gets a stub query_context."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        # CSV generation requires a query context on the chart.
        chart.query_context = '{"mock": "query_context"}'
        report_schedule = create_report_notification(
            email_target="target@email.com",
            chart=chart,
            report_format=ReportDataFormat.DATA,
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_chart_with_text():
    """Email chart report in TEXT format; chart gets a stub query_context."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        chart.query_context = '{"mock": "query_context"}'
        report_schedule = create_report_notification(
            email_target="target@email.com",
            chart=chart,
            report_format=ReportDataFormat.TEXT,
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_chart_with_csv_no_query_context():
    """CSV report whose chart has no query_context — the failure case."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        # Explicitly clear the query context so CSV generation must fail.
        chart.query_context = None
        report_schedule = create_report_notification(
            email_target="target@email.com",
            chart=chart,
            report_format=ReportDataFormat.DATA,
            name="report_csv_no_query_context",
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_dashboard():
    """Email report schedule pointing at the first dashboard in the database."""
    with app.app_context():
        dashboard = db.session.query(Dashboard).first()
        report_schedule = create_report_notification(
            email_target="target@email.com", dashboard=dashboard
        )
        yield report_schedule

        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_dashboard_force_screenshot():
    """Email dashboard report with ``force_screenshot`` enabled."""
    with app.app_context():
        dashboard = db.session.query(Dashboard).first()
        report_schedule = create_report_notification(
            email_target="target@email.com", dashboard=dashboard, force_screenshot=True
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_tabbed_dashboard(tabbed_dashboard):
    """Email report for a tabbed dashboard, limited to two specific tabs."""
    with app.app_context():
        report_schedule = create_report_notification(
            email_target="target@email.com",
            dashboard=tabbed_dashboard,
            # Restrict the screenshot to these tab ids of the fixture dashboard.
            extra={
                "dashboard_tab_ids": [
                    "TAB-j53G4gtKGF",
                    "TAB-nerWR09Ju",
                ]
            },
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_slack_chart():
    """Slack report schedule pointing at the first chart in the database."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            slack_channel="slack_channel", chart=chart
        )
        yield report_schedule

        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_slack_chart_with_csv():
    """Slack chart report in CSV (DATA) format; chart gets a stub query_context."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        chart.query_context = '{"mock": "query_context"}'
        report_schedule = create_report_notification(
            slack_channel="slack_channel",
            chart=chart,
            report_format=ReportDataFormat.DATA,
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_slack_chart_with_text():
    """Slack chart report in TEXT format; chart gets a stub query_context."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        chart.query_context = '{"mock": "query_context"}'
        report_schedule = create_report_notification(
            slack_channel="slack_channel",
            chart=chart,
            report_format=ReportDataFormat.TEXT,
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_slack_chart_working():
    """Slack report stuck in WORKING state with a stale (2020-01-01) eval time."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            slack_channel="slack_channel", chart=chart
        )
        report_schedule.last_state = ReportState.WORKING
        report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0)
        db.session.commit()
        # Matching WORKING log so the "previous run still working" path triggers.
        log = ReportExecutionLog(
            scheduled_dttm=report_schedule.last_eval_dttm,
            start_dttm=report_schedule.last_eval_dttm,
            end_dttm=report_schedule.last_eval_dttm,
            state=ReportState.WORKING,
            report_schedule=report_schedule,
            uuid=uuid4(),
        )
        db.session.add(log)
        db.session.commit()

        yield report_schedule

        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_alert_slack_chart_success():
    """Slack alert whose last run succeeded, with a matching SUCCESS log."""
    with app.app_context():
        chart = db.session.query(Slice).first()
        report_schedule = create_report_notification(
            slack_channel="slack_channel",
            chart=chart,
            report_type=ReportScheduleType.ALERT,
        )
        report_schedule.last_state = ReportState.SUCCESS
        report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0)
        log = ReportExecutionLog(
            report_schedule=report_schedule,
            state=ReportState.SUCCESS,
            start_dttm=report_schedule.last_eval_dttm,
            end_dttm=report_schedule.last_eval_dttm,
            scheduled_dttm=report_schedule.last_eval_dttm,
        )
        db.session.add(log)
        db.session.commit()
        yield report_schedule

        cleanup_report_schedule(report_schedule)
@pytest.fixture(
    params=[
        "alert1",
    ]
)
def create_alert_slack_chart_grace(request):
    """Slack alert already in GRACE state, backed by ``test_table`` data."""
    param_config = {
        "alert1": {
            "sql": "SELECT count(*) from test_table",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):
            report_schedule = create_report_notification(
                slack_channel="slack_channel",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=param_config[request.param]["sql"],
                validator_type=param_config[request.param]["validator_type"],
                validator_config_json=param_config[request.param][
                    "validator_config_json"
                ],
            )
            report_schedule.last_state = ReportState.GRACE
            report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0)
            # SUCCESS log from the prior evaluation that put us in grace.
            log = ReportExecutionLog(
                report_schedule=report_schedule,
                state=ReportState.SUCCESS,
                start_dttm=report_schedule.last_eval_dttm,
                end_dttm=report_schedule.last_eval_dttm,
                scheduled_dttm=report_schedule.last_eval_dttm,
            )
            db.session.add(log)
            db.session.commit()
            yield report_schedule

            cleanup_report_schedule(report_schedule)
@pytest.fixture(
    params=[
        "alert1",
        "alert2",
        "alert3",
        "alert4",
        "alert5",
        "alert6",
        "alert7",
        "alert8",
    ]
)
def create_alert_email_chart(request):
    """
    Parametrized email alerts that all evaluate to *triggered*.

    Covers every operator (>, >=, <, <=, !=), NOT_NULL, a Jinja-templated
    query, and a float threshold.
    """
    param_config = {
        "alert1": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">", "threshold": 9}',
        },
        "alert2": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">=", "threshold": 10}',
        },
        "alert3": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 11}',
        },
        "alert4": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<=", "threshold": 10}',
        },
        "alert5": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "!=", "threshold": 11}',
        },
        "alert6": {
            "sql": "SELECT 'something' as metric",
            "validator_type": ReportScheduleValidatorType.NOT_NULL,
            "validator_config_json": "{}",
        },
        "alert7": {
            # Jinja templating should be rendered before execution.
            "sql": "SELECT {{ 5 + 5 }} as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "!=", "threshold": 11}',
        },
        "alert8": {
            "sql": "SELECT 55 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">", "threshold": 54.999}',
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):

            report_schedule = create_report_notification(
                email_target="target@email.com",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=param_config[request.param]["sql"],
                validator_type=param_config[request.param]["validator_type"],
                validator_config_json=param_config[request.param][
                    "validator_config_json"
                ],
                force_screenshot=True,
            )
            yield report_schedule

            cleanup_report_schedule(report_schedule)
@pytest.fixture(
    params=[
        "alert1",
        "alert2",
        "alert3",
        "alert4",
        "alert5",
        "alert6",
        "alert7",
        "alert8",
        "alert9",
    ]
)
def create_no_alert_email_chart(request):
    """
    Parametrized email alerts that all evaluate to *not triggered*:
    failing operator comparisons, empty result sets, and NULL metrics.
    """
    param_config = {
        "alert1": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
        "alert2": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">=", "threshold": 11}',
        },
        "alert3": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
        "alert4": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<=", "threshold": 9}',
        },
        "alert5": {
            "sql": "SELECT 10 as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "!=", "threshold": 10}',
        },
        "alert6": {
            # Empty result set: the WHERE clause matches no rows.
            "sql": "SELECT first from test_table where 1=0",
            "validator_type": ReportScheduleValidatorType.NOT_NULL,
            "validator_config_json": "{}",
        },
        "alert7": {
            "sql": "SELECT first from test_table where 1=0",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">", "threshold": 0}',
        },
        "alert8": {
            "sql": "SELECT Null as metric",
            "validator_type": ReportScheduleValidatorType.NOT_NULL,
            "validator_config_json": "{}",
        },
        "alert9": {
            "sql": "SELECT Null as metric",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": ">", "threshold": 0}',
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):
            report_schedule = create_report_notification(
                email_target="target@email.com",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=param_config[request.param]["sql"],
                validator_type=param_config[request.param]["validator_type"],
                validator_config_json=param_config[request.param][
                    "validator_config_json"
                ],
            )
            yield report_schedule

            cleanup_report_schedule(report_schedule)
@pytest.fixture(params=["alert1", "alert2"])
def create_mul_alert_email_chart(request):
    """
    Alerts whose queries return multiple columns or rows — used to exercise
    the AlertQueryMultiple{Columns,Rows}Error paths.
    """
    param_config = {
        "alert1": {
            "sql": "SELECT first, second from test_table",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
        "alert2": {
            "sql": "SELECT first from test_table",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):
            report_schedule = create_report_notification(
                email_target="target@email.com",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=param_config[request.param]["sql"],
                validator_type=param_config[request.param]["validator_type"],
                validator_config_json=param_config[request.param][
                    "validator_config_json"
                ],
            )
            yield report_schedule

            cleanup_report_schedule(report_schedule)
@pytest.fixture(params=["alert1", "alert2"])
def create_invalid_sql_alert_email_chart(request):
    """
    Alerts with invalid SQL (non-numeric result / missing table) — used to
    exercise AlertQueryError and AlertQueryInvalidTypeError paths.
    """
    param_config = {
        "alert1": {
            "sql": "SELECT 'string' ",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
        "alert2": {
            "sql": "SELECT first from foo_table",
            "validator_type": ReportScheduleValidatorType.OPERATOR,
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
    }
    with app.app_context():
        chart = db.session.query(Slice).first()
        example_database = get_example_database()
        with create_test_table_context(example_database):
            report_schedule = create_report_notification(
                email_target="target@email.com",
                chart=chart,
                report_type=ReportScheduleType.ALERT,
                database=example_database,
                sql=param_config[request.param]["sql"],
                validator_type=param_config[request.param]["validator_type"],
                validator_config_json=param_config[request.param][
                    "validator_config_json"
                ],
                # One-hour grace period so repeated failures don't re-notify.
                grace_period=60 * 60,
            )
            yield report_schedule
            cleanup_report_schedule(report_schedule)
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_email_chart_report_schedule(
    screenshot_mock,
    email_mock,
    create_report_email_chart,
):
    """
    ExecuteReport Command: Test chart email report schedule with screenshot.

    Verifies the explore link (force=false for a plain report), the smtp
    target, the inline screenshot attachment, and the execution logs.
    """
    # setup screenshot mock
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(
            create_report_email_chart
        )
        # assert that the link sent is correct
        assert (
            '<a href="http://0.0.0.0:8080/superset/explore/?'
            "form_data=%7B%22slice_id%22%3A%20"
            f"{create_report_email_chart.chart.id}%7D&"
            'standalone=0&force=false">Explore in Superset</a>'
            in email_mock.call_args[0][2]
        )
        # Assert the email smtp address
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE
        # Assert logs are correct
        assert_log(ReportState.SUCCESS)
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices",
    "create_report_email_chart_force_screenshot",
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_email_chart_report_schedule_force_screenshot(
    screenshot_mock,
    email_mock,
    create_report_email_chart_force_screenshot,
):
    """
    ExecuteReport Command: Test chart email report schedule with screenshot

    In this test ``force_screenshot`` is true, and the screenshot URL should
    reflect that (``force=true`` in the explore link).
    """
    # setup screenshot mock
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart_force_screenshot.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(
            create_report_email_chart_force_screenshot
        )
        # assert that the link sent is correct
        assert (
            '<a href="http://0.0.0.0:8080/superset/explore/?'
            "form_data=%7B%22slice_id%22%3A%20"
            f"{create_report_email_chart_force_screenshot.chart.id}%7D&"
            'standalone=0&force=true">Explore in Superset</a>'
            in email_mock.call_args[0][2]
        )
        # Assert the email smtp address
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE
        # Assert logs are correct
        assert_log(ReportState.SUCCESS)
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_alert_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_email_chart_alert_schedule(
    screenshot_mock,
    email_mock,
    create_alert_email_chart,
):
    """
    ExecuteReport Command: Test chart email alert schedule with screenshot.

    Alerts always force the screenshot, so the explore link carries
    ``force=true``.
    """
    # setup screenshot mock
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_alert_email_chart.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(create_alert_email_chart)
        # assert that the link sent is correct
        assert (
            '<a href="http://0.0.0.0:8080/superset/explore/?'
            "form_data=%7B%22slice_id%22%3A%20"
            f"{create_alert_email_chart.chart.id}%7D&"
            'standalone=0&force=true">Explore in Superset</a>'
            in email_mock.call_args[0][2]
        )
        # Assert the email smtp address
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE
        # Assert logs are correct
        assert_log(ReportState.SUCCESS)
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_email_chart_report_dry_run(
    screenshot_mock,
    email_mock,
    create_report_email_chart,
):
    """
    ExecuteReport Command: Test chart email report schedule dry run.

    With ``ALERT_REPORTS_NOTIFICATION_DRY_RUN`` enabled, execution completes
    but no email is sent.
    """
    # setup screenshot mock
    screenshot_mock.return_value = SCREENSHOT_FILE
    app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = True
    try:
        with freeze_time("2020-01-01T00:00:00Z"):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_report_email_chart.id, datetime.utcnow()
            ).run()

            email_mock.assert_not_called()
    finally:
        # Restore the flag even when an assertion fails, so dry-run mode
        # cannot leak into the rest of the test session.
        app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = False
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv"
)
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.csv.get_chart_csv_data")
def test_email_chart_report_schedule_with_csv(
    csv_mock,
    email_mock,
    mock_open,
    mock_urlopen,
    create_report_email_chart_with_csv,
):
    """
    ExecuteReport Command: Test chart email report schedule with CSV.

    The CSV endpoint is mocked to return a canned payload; verifies the
    explore link, smtp target, CSV attachment, and execution logs.
    """
    # setup csv mock
    response = Mock()
    mock_open.return_value = response
    mock_urlopen.return_value = response
    mock_urlopen.return_value.getcode.return_value = 200
    response.read.return_value = CSV_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(
            create_report_email_chart_with_csv
        )
        # assert that the link sent is correct
        assert (
            '<a href="http://0.0.0.0:8080/superset/explore/?'
            "form_data=%7B%22slice_id%22%3A%20"
            f"{create_report_email_chart_with_csv.chart.id}%7D&"
            'standalone=0&force=false">Explore in Superset</a>'
            in email_mock.call_args[0][2]
        )
        # Assert the email smtp address
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email csv file
        smtp_images = email_mock.call_args[1]["data"]
        assert smtp_images[list(smtp_images.keys())[0]] == CSV_FILE

        # Assert logs are correct
        assert_log(ReportState.SUCCESS)
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices",
    "create_report_email_chart_with_csv_no_query_context",
)
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.csv.get_chart_csv_data")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_email_chart_report_schedule_with_csv_no_query_context(
    screenshot_mock,
    csv_mock,
    email_mock,
    mock_open,
    mock_urlopen,
    create_report_email_chart_with_csv_no_query_context,
):
    """
    ExecuteReport Command: Test chart email report schedule with CSV (no query context)
    """
    # stub the screenshot payload used as the fallback
    screenshot_mock.return_value = SCREENSHOT_FILE
    # wire the urllib mocks so a CSV fetch would appear to succeed
    http_response = Mock()
    http_response.read.return_value = CSV_FILE
    http_response.getcode.return_value = 200
    mock_open.return_value = http_response
    mock_urlopen.return_value = http_response

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID,
            create_report_email_chart_with_csv_no_query_context.id,
            datetime.utcnow(),
        ).run()

    # verify that when query context is null we request a screenshot
    screenshot_mock.assert_called_once()
2021-07-28 12:43:04 -04:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_text"
)
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.csv.get_chart_dataframe")
def test_email_chart_report_schedule_with_text(
    dataframe_mock,
    email_mock,
    mock_open,
    mock_urlopen,
    create_report_email_chart_with_text,
):
    """
    ExecuteReport Command: Test chart email report schedule with text
    """
    # setup dataframe mock: the chart-data endpoint returns a serialized frame
    chart_data_payload = {
        "result": [
            {
                "data": {
                    "t1": {0: "c11", 1: "c21"},
                    "t2": {0: "c12", 1: "c22"},
                    "t3__sum": {0: "c13", 1: "c23"},
                },
                "colnames": [("t1",), ("t2",), ("t3__sum",)],
                "indexnames": [(0,), (1,)],
            },
        ],
    }
    response = Mock()
    response.read.return_value = json.dumps(chart_data_payload).encode("utf-8")
    response.getcode.return_value = 200
    mock_open.return_value = response
    mock_urlopen.return_value = response

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart_with_text.id, datetime.utcnow()
        ).run()

    # assert that the data is embedded correctly as an HTML table
    table_html = """<table border="1" class="dataframe">
  <thead>
    <tr>
      <th></th>
      <th>t1</th>
      <th>t2</th>
      <th>t3__sum</th>
    </tr>
  </thead>
  <tbody>
    <tr>
      <th>0</th>
      <td>c11</td>
      <td>c12</td>
      <td>c13</td>
    </tr>
    <tr>
      <th>1</th>
      <td>c21</td>
      <td>c22</td>
      <td>c23</td>
    </tr>
  </tbody>
</table>"""
    assert table_html in email_mock.call_args[0][2]

    # Assert logs are correct
    assert_log(ReportState.SUCCESS)
2021-02-22 17:53:14 -05:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_dashboard"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot")
def test_email_dashboard_report_schedule(
    screenshot_mock, email_mock, create_report_email_dashboard
):
    """
    ExecuteReport Command: Test dashboard email report schedule
    """
    # have the screenshot mock hand back a fixed image payload
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_dashboard.id, datetime.utcnow()
        ).run()

        targets = get_target_from_report_schedule(create_report_email_dashboard)
        # the recipient address must match the schedule's configured target
        assert email_mock.call_args[0][0] == targets[0]
        # the screenshot must be embedded inline in the email
        inline_images = email_mock.call_args[1]["images"]
        assert inline_images[list(inline_images.keys())[0]] == SCREENSHOT_FILE
        # the execution log must record a success
        assert_log(ReportState.SUCCESS)
2020-11-25 03:50:30 -05:00
2022-03-04 15:30:40 -05:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices",
    "create_report_email_dashboard_force_screenshot",
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot")
def test_email_dashboard_report_schedule_force_screenshot(
    screenshot_mock, email_mock, create_report_email_dashboard_force_screenshot
):
    """
    ExecuteReport Command: Test dashboard email report schedule with force screenshot
    """
    # setup screenshot mock
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID,
            create_report_email_dashboard_force_screenshot.id,
            datetime.utcnow(),
        ).run()

        notification_targets = get_target_from_report_schedule(
            create_report_email_dashboard_force_screenshot
        )
        # Assert the email smtp address
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE
        # Assert logs are correct
        assert_log(ReportState.SUCCESS)
2021-02-22 17:53:14 -05:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_slack_chart"
)
@patch("superset.reports.notifications.slack.WebClient.files_upload")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_slack_chart_report_schedule(
    screenshot_mock,
    file_upload_mock,
    create_report_slack_chart,
):
    """
    ExecuteReport Command: Test chart slack report schedule
    """
    # stub the chart screenshot with a fixed payload
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_slack_chart.id, datetime.utcnow()
        ).run()

        targets = get_target_from_report_schedule(create_report_slack_chart)
        # the upload must go to the schedule's slack channel
        assert file_upload_mock.call_args[1]["channels"] == targets[0]
        # and carry the screenshot as the uploaded file
        assert file_upload_mock.call_args[1]["file"] == SCREENSHOT_FILE
        # the execution log must record a success
        assert_log(ReportState.SUCCESS)
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_slack_chart_with_csv"
)
@patch("superset.reports.notifications.slack.WebClient.files_upload")
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.utils.csv.get_chart_csv_data")
def test_slack_chart_report_schedule_with_csv(
    csv_mock,
    mock_open,
    mock_urlopen,
    file_upload_mock,
    create_report_slack_chart_with_csv,
):
    """
    ExecuteReport Command: Test chart slack report schedule with CSV
    """
    # wire the urllib mocks so the CSV download appears to succeed
    http_response = Mock()
    http_response.read.return_value = CSV_FILE
    http_response.getcode.return_value = 200
    mock_open.return_value = http_response
    mock_urlopen.return_value = http_response

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_slack_chart_with_csv.id, datetime.utcnow()
        ).run()

        targets = get_target_from_report_schedule(create_report_slack_chart_with_csv)
        # the upload must target the configured channel and carry the CSV
        assert file_upload_mock.call_args[1]["channels"] == targets[0]
        assert file_upload_mock.call_args[1]["file"] == CSV_FILE

        # the execution log must record a success
        assert_log(ReportState.SUCCESS)
2020-11-25 03:50:30 -05:00
2021-07-29 13:13:28 -04:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_slack_chart_with_text"
)
@patch("superset.reports.notifications.slack.WebClient.chat_postMessage")
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.utils.csv.get_chart_dataframe")
def test_slack_chart_report_schedule_with_text(
    dataframe_mock,
    mock_open,
    mock_urlopen,
    post_message_mock,
    create_report_slack_chart_with_text,
):
    """
    ExecuteReport Command: Test chart slack report schedule with text
    """
    # setup dataframe mock: the chart-data endpoint returns a serialized frame
    chart_data_payload = {
        "result": [
            {
                "data": {
                    "t1": {0: "c11", 1: "c21"},
                    "t2": {0: "c12", 1: "c22"},
                    "t3__sum": {0: "c13", 1: "c23"},
                },
                "colnames": [("t1",), ("t2",), ("t3__sum",)],
                "indexnames": [(0,), (1,)],
            },
        ],
    }
    response = Mock()
    response.read.return_value = json.dumps(chart_data_payload).encode("utf-8")
    response.getcode.return_value = 200
    mock_open.return_value = response
    mock_urlopen.return_value = response

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_slack_chart_with_text.id, datetime.utcnow()
        ).run()

        # the message body must embed the data as a markdown table
        table_markdown = """|    | t1   | t2   | t3__sum   |
|---:|:-----|:-----|:----------|
|  0 | c11  | c12  | c13       |
|  1 | c21  | c22  | c23       |"""
        assert table_markdown in post_message_mock.call_args[1]["text"]
        # and include the explore link for the chart
        assert (
            f"<http://0.0.0.0:8080/superset/explore/?form_data=%7B%22slice_id%22%3A%20{create_report_slack_chart_with_text.chart.id}%7D&standalone=0&force=false|Explore in Superset>"
            in post_message_mock.call_args[1]["text"]
        )

        # the execution log must record a success
        assert_log(ReportState.SUCCESS)
2020-11-25 03:50:30 -05:00
@pytest.mark.usefixtures("create_report_slack_chart")
def test_report_schedule_not_found(create_report_slack_chart):
    """
    ExecuteReport Command: Test report schedule not found
    """
    # build an id guaranteed not to exist; ``scalar()`` returns None on an
    # empty table, which would make ``max_id + 1`` raise TypeError
    max_id = db.session.query(func.max(ReportSchedule.id)).scalar() or 0
    with pytest.raises(ReportScheduleNotFoundError):
        AsyncExecuteReportScheduleCommand(TEST_ID, max_id + 1, datetime.utcnow()).run()
2020-11-25 03:50:30 -05:00
@pytest.mark.usefixtures("create_report_slack_chart_working")
def test_report_schedule_working(create_report_slack_chart_working):
    """
    ExecuteReport Command: Test report schedule still working
    """
    # a schedule already in the WORKING state must refuse to run again
    with freeze_time("2020-01-01T00:00:00Z"), pytest.raises(
        ReportSchedulePreviousWorkingError
    ):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_slack_chart_working.id, datetime.utcnow()
        ).run()

    # the failure is logged and the state stays WORKING
    assert_log(
        ReportState.WORKING,
        error_message=ReportSchedulePreviousWorkingError.message,
    )
    assert create_report_slack_chart_working.last_state == ReportState.WORKING
@pytest.mark.usefixtures("create_report_slack_chart_working")
def test_report_schedule_working_timeout(create_report_slack_chart_working):
    """
    ExecuteReport Command: Test report schedule still working but should timed out
    """
    # move the clock just past the schedule's working timeout window
    past_timeout = create_report_slack_chart_working.last_eval_dttm + timedelta(
        seconds=create_report_slack_chart_working.working_timeout + 1
    )
    with freeze_time(past_timeout):
        with pytest.raises(ReportScheduleWorkingTimeoutError):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_report_slack_chart_working.id, datetime.utcnow()
            ).run()

    # Only needed for MySQL, understand why
    db.session.commit()

    execution_logs = db.session.query(ReportExecutionLog).all()
    # Two logs, first is created by fixture
    assert len(execution_logs) == 2
    assert any(
        log.error_message == ReportScheduleWorkingTimeoutError.message
        for log in execution_logs
    )
    assert create_report_slack_chart_working.last_state == ReportState.ERROR
@pytest.mark.usefixtures("create_alert_slack_chart_success")
def test_report_schedule_success_grace(create_alert_slack_chart_success):
    """
    ExecuteReport Command: Test report schedule on success to grace
    """
    # pick a time still inside the grace period
    within_grace = create_alert_slack_chart_success.last_eval_dttm + timedelta(
        seconds=create_alert_slack_chart_success.grace_period - 10
    )
    with freeze_time(within_grace):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_alert_slack_chart_success.id, datetime.utcnow()
        ).run()

    db.session.commit()
    # a successful alert re-run inside the grace window transitions to GRACE
    assert create_alert_slack_chart_success.last_state == ReportState.GRACE
@pytest.mark.usefixtures("create_alert_slack_chart_grace")
@patch("superset.reports.notifications.slack.WebClient.files_upload")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_report_schedule_success_grace_end(
    screenshot_mock, file_upload_mock, create_alert_slack_chart_grace
):
    """
    ExecuteReport Command: Test report schedule on grace to noop
    """
    screenshot_mock.return_value = SCREENSHOT_FILE

    # pick a time just past the end of the grace period
    past_grace = create_alert_slack_chart_grace.last_eval_dttm + timedelta(
        seconds=create_alert_slack_chart_grace.grace_period + 1
    )
    with freeze_time(past_grace):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_alert_slack_chart_grace.id, datetime.utcnow()
        ).run()

    db.session.commit()
    # once the grace period has elapsed the alert fires again and succeeds
    assert create_alert_slack_chart_grace.last_state == ReportState.SUCCESS
2020-11-25 03:50:30 -05:00
2021-02-17 13:03:35 -05:00
@pytest.mark.usefixtures("create_alert_email_chart")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_alert_limit_is_applied(
    screenshot_mock,
    email_mock,
    create_alert_email_chart,
):
    """
    ExecuteReport Command: Test that all alerts apply a SQL limit to stmts
    """
    screenshot_mock.return_value = SCREENSHOT_FILE

    engine_spec = create_alert_email_chart.database.db_engine_spec
    with patch.object(
        engine_spec, "execute", return_value=None
    ) as execute_mock, patch.object(
        engine_spec, "fetch_data", return_value=None
    ) as fetch_data_mock:
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_alert_email_chart.id, datetime.utcnow()
        ).run()
        # the SQL handed to the engine must carry the safety LIMIT
        assert "LIMIT 2" in execute_mock.call_args[0][1]
2021-02-22 17:53:14 -05:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_dashboard"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.DashboardScreenshot.get_screenshot")
def test_email_dashboard_report_fails(
    screenshot_mock, email_mock, create_report_email_dashboard
):
    """
    ExecuteReport Command: Test dashboard email report schedule notification fails
    """
    # setup screenshot mock
    from smtplib import SMTPException

    screenshot_mock.return_value = SCREENSHOT_FILE
    # simulate the SMTP layer blowing up when sending the email
    email_mock.side_effect = SMTPException("Could not connect to SMTP XPTO")

    with pytest.raises(ReportScheduleNotificationError):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_dashboard.id, datetime.utcnow()
        ).run()

    # the failure is recorded in the execution log
    assert_log(ReportState.ERROR, error_message="Could not connect to SMTP XPTO")
2020-11-25 03:50:30 -05:00
2021-02-22 17:53:14 -05:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_alert_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
@patch.dict(
    "superset.extensions.feature_flag_manager._feature_flags",
    ALERTS_ATTACH_REPORTS=True,
)
def test_slack_chart_alert(
    screenshot_mock,
    email_mock,
    create_alert_email_chart,
):
    """
    ExecuteReport Command: Test chart email alert with attached screenshot
    (the previous docstring said "slack", but this test exercises the email
    notification path via ``send_email_smtp``)
    """
    # setup screenshot mock
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_alert_email_chart.id, datetime.utcnow()
        ).run()

        notification_targets = get_target_from_report_schedule(create_alert_email_chart)
        # Assert the email smtp address
        assert email_mock.call_args[0][0] == notification_targets[0]
        # Assert the email inline screenshot
        smtp_images = email_mock.call_args[1]["images"]
        assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE

        # Assert logs are correct
        assert_log(ReportState.SUCCESS)
2020-11-25 03:50:30 -05:00
2021-04-01 16:06:45 -04:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_alert_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch.dict(
    "superset.extensions.feature_flag_manager._feature_flags",
    ALERTS_ATTACH_REPORTS=False,
)
def test_slack_chart_alert_no_attachment(email_mock, create_alert_email_chart):
    """
    ExecuteReport Command: Test chart slack alert
    """
    # with ALERTS_ATTACH_REPORTS disabled, no screenshot mock is needed
    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_alert_email_chart.id, datetime.utcnow()
        ).run()

        targets = get_target_from_report_schedule(create_alert_email_chart)
        # the email still goes to the configured recipient
        assert email_mock.call_args[0][0] == targets[0]
        # but must carry no inline image
        assert email_mock.call_args[1]["images"] == {}
        # and the run is logged as a success
        assert_log(ReportState.SUCCESS)
2021-03-16 12:06:29 -04:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_slack_chart"
)
@patch("superset.reports.notifications.slack.WebClient")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_slack_token_callable_chart_report(
    screenshot_mock,
    slack_client_mock_class,
    create_report_slack_chart,
):
    """
    ExecuteReport Command: Test chart slack alert (slack token callable)
    """
    slack_client_mock_class.return_value = Mock()
    app.config["SLACK_API_TOKEN"] = Mock(return_value="cool_code")
    # setup screenshot mock
    screenshot_mock.return_value = SCREENSHOT_FILE

    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_slack_chart.id, datetime.utcnow()
        ).run()
        # the token callable must be resolved exactly once
        app.config["SLACK_API_TOKEN"].assert_called_once()
        # BUG FIX: ``Mock.called_with`` is not an assertion method -- it
        # returns a new (truthy) child mock, so the original
        # ``assert mock.called_with(...)`` could never fail.  Use the real
        # assertion helper so the client construction is actually verified.
        slack_client_mock_class.assert_called_with(token="cool_code", proxy="")
        assert_log(ReportState.SUCCESS)
2020-11-25 03:50:30 -05:00
@pytest.mark.usefixtures("create_no_alert_email_chart")
def test_email_chart_no_alert(create_no_alert_email_chart):
    """
    ExecuteReport Command: Test chart email no alert
    """
    # an alert whose condition is not met must complete as a NOOP
    with freeze_time("2020-01-01T00:00:00Z"):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_no_alert_email_chart.id, datetime.utcnow()
        ).run()
    assert_log(ReportState.NOOP)
2020-11-25 03:50:30 -05:00
@pytest.mark.usefixtures("create_mul_alert_email_chart")
def test_email_mul_alert(create_mul_alert_email_chart):
    """
    ExecuteReport Command: Test chart email multiple rows
    """
    # an alert query returning multiple rows/columns must be rejected
    with freeze_time("2020-01-01T00:00:00Z"), pytest.raises(
        (AlertQueryMultipleRowsError, AlertQueryMultipleColumnsError)
    ):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_mul_alert_email_chart.id, datetime.utcnow()
        ).run()
2020-12-21 14:07:30 -05:00
2021-03-08 09:21:18 -05:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_alert_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
def test_soft_timeout_alert(email_mock, create_alert_email_chart):
    """
    ExecuteReport Command: Test soft timeout on alert queries
    """
    from celery.exceptions import SoftTimeLimitExceeded

    from superset.reports.commands.exceptions import AlertQueryTimeout

    # make the alert query hit celery's soft time limit
    with patch.object(
        create_alert_email_chart.database.db_engine_spec, "execute", return_value=None
    ) as execute_mock:
        execute_mock.side_effect = SoftTimeLimitExceeded()
        with pytest.raises(AlertQueryTimeout):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_alert_email_chart.id, datetime.utcnow()
            ).run()

    # Assert the email smtp address, asserts a notification was sent with the error
    # (the unused ``notification_targets`` lookup was removed)
    assert email_mock.call_args[0][0] == OWNER_EMAIL
    assert_log(
        ReportState.ERROR, error_message="A timeout occurred while executing the query."
    )
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_alert_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
@patch.dict(
    "superset.extensions.feature_flag_manager._feature_flags",
    ALERTS_ATTACH_REPORTS=True,
)
def test_soft_timeout_screenshot(screenshot_mock, email_mock, create_alert_email_chart):
    """
    ExecuteReport Command: Test soft timeout on screenshot
    """
    from celery.exceptions import SoftTimeLimitExceeded

    # make the screenshot step hit celery's soft time limit
    screenshot_mock.side_effect = SoftTimeLimitExceeded()
    with pytest.raises(ReportScheduleScreenshotTimeout):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_alert_email_chart.id, datetime.utcnow()
        ).run()

    # Assert the email smtp address, asserts a notification was sent with the error
    assert email_mock.call_args[0][0] == OWNER_EMAIL
    assert_log(
        ReportState.ERROR, error_message="A timeout occurred while taking a screenshot."
    )
2021-04-15 17:07:49 -04:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv"
)
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.csv.get_chart_csv_data")
def test_soft_timeout_csv(
    csv_mock,
    email_mock,
    mock_open,
    mock_urlopen,
    create_report_email_chart_with_csv,
):
    """
    ExecuteReport Command: Test soft timeout while generating the csv
    """
    from celery.exceptions import SoftTimeLimitExceeded

    # make the CSV fetch hit celery's soft time limit
    response = Mock()
    mock_open.return_value = response
    mock_urlopen.return_value = response
    mock_urlopen.return_value.getcode.side_effect = SoftTimeLimitExceeded()

    with pytest.raises(ReportScheduleCsvTimeout):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow()
        ).run()

    # Assert the email smtp address, asserts a notification was sent with the error
    # (the unused ``notification_targets`` lookup was removed)
    assert email_mock.call_args[0][0] == OWNER_EMAIL
    assert_log(
        ReportState.ERROR,
        error_message="A timeout occurred while generating a csv.",
    )
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv"
)
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.csv.get_chart_csv_data")
def test_generate_no_csv(
    csv_mock,
    email_mock,
    mock_open,
    mock_urlopen,
    create_report_email_chart_with_csv,
):
    """
    ExecuteReport Command: Test fail on generating csv
    """
    # an HTTP 200 whose body is None means no CSV could be produced
    response = Mock()
    mock_open.return_value = response
    mock_urlopen.return_value = response
    mock_urlopen.return_value.getcode.return_value = 200
    response.read.return_value = None

    with pytest.raises(ReportScheduleCsvFailedError):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow()
        ).run()

    # Assert the email smtp address, asserts a notification was sent with the error
    # (the unused ``notification_targets`` lookup was removed)
    assert email_mock.call_args[0][0] == OWNER_EMAIL
    assert_log(
        ReportState.ERROR,
        error_message="Report Schedule execution failed when generating a csv.",
    )
2021-03-08 09:21:18 -05:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_fail_screenshot(screenshot_mock, email_mock, create_report_email_chart):
    """
    ExecuteReport Command: Test generic failure while taking the screenshot
    """
    # unused imports (SoftTimeLimitExceeded, AlertQueryTimeout) removed:
    # this test raises a plain Exception, not a timeout
    screenshot_mock.side_effect = Exception("Unexpected error")
    with pytest.raises(ReportScheduleScreenshotFailedError):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart.id, datetime.utcnow()
        ).run()

    # Assert the email smtp address, asserts a notification was sent with the error
    # (the unused ``notification_targets`` lookup was removed)
    assert email_mock.call_args[0][0] == OWNER_EMAIL
    assert_log(
        ReportState.ERROR, error_message="Failed taking a screenshot Unexpected error"
    )
2021-04-15 17:07:49 -04:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_chart_with_csv"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.csv.urllib.request.urlopen")
@patch("superset.utils.csv.urllib.request.OpenerDirector.open")
@patch("superset.utils.csv.get_chart_csv_data")
def test_fail_csv(
    csv_mock, mock_open, mock_urlopen, email_mock, create_report_email_chart_with_csv
):
    """
    ExecuteReport Command: Test error on csv
    """
    # an HTTP 500 from the chart-data endpoint must fail the CSV generation
    response = Mock()
    mock_open.return_value = response
    mock_urlopen.return_value = response
    mock_urlopen.return_value.getcode.return_value = 500

    with pytest.raises(ReportScheduleCsvFailedError):
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_report_email_chart_with_csv.id, datetime.utcnow()
        ).run()

    # Assert the email smtp address, asserts a notification was sent with the error
    # (the dead ``get_target_from_report_schedule`` call was removed: its
    # result was discarded and it has no side effects the test relies on)
    assert email_mock.call_args[0][0] == OWNER_EMAIL
    assert_log(
        ReportState.ERROR, error_message="Failed generating csv <urlopen error 500>"
    )
2021-04-01 16:06:45 -04:00
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_alert_email_chart"
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch.dict(
    "superset.extensions.feature_flag_manager._feature_flags",
    ALERTS_ATTACH_REPORTS=False,
)
def test_email_disable_screenshot(email_mock, create_alert_email_chart):
    """
    ExecuteReport Command: Test that no screenshot is attached to the alert
    email when the ALERTS_ATTACH_REPORTS feature flag is disabled.
    """
    AsyncExecuteReportScheduleCommand(
        TEST_ID, create_alert_email_chart.id, datetime.utcnow()
    ).run()

    notification_targets = get_target_from_report_schedule(create_alert_email_chart)
    # Assert the email smtp address, asserts a notification was sent
    assert email_mock.call_args[0][0] == notification_targets[0]
    # Assert that there is no attached image (flag is off)
    assert email_mock.call_args[1]["images"] == {}

    assert_log(ReportState.SUCCESS)


@pytest.mark.usefixtures("create_invalid_sql_alert_email_chart")
@patch("superset.reports.notifications.email.send_email_smtp")
def test_invalid_sql_alert(email_mock, create_invalid_sql_alert_email_chart):
    """
    ExecuteReport Command: Test alert with invalid SQL statements
    """
    with freeze_time("2020-01-01T00:00:00Z"):
        with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
            ).run()

        # NOTE(review): the return value was previously bound to an unused
        # variable; the call is kept unbound for parity with test_fail_csv.
        get_target_from_report_schedule(create_invalid_sql_alert_email_chart)
        # Assert the email smtp address, asserts a notification was sent with the error
        assert email_mock.call_args[0][0] == OWNER_EMAIL


@pytest.mark.usefixtures("create_invalid_sql_alert_email_chart")
@patch("superset.reports.notifications.email.send_email_smtp")
def test_grace_period_error(email_mock, create_invalid_sql_alert_email_chart):
    """
    ExecuteReport Command: Test alert grace period on error
    """
    with freeze_time("2020-01-01T00:00:00Z"):
        with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
            ).run()
        # Only needed for MySQL, understand why
        db.session.commit()
        # NOTE(review): return value was previously bound to an unused variable.
        get_target_from_report_schedule(create_invalid_sql_alert_email_chart)
        # Assert the email smtp address, asserts a notification was sent with the error
        assert email_mock.call_args[0][0] == OWNER_EMAIL
        assert (
            get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 1
        )

    # Still inside the grace period: no additional notification is sent.
    with freeze_time("2020-01-01T00:30:00Z"):
        with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
            ).run()
        db.session.commit()
        assert (
            get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 1
        )

    # Grace period ends, assert a notification was sent
    with freeze_time("2020-01-01T01:30:00Z"):
        with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
            ).run()
        db.session.commit()
        assert (
            get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 2
        )


@pytest.mark.usefixtures("create_invalid_sql_alert_email_chart")
@patch("superset.reports.notifications.email.send_email_smtp")
@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
def test_grace_period_error_flap(
    screenshot_mock,
    email_mock,
    create_invalid_sql_alert_email_chart,
):
    """
    ExecuteReport Command: Test that a success between failures resets the
    grace period, so a subsequent failure notifies again (flapping alert).
    """
    with freeze_time("2020-01-01T00:00:00Z"):
        with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
            ).run()
        db.session.commit()
        # Assert we have 1 notification sent on the log
        assert (
            get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 1
        )

    with freeze_time("2020-01-01T00:30:00Z"):
        with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
            ).run()
        db.session.commit()
        assert (
            get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 1
        )

    # Change report_schedule to valid
    create_invalid_sql_alert_email_chart.sql = "SELECT 1 AS metric"
    create_invalid_sql_alert_email_chart.grace_period = 0
    db.session.merge(create_invalid_sql_alert_email_chart)
    db.session.commit()

    with freeze_time("2020-01-01T00:31:00Z"):
        # One success
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
        ).run()
        # Grace period ends
        AsyncExecuteReportScheduleCommand(
            TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
        ).run()
        db.session.commit()

    # Break the schedule again
    create_invalid_sql_alert_email_chart.sql = "SELECT 'first'"
    create_invalid_sql_alert_email_chart.grace_period = 10
    db.session.merge(create_invalid_sql_alert_email_chart)
    db.session.commit()

    # assert that after a success, when back to error we send the error notification
    # again
    with freeze_time("2020-01-01T00:32:00Z"):
        with pytest.raises((AlertQueryError, AlertQueryInvalidTypeError)):
            AsyncExecuteReportScheduleCommand(
                TEST_ID, create_invalid_sql_alert_email_chart.id, datetime.utcnow()
            ).run()
        db.session.commit()
        assert (
            get_notification_error_sent_count(create_invalid_sql_alert_email_chart) == 2
        )


@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices", "create_report_email_dashboard"
)
@patch("superset.reports.dao.ReportScheduleDAO.bulk_delete_logs")
def test_prune_log_soft_time_out(bulk_delete_logs, create_report_email_dashboard):
    """
    Prune report logs command: a Celery SoftTimeLimitExceeded raised by the
    DAO must propagate out of the command unchanged.
    """
    # NOTE(review): removed a function-local
    # `from datetime import datetime, timedelta` — neither name is used in
    # this body, and it shadowed the module-level imports.
    from celery.exceptions import SoftTimeLimitExceeded

    bulk_delete_logs.side_effect = SoftTimeLimitExceeded()
    with pytest.raises(SoftTimeLimitExceeded) as excinfo:
        AsyncPruneReportScheduleLogCommand().run()
    assert str(excinfo.value) == "SoftTimeLimitExceeded()"


@pytest.mark.usefixtures(
    "create_report_email_tabbed_dashboard",
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch(
    "superset.reports.commands.execute.DashboardScreenshot",
)
def test_when_tabs_are_selected_it_takes_screenshots_for_every_tabs(
    dashboard_screenshot_mock,
    send_email_smtp_mock,
    create_report_email_tabbed_dashboard,
):
    # Every screenshot request resolves to the same fake image payload.
    dashboard_screenshot_mock.get_screenshot.return_value = b"test-image"
    dashboard = create_report_email_tabbed_dashboard.dashboard

    AsyncExecuteReportScheduleCommand(
        TEST_ID, create_report_email_tabbed_dashboard.id, datetime.utcnow()
    ).run()

    # The schedule's extra config lists the tabs to capture.
    tab_ids = json.loads(create_report_email_tabbed_dashboard.extra)[
        "dashboard_tab_ids"
    ]
    assert dashboard_screenshot_mock.call_count == 2
    # One DashboardScreenshot per tab, each anchored to its tab fragment.
    for call_index, tab_id in enumerate(tab_ids):
        expected_args = (
            f"http://0.0.0.0:8080/superset/dashboard/{dashboard.id}/?standalone=3&force=false#{tab_id}",
            f"{dashboard.digest}",
        )
        assert dashboard_screenshot_mock.call_args_list[call_index].args == expected_args

    # A single email goes out carrying both screenshots.
    assert send_email_smtp_mock.called is True
    assert len(send_email_smtp_mock.call_args.kwargs["images"]) == 2