Commit fc98eac: update execution logs and states for alerts (apache#16736)
eschutho authored Sep 20, 2021
1 parent bce6ac1 commit fc98eac
Showing 2 changed files with 68 additions and 34 deletions.
superset/reports/commands/execute.py (30 changes: 20 additions & 10 deletions)
@@ -40,7 +40,6 @@
 )
 from superset.reports.commands.alert import AlertCommand
 from superset.reports.commands.exceptions import (
-    ReportScheduleAlertEndGracePeriodError,
     ReportScheduleAlertGracePeriodError,
     ReportScheduleCsvFailedError,
     ReportScheduleCsvTimeout,

@@ -403,7 +402,7 @@ def send_error(self, name: str, message: str) -> None:

     def is_in_grace_period(self) -> bool:
         """
-        Checks if an alert is on it's grace period
+        Checks if an alert is in its grace period
         """
         last_success = ReportScheduleDAO.find_last_success_log(
             self._report_schedule, session=self._session

@@ -418,7 +417,7 @@ def is_in_grace_period(self) -> bool:

     def is_in_error_grace_period(self) -> bool:
         """
-        Checks if an alert/report on error is on it's notification grace period
+        Checks if an alert/report on error is in its notification grace period
         """
         last_success = ReportScheduleDAO.find_last_error_notification(
             self._report_schedule, session=self._session

@@ -435,7 +434,7 @@ def is_in_error_grace_period(self) -> bool:

     def is_on_working_timeout(self) -> bool:
         """
-        Checks if an alert is on a working timeout
+        Checks if an alert is in a working timeout
         """
         last_working = ReportScheduleDAO.find_last_entered_working_log(
             self._report_schedule, session=self._session

@@ -533,19 +532,30 @@ class ReportSuccessState(BaseReportState):
     current_states = [ReportState.SUCCESS, ReportState.GRACE]

     def next(self) -> None:
+        self.set_state_and_log(ReportState.WORKING)
         if self._report_schedule.type == ReportScheduleType.ALERT:
             if self.is_in_grace_period():
                 self.set_state_and_log(
                     ReportState.GRACE,
                     error_message=str(ReportScheduleAlertGracePeriodError()),
                 )
                 return
-            self.set_state_and_log(
-                ReportState.NOOP,
-                error_message=str(ReportScheduleAlertEndGracePeriodError()),
-            )
-            return
-        self.set_state_and_log(ReportState.WORKING)
+            try:
+                if not AlertCommand(self._report_schedule).run():
+                    self.set_state_and_log(ReportState.NOOP)
+                    return
+            except CommandException as ex:
+                self.send_error(
+                    f"Error occurred for {self._report_schedule.type}:"
+                    f" {self._report_schedule.name}",
+                    str(ex),
+                )
+                self.set_state_and_log(
+                    ReportState.ERROR,
+                    error_message=REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER,
+                )
+                raise ex

         try:
             self.send()
             self.set_state_and_log(ReportState.SUCCESS)
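
Taken together, the execute.py changes reorder ReportSuccessState.next(): the schedule is logged as WORKING up front, and an alert whose grace period has elapsed is re-evaluated via AlertCommand instead of being parked in NOOP with an end-of-grace error. A minimal sketch of the new transition order follows; State and next_state are illustrative stand-ins, not Superset's actual classes, and the enum values are hypothetical.

from enum import Enum

class State(Enum):
    WORKING = "working"
    SUCCESS = "success"
    GRACE = "grace"
    NOOP = "noop"
    ERROR = "error"

def next_state(is_alert: bool, in_grace: bool, alert_triggered) -> State:
    # The real method logs WORKING first and then logs each terminal state;
    # this sketch only returns the terminal state of one execution.
    if is_alert:
        if in_grace:
            # Still inside the grace period: stay in GRACE.
            return State.GRACE
        try:
            # Grace period over: re-evaluate the alert rather than NOOP-ing.
            if not alert_triggered():
                return State.NOOP
        except Exception:
            # The real code also sends an error notification and re-raises.
            return State.ERROR
    # Alert fired (or this is a plain report): send content, then SUCCESS.
    return State.SUCCESS

# Grace period elapsed and the alert condition no longer holds: a clean NOOP,
# not an end-of-grace error as before.
assert next_state(True, False, lambda: False) is State.NOOP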
tests/integration_tests/reports/commands_tests.py (72 changes: 48 additions & 24 deletions)
@@ -365,30 +365,47 @@ def create_alert_slack_chart_success():
     cleanup_report_schedule(report_schedule)


-@pytest.fixture()
-def create_alert_slack_chart_grace():
+@pytest.fixture(
+    params=["alert1",]
+)
+def create_alert_slack_chart_grace(request):
+    param_config = {
+        "alert1": {
+            "sql": "SELECT count(*) from test_table",
+            "validator_type": ReportScheduleValidatorType.OPERATOR,
+            "validator_config_json": '{"op": "<", "threshold": 10}',
+        },
+    }
     with app.app_context():
         chart = db.session.query(Slice).first()
-        report_schedule = create_report_notification(
-            slack_channel="slack_channel",
-            chart=chart,
-            report_type=ReportScheduleType.ALERT,
-        )
-        report_schedule.last_state = ReportState.GRACE
-        report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0)
-
-        log = ReportExecutionLog(
-            report_schedule=report_schedule,
-            state=ReportState.SUCCESS,
-            start_dttm=report_schedule.last_eval_dttm,
-            end_dttm=report_schedule.last_eval_dttm,
-            scheduled_dttm=report_schedule.last_eval_dttm,
-        )
-        db.session.add(log)
-        db.session.commit()
-        yield report_schedule
+        example_database = get_example_database()
+        with create_test_table_context(example_database):
+            report_schedule = create_report_notification(
+                slack_channel="slack_channel",
+                chart=chart,
+                report_type=ReportScheduleType.ALERT,
+                database=example_database,
+                sql=param_config[request.param]["sql"],
+                validator_type=param_config[request.param]["validator_type"],
+                validator_config_json=param_config[request.param][
+                    "validator_config_json"
+                ],
+            )
+            report_schedule.last_state = ReportState.GRACE
+            report_schedule.last_eval_dttm = datetime(2020, 1, 1, 0, 0)
+
+            log = ReportExecutionLog(
+                report_schedule=report_schedule,
+                state=ReportState.SUCCESS,
+                start_dttm=report_schedule.last_eval_dttm,
+                end_dttm=report_schedule.last_eval_dttm,
+                scheduled_dttm=report_schedule.last_eval_dttm,
+            )
+            db.session.add(log)
+            db.session.commit()
+            yield report_schedule

-        cleanup_report_schedule(report_schedule)
+            cleanup_report_schedule(report_schedule)
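
The rewritten fixture uses pytest's parametrized-fixture pattern: each name in params is passed to the fixture as request.param and used to look up a per-alert configuration, so further alert variants can be added as dictionary entries rather than as new fixtures. A stripped-down, runnable sketch of the same pattern, with hypothetical names and no Superset dependencies:

import pytest

@pytest.fixture(params=["alert1"])
def alert_config(request):
    # request.param holds the entry from `params` selected for this test run;
    # every test using this fixture runs once per entry.
    param_config = {
        "alert1": {
            "sql": "SELECT count(*) FROM test_table",
            "validator_config_json": '{"op": "<", "threshold": 10}',
        },
    }
    return param_config[request.param]

def test_alert_config(alert_config):
    assert '"op": "<"' in alert_config["validator_config_json"]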


@@ -1051,11 +1068,18 @@ def test_report_schedule_success_grace(create_alert_slack_chart_success):


 @pytest.mark.usefixtures("create_alert_slack_chart_grace")
-def test_report_schedule_success_grace_end(create_alert_slack_chart_grace):
+@patch("superset.reports.notifications.slack.WebClient.files_upload")
+@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot")
+def test_report_schedule_success_grace_end(
+    screenshot_mock, file_upload_mock, create_alert_slack_chart_grace
+):
     """
     ExecuteReport Command: Test report schedule on grace to noop
     """
-    # set current time to within the grace period
+
+    screenshot_mock.return_value = SCREENSHOT_FILE
+
+    # set current time to after the grace period
     current_time = create_alert_slack_chart_grace.last_eval_dttm + timedelta(
         seconds=create_alert_slack_chart_grace.grace_period + 1
     )

@@ -1066,7 +1090,7 @@ def test_report_schedule_success_grace_end(create_alert_slack_chart_grace):
     ).run()

     db.session.commit()
-    assert create_alert_slack_chart_grace.last_state == ReportState.NOOP
+    assert create_alert_slack_chart_grace.last_state == ReportState.SUCCESS


 @pytest.mark.usefixtures("create_alert_email_chart")
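
The flipped assertion captures the behavioral change: once the grace period has elapsed, the alert is re-evaluated, and with the screenshot and Slack upload mocked out it lands back in SUCCESS rather than NOOP. The timing the test manufactures is plain arithmetic on grace_period seconds; a small sketch, assuming grace_period counts seconds since the last successful evaluation (the 4-hour value below is only an example):

from datetime import datetime, timedelta

def in_grace_period(last_success: datetime, grace_period_s: int, now: datetime) -> bool:
    # In the grace period iff fewer than grace_period_s seconds have passed
    # since the last successful evaluation.
    return now - last_success < timedelta(seconds=grace_period_s)

last_eval = datetime(2020, 1, 1, 0, 0)
grace_period_s = 4 * 60 * 60  # example value, not necessarily Superset's default
now = last_eval + timedelta(seconds=grace_period_s + 1)  # mirrors the test's "+ 1"
assert not in_grace_period(last_eval, grace_period_s, now)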
