From b74bb78f28319d905ad460ec63b732fffd5a2538 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 12:06:46 +0000 Subject: [PATCH 01/23] feat: new report schedule models --- .../49b5a32daba5_add_report_schedules.py | 131 ++++++++++++++ superset/models/reports.py | 166 ++++++++++++++++++ 2 files changed, 297 insertions(+) create mode 100644 superset/migrations/versions/49b5a32daba5_add_report_schedules.py create mode 100644 superset/models/reports.py diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py new file mode 100644 index 0000000000000..a5a48172731d0 --- /dev/null +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -0,0 +1,131 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""add report schedules + +Revision ID: 49b5a32daba5 +Revises: 96e99fb176a0 +Create Date: 2020-11-04 11:06:59.249758 + +""" + +# revision identifiers, used by Alembic. 
+revision = '49b5a32daba5' +down_revision = '96e99fb176a0' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + op.create_table( + "report_schedule", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column( + "type", + sa.Enum("type", "report_schedule", name="reportscheduletype"), + nullable=False, + ), + sa.Column("label", sa.String(length=150), nullable=False), + sa.Column("active", sa.Boolean(), nullable=True), + sa.Column("crontab", sa.String(length=50), nullable=False), + sa.Column("sql", sa.Text(), nullable=True), + sa.Column("chart_id", sa.Integer(), nullable=True), + sa.Column("dashboard_id", sa.Integer(), nullable=True), + sa.Column("database_id", sa.Integer(), nullable=True), + sa.Column( + "email_format", + sa.Enum("visualization", "data", name="reportemailformat"), + nullable=True, + ), + sa.Column("last_eval_dttm", sa.DateTime(), nullable=True), + sa.Column( + "last_state", + sa.Enum("success", "error", name="reportlogstate"), + nullable=True, + ), + sa.Column("last_value", sa.Float(), nullable=True), + sa.Column("last_value_row_json", sa.Text(), nullable=True), + sa.Column("validator_type", sa.String(length=100), nullable=True), + sa.Column("validator_config_json", sa.Text(), nullable=True), + sa.Column("log_retention", sa.Integer(), nullable=False, default=90), + sa.Column("grace_period", sa.Integer(), nullable=False, default=60 * 60 * 4), + sa.ForeignKeyConstraint(["chart_id"], ["slices.id"],), + sa.ForeignKeyConstraint(["dashboard_id"], ["dashboards.id"],), + sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False) + + op.create_table( + "report_execution_log", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("start_dttm", sa.DateTime(), nullable=True), + sa.Column("end_dttm", sa.DateTime(), nullable=True), + sa.Column("observation_dttm", sa.DateTime(), nullable=True), + 
sa.Column("value", sa.Float(), nullable=True), + sa.Column("value_row_json", sa.Text(), nullable=True), + sa.Column( + "state", + sa.Enum("success", "error", name="reportlogstate"), + nullable=True, + ), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.PrimaryKeyConstraint("id"), + ) + + op.create_table( + "report_recipient", + sa.Column("id", sa.Integer(), nullable=False), + + sa.Column( + "type", + sa.Enum("email", "slack", name="reportrecipienttype"), + nullable=True, + ), + sa.Column("value_row_json", sa.Text(), nullable=True), + sa.Column("recipient_config_json", sa.Text(), nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.PrimaryKeyConstraint("id"), + ) + + op.create_table( + "report_schedule_user", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"],), + sa.PrimaryKeyConstraint("id"), + ) + + +def downgrade(): + op.drop_index( + op.f("ix_report_schedule_active"), table_name="report_schedule" + ) + op.drop_table("report_execution_log") + op.drop_table("report_recipient") + op.drop_table("report_schedule_user") + op.drop_table("report_schedule") + # https://github.com/miguelgrinberg/Flask-Migrate/issues/48 + sa.Enum(name='reportscheduletype').drop(op.get_bind(), checkfirst=False) + sa.Enum(name='reportemailformat').drop(op.get_bind(), checkfirst=False) + sa.Enum(name='reportrecipienttype').drop(op.get_bind(), checkfirst=False) + sa.Enum(name='reportlogstate').drop(op.get_bind(), checkfirst=False) diff --git a/superset/models/reports.py 
b/superset/models/reports.py new file mode 100644 index 0000000000000..49ab9763b4ba6 --- /dev/null +++ b/superset/models/reports.py @@ -0,0 +1,166 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=line-too-long,unused-argument,ungrouped-imports +"""A collection of ORM sqlalchemy models for Superset""" +import enum + +from flask_appbuilder import Model +from sqlalchemy import ( + Boolean, + Column, + DateTime, + Enum, + Float, + ForeignKey, + Integer, + String, + Table, + Text, +) +from sqlalchemy.orm import relationship +from sqlalchemy.schema import UniqueConstraint + +from superset.extensions import security_manager +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice + +metadata = Model.metadata # pylint: disable=no-member + + +class ReportScheduleType(str, enum.Enum): + alert = "alert" + report = "report" + + +class ReportRecipientType(str, enum.Enum): + email = "email" + slack = "slack" + + +class ReportLogState(str, enum.Enum): + success = "success" + error = "error" + + +class ReportEmailFormat(str, enum.Enum): + visualization = "Visualization" + data = "Raw data" + + +report_schedule_user = Table( + "report_schedule_user", + 
metadata, + Column("id", Integer, primary_key=True), + Column("user_id", Integer, ForeignKey("ab_user.id")), + Column("report_schedule_id", Integer, ForeignKey("report_schedule.id")), + UniqueConstraint("user_id", "report_schedule_id"), +) + + +class ReportSchedule(Model): + + """ + Report Schedules, supports alerts and reports + """ + + __tablename__ = "report_schedule" + id = Column(Integer, primary_key=True) + type = Column(Enum(ReportScheduleType, name="report_schedule_type"), nullable=False) + label = Column(String(150), nullable=False) + active = Column(Boolean, default=True, index=True) + crontab = Column(String(50), nullable=False) + sql = Column(Text()) + # (Reports) M-O to chart + chart_id = Column(Integer, ForeignKey("slice.id"), nullable=True) + chart = relationship(Slice, backref="report_schedules", foreign_keys=[chart_id]) + # (Reports) M-O to dashboard + dashboard_id = Column(Integer, ForeignKey("dashboards.id"), nullable=True) + dashboard = relationship( + Dashboard, backref="report_schedules", foreign_keys=[dashboard_id] + ) + # (Alerts) M-O to database + database_id = Column(Integer, ForeignKey("dbs.id"), nullable=True) + database = relationship(Database, foreign_keys=[dashboard_id]) + owners = relationship(security_manager.user_model, secondary=report_schedule_user) + + # (Reports) email format + email_format = Column(Enum(ReportEmailFormat, name="report_email_format")) + + # (Alerts) Stamped last observations + last_eval_dttm = Column(DateTime, nullable=True) + last_state = Column(Enum(ReportLogState, name="report_log_state"), nullable=False) + last_value = Column(Float, nullable=True) + last_value_row_json = Column(Text, nullable=True) + + # (Alerts) Observed value validation related columns + validator_type = Column(String(100), nullable=True) + validator_config_json = Column(Text, default="{}") + + # Log retention + log_retention = Column(Integer, default=90) + grace_period = Column(Integer, default=60 * 60 * 4) + + +class 
ReportRecipients(Model): + + """ + Report Recipients, meant to support multiple notification types, eg: Slack, email + """ + + __tablename__ = "report_recipient" + id = Column(Integer, primary_key=True) + type = Column( + Enum(ReportRecipientType, name="report_recipient_type"), nullable=False + ) + recipient_config_json = Column(Text, default="{}") + report_schedule_id = Column( + Integer, ForeignKey("report_schedule.id"), nullable=True + ) + report_schedule = relationship( + ReportSchedule, backref="recipients", foreign_keys=[report_schedule_id] + ) + + +class ReportExecutionLog(Model): + + """ + Report Execution Log, hold the result of the report execution with timestamps, + last observation and possible error messages + """ + + __tablename__ = "report_execution_log" + id = Column(Integer, primary_key=True) + + # Timestamps + start_dttm = Column(DateTime, nullable=True) + end_dttm = Column(DateTime, nullable=True) + + # (Alerts) Observed values + observation_dttm = Column(DateTime, nullable=True) + value = Column(Float, nullable=True) + value_row_json = Column(Text, nullable=True) + + state = Column(Enum(ReportLogState, name="report_log_state"), nullable=False) + error_message = Column(Text, nullable=True) + + report_schedule_id = Column( + Integer, ForeignKey("report_schedule.id"), nullable=True + ) + report_schedule = relationship( + ReportSchedule, backref="logs", foreign_keys=[report_schedule_id] + ) From 703bdab7f6ef0a68e0bda6e58b4050295e3fad13 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 15:09:31 +0000 Subject: [PATCH 02/23] lint and unique constraint --- .../49b5a32daba5_add_report_schedules.py | 31 ++++++++++--------- superset/models/reports.py | 9 ++++-- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index a5a48172731d0..7a1b1cf90f2a3 100644 --- 
a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -23,11 +23,11 @@ """ # revision identifiers, used by Alembic. -revision = '49b5a32daba5' -down_revision = '96e99fb176a0' +revision = "49b5a32daba5" +down_revision = "96e99fb176a0" -from alembic import op import sqlalchemy as sa +from alembic import op def upgrade(): @@ -68,7 +68,12 @@ def upgrade(): sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), sa.PrimaryKeyConstraint("id"), ) - op.create_index(op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False) + op.create_unique_constraint( + "uq_report_schedule_label", "report_schedule", ["label"] + ) + op.create_index( + op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False + ) op.create_table( "report_execution_log", @@ -79,9 +84,7 @@ def upgrade(): sa.Column("value", sa.Float(), nullable=True), sa.Column("value_row_json", sa.Text(), nullable=True), sa.Column( - "state", - sa.Enum("success", "error", name="reportlogstate"), - nullable=True, + "state", sa.Enum("success", "error", name="reportlogstate"), nullable=True, ), sa.Column("error_message", sa.Text(), nullable=True), sa.Column("report_schedule_id", sa.Integer(), nullable=True), @@ -92,7 +95,6 @@ def upgrade(): op.create_table( "report_recipient", sa.Column("id", sa.Integer(), nullable=False), - sa.Column( "type", sa.Enum("email", "slack", name="reportrecipienttype"), @@ -117,15 +119,14 @@ def upgrade(): def downgrade(): - op.drop_index( - op.f("ix_report_schedule_active"), table_name="report_schedule" - ) + op.drop_index(op.f("ix_report_schedule_active"), table_name="report_schedule") + op.drop_constraint("uq_report_schedule_label", "report_schedule", type_="unique") op.drop_table("report_execution_log") op.drop_table("report_recipient") op.drop_table("report_schedule_user") op.drop_table("report_schedule") # https://github.com/miguelgrinberg/Flask-Migrate/issues/48 - 
sa.Enum(name='reportscheduletype').drop(op.get_bind(), checkfirst=False) - sa.Enum(name='reportemailformat').drop(op.get_bind(), checkfirst=False) - sa.Enum(name='reportrecipienttype').drop(op.get_bind(), checkfirst=False) - sa.Enum(name='reportlogstate').drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportscheduletype").drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportemailformat").drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportrecipienttype").drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportlogstate").drop(op.get_bind(), checkfirst=False) diff --git a/superset/models/reports.py b/superset/models/reports.py index 49ab9763b4ba6..e3ed6a4935a3c 100644 --- a/superset/models/reports.py +++ b/superset/models/reports.py @@ -81,7 +81,7 @@ class ReportSchedule(Model): __tablename__ = "report_schedule" id = Column(Integer, primary_key=True) type = Column(Enum(ReportScheduleType, name="report_schedule_type"), nullable=False) - label = Column(String(150), nullable=False) + label = Column(String(150), nullable=False, unique=True) active = Column(Boolean, default=True, index=True) crontab = Column(String(50), nullable=False) sql = Column(Text()) @@ -115,8 +115,11 @@ class ReportSchedule(Model): log_retention = Column(Integer, default=90) grace_period = Column(Integer, default=60 * 60 * 4) + def __repr__(self) -> str: + return str(self.label) -class ReportRecipients(Model): + +class ReportRecipients(Model): # pylint: disable=too-few-public-methods """ Report Recipients, meant to support multiple notification types, eg: Slack, email @@ -136,7 +139,7 @@ class ReportRecipients(Model): ) -class ReportExecutionLog(Model): +class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods """ Report Execution Log, hold the result of the report execution with timestamps, From 6063b8ffa426785683ec861a3977c9c788d48cd3 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 15:44:39 +0000 Subject: [PATCH 03/23] support sqlite 
--- .../49b5a32daba5_add_report_schedules.py | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index 7a1b1cf90f2a3..e08f1bea69455 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -28,6 +28,7 @@ import sqlalchemy as sa from alembic import op +from sqlalchemy.exc import OperationalError def upgrade(): @@ -68,9 +69,14 @@ def upgrade(): sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), sa.PrimaryKeyConstraint("id"), ) - op.create_unique_constraint( - "uq_report_schedule_label", "report_schedule", ["label"] - ) + try: + op.create_unique_constraint( + "uq_report_schedule_label", "report_schedule", ["label"] + ) + except OperationalError: + # Expected to fail on SQLite + pass + op.create_index( op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False ) @@ -120,7 +126,14 @@ def upgrade(): def downgrade(): op.drop_index(op.f("ix_report_schedule_active"), table_name="report_schedule") - op.drop_constraint("uq_report_schedule_label", "report_schedule", type_="unique") + try: + op.drop_constraint( + "uq_report_schedule_label", "report_schedule", type_="unique" + ) + except OperationalError: + # Expected to fail on SQLite + pass + op.drop_table("report_execution_log") op.drop_table("report_recipient") op.drop_table("report_schedule_user") From dfb9198a6fa8c6dda66c45fcc54d857ae6b69562 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 16:06:56 +0000 Subject: [PATCH 04/23] fix sqlite --- .../migrations/versions/49b5a32daba5_add_report_schedules.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index 
e08f1bea69455..8245d5ff54776 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -73,10 +73,9 @@ def upgrade(): op.create_unique_constraint( "uq_report_schedule_label", "report_schedule", ["label"] ) - except OperationalError: + except Exception: # Expected to fail on SQLite pass - op.create_index( op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False ) @@ -130,7 +129,7 @@ def downgrade(): op.drop_constraint( "uq_report_schedule_label", "report_schedule", type_="unique" ) - except OperationalError: + except Exception: # Expected to fail on SQLite pass From e8605ca0c31cd4ec00f51e2ea6e6c2d7146ef8fe Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 18:37:57 +0000 Subject: [PATCH 05/23] add audit mixin and minor fixes --- .../49b5a32daba5_add_report_schedules.py | 79 ++++++++----------- superset/models/reports.py | 67 ++++++++-------- 2 files changed, 70 insertions(+), 76 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index 8245d5ff54776..f32700014539f 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -35,38 +35,33 @@ def upgrade(): op.create_table( "report_schedule", sa.Column("id", sa.Integer(), nullable=False), - sa.Column( - "type", - sa.Enum("type", "report_schedule", name="reportscheduletype"), - nullable=False, - ), - sa.Column("label", sa.String(length=150), nullable=False), - sa.Column("active", sa.Boolean(), nullable=True), + sa.Column("type", sa.String(length=50), nullable=False), + sa.Column("label", sa.String(length=150), nullable=False, unique=True), + sa.Column("active", sa.Boolean(), default=True, nullable=True), sa.Column("crontab", sa.String(length=50), nullable=False), sa.Column("sql", sa.Text(), 
nullable=True), sa.Column("chart_id", sa.Integer(), nullable=True), sa.Column("dashboard_id", sa.Integer(), nullable=True), sa.Column("database_id", sa.Integer(), nullable=True), - sa.Column( - "email_format", - sa.Enum("visualization", "data", name="reportemailformat"), - nullable=True, - ), + sa.Column("email_format", sa.String(length=50), nullable=True), sa.Column("last_eval_dttm", sa.DateTime(), nullable=True), - sa.Column( - "last_state", - sa.Enum("success", "error", name="reportlogstate"), - nullable=True, - ), + sa.Column("last_state", sa.String(length=50), nullable=True), sa.Column("last_value", sa.Float(), nullable=True), sa.Column("last_value_row_json", sa.Text(), nullable=True), sa.Column("validator_type", sa.String(length=100), nullable=True), - sa.Column("validator_config_json", sa.Text(), nullable=True), - sa.Column("log_retention", sa.Integer(), nullable=False, default=90), - sa.Column("grace_period", sa.Integer(), nullable=False, default=60 * 60 * 4), - sa.ForeignKeyConstraint(["chart_id"], ["slices.id"],), - sa.ForeignKeyConstraint(["dashboard_id"], ["dashboards.id"],), - sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), + sa.Column("validator_config_json", sa.Text(), default="{}", nullable=True), + sa.Column("log_retention", sa.Integer(), nullable=True, default=90), + sa.Column("grace_period", sa.Integer(), nullable=True, default=60 * 60 * 4), + # Audit Mixin + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["chart_id"], ["slices.id"]), + sa.ForeignKeyConstraint(["dashboard_id"], ["dashboards.id"]), + sa.ForeignKeyConstraint(["database_id"], ["dbs.id"]), + sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]), + sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]), sa.PrimaryKeyConstraint("id"), ) try: @@ -83,40 +78,41 @@ def 
upgrade(): op.create_table( "report_execution_log", sa.Column("id", sa.Integer(), nullable=False), + sa.Column("scheduled_dttm", sa.DateTime(), nullable=False), sa.Column("start_dttm", sa.DateTime(), nullable=True), sa.Column("end_dttm", sa.DateTime(), nullable=True), sa.Column("observation_dttm", sa.DateTime(), nullable=True), sa.Column("value", sa.Float(), nullable=True), sa.Column("value_row_json", sa.Text(), nullable=True), - sa.Column( - "state", sa.Enum("success", "error", name="reportlogstate"), nullable=True, - ), + sa.Column("state", sa.String(length=50), nullable=False), sa.Column("error_message", sa.Text(), nullable=True), - sa.Column("report_schedule_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.Column("report_schedule_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"]), sa.PrimaryKeyConstraint("id"), ) op.create_table( "report_recipient", sa.Column("id", sa.Integer(), nullable=False), - sa.Column( - "type", - sa.Enum("email", "slack", name="reportrecipienttype"), - nullable=True, - ), - sa.Column("value_row_json", sa.Text(), nullable=True), - sa.Column("recipient_config_json", sa.Text(), nullable=True), - sa.Column("report_schedule_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.Column("type", sa.String(length=50), nullable=False), + sa.Column("recipient_config_json", sa.Text(), default="{}", nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=False), + # Audit Mixin + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"]), + sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]), + 
sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]), sa.PrimaryKeyConstraint("id"), ) op.create_table( "report_schedule_user", sa.Column("id", sa.Integer(), nullable=False), - sa.Column("user_id", sa.Integer(), nullable=True), - sa.Column("report_schedule_id", sa.Integer(), nullable=True), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column("report_schedule_id", sa.Integer(), nullable=False), sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"],), sa.PrimaryKeyConstraint("id"), @@ -137,8 +133,3 @@ def downgrade(): op.drop_table("report_recipient") op.drop_table("report_schedule_user") op.drop_table("report_schedule") - # https://github.com/miguelgrinberg/Flask-Migrate/issues/48 - sa.Enum(name="reportscheduletype").drop(op.get_bind(), checkfirst=False) - sa.Enum(name="reportemailformat").drop(op.get_bind(), checkfirst=False) - sa.Enum(name="reportrecipienttype").drop(op.get_bind(), checkfirst=False) - sa.Enum(name="reportlogstate").drop(op.get_bind(), checkfirst=False) diff --git a/superset/models/reports.py b/superset/models/reports.py index e3ed6a4935a3c..5cb1d08129f91 100644 --- a/superset/models/reports.py +++ b/superset/models/reports.py @@ -23,7 +23,6 @@ Boolean, Column, DateTime, - Enum, Float, ForeignKey, Integer, @@ -37,42 +36,45 @@ from superset.extensions import security_manager from superset.models.core import Database from superset.models.dashboard import Dashboard +from superset.models.helpers import AuditMixinNullable from superset.models.slice import Slice metadata = Model.metadata # pylint: disable=no-member class ReportScheduleType(str, enum.Enum): - alert = "alert" - report = "report" + ALERT = "Alert" + REPORT = "Report" class ReportRecipientType(str, enum.Enum): - email = "email" - slack = "slack" + EMAIL = "Email" + SLACK = "Slack" class ReportLogState(str, enum.Enum): - success = "success" - error = "error" + SUCCESS = "Success" + ERROR = "Error" 
class ReportEmailFormat(str, enum.Enum): - visualization = "Visualization" - data = "Raw data" + VISUALIZATION = "Visualization" + DATA = "Raw data" report_schedule_user = Table( "report_schedule_user", metadata, Column("id", Integer, primary_key=True), - Column("user_id", Integer, ForeignKey("ab_user.id")), - Column("report_schedule_id", Integer, ForeignKey("report_schedule.id")), + Column("user_id", Integer, ForeignKey("ab_user.id"), nullable=False), + Column( + "report_schedule_id", Integer, ForeignKey("report_schedule.id"), nullable=False + ), UniqueConstraint("user_id", "report_schedule_id"), ) -class ReportSchedule(Model): +class ReportSchedule(Model, AuditMixinNullable): """ Report Schedules, supports alerts and reports @@ -80,7 +82,7 @@ class ReportSchedule(Model): __tablename__ = "report_schedule" id = Column(Integer, primary_key=True) - type = Column(Enum(ReportScheduleType, name="report_schedule_type"), nullable=False) + type = Column(String(50), nullable=False) label = Column(String(150), nullable=False, unique=True) active = Column(Boolean, default=True, index=True) crontab = Column(String(50), nullable=False) @@ -99,16 +101,16 @@ class ReportSchedule(Model): owners = relationship(security_manager.user_model, secondary=report_schedule_user) # (Reports) email format - email_format = Column(Enum(ReportEmailFormat, name="report_email_format")) + email_format = Column(String(50)) # (Alerts) Stamped last observations - last_eval_dttm = Column(DateTime, nullable=True) - last_state = Column(Enum(ReportLogState, name="report_log_state"), nullable=False) - last_value = Column(Float, nullable=True) - last_value_row_json = Column(Text, nullable=True) + last_eval_dttm = Column(DateTime) + last_state = Column(String(50)) + last_value = Column(Float) + last_value_row_json = Column(Text) # (Alerts) Observed value validation related columns - validator_type = Column(String(100), nullable=True) + validator_type = Column(String(100)) validator_config_json = Column(Text, 
default="{}") # Log retention @@ -119,7 +121,9 @@ def __repr__(self) -> str: return str(self.label) -class ReportRecipients(Model): # pylint: disable=too-few-public-methods +class ReportRecipients( + Model, AuditMixinNullable +): # pylint: disable=too-few-public-methods """ Report Recipients, meant to support multiple notification types, eg: Slack, email @@ -127,12 +131,10 @@ class ReportRecipients(Model): # pylint: disable=too-few-public-methods __tablename__ = "report_recipient" id = Column(Integer, primary_key=True) - type = Column( - Enum(ReportRecipientType, name="report_recipient_type"), nullable=False - ) + type = Column(String(50), nullable=False) recipient_config_json = Column(Text, default="{}") report_schedule_id = Column( - Integer, ForeignKey("report_schedule.id"), nullable=True + Integer, ForeignKey("report_schedule.id"), nullable=False ) report_schedule = relationship( ReportSchedule, backref="recipients", foreign_keys=[report_schedule_id] @@ -150,19 +152,20 @@ class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods id = Column(Integer, primary_key=True) # Timestamps - start_dttm = Column(DateTime, nullable=True) - end_dttm = Column(DateTime, nullable=True) + scheduled_dttm = Column(DateTime, nullable=False) + start_dttm = Column(DateTime) + end_dttm = Column(DateTime) # (Alerts) Observed values - observation_dttm = Column(DateTime, nullable=True) - value = Column(Float, nullable=True) - value_row_json = Column(Text, nullable=True) + observation_dttm = Column(DateTime) + value = Column(Float) + value_row_json = Column(Text) - state = Column(Enum(ReportLogState, name="report_log_state"), nullable=False) - error_message = Column(Text, nullable=True) + state = Column(String(50), nullable=False) + error_message = Column(Text) report_schedule_id = Column( - Integer, ForeignKey("report_schedule.id"), nullable=True + Integer, ForeignKey("report_schedule.id"), nullable=False ) report_schedule = relationship( ReportSchedule, backref="logs", 
foreign_keys=[report_schedule_id] From bc61ac9e663568859a8a97e9e117c367630cde61 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Thu, 5 Nov 2020 18:09:05 +0000 Subject: [PATCH 06/23] feat(api): alerts and reports REST API --- superset/app.py | 4 + .../49b5a32daba5_add_report_schedules.py | 1 - superset/models/reports.py | 13 +- superset/reports/__init__.py | 16 + superset/reports/api.py | 337 ++++++++++++++++++ superset/reports/commands/__init__.py | 16 + superset/reports/commands/bulk_delete.py | 55 +++ superset/reports/commands/create.py | 123 +++++++ superset/reports/commands/delete.py | 56 +++ superset/reports/commands/exceptions.py | 93 +++++ superset/reports/commands/update.py | 81 +++++ superset/reports/dao.py | 64 ++++ superset/reports/logs/__init__.py | 16 + superset/reports/logs/api.py | 195 ++++++++++ superset/reports/logs/schemas.py | 35 ++ superset/reports/schemas.py | 164 +++++++++ 16 files changed, 1264 insertions(+), 5 deletions(-) create mode 100644 superset/reports/__init__.py create mode 100644 superset/reports/api.py create mode 100644 superset/reports/commands/__init__.py create mode 100644 superset/reports/commands/bulk_delete.py create mode 100644 superset/reports/commands/create.py create mode 100644 superset/reports/commands/delete.py create mode 100644 superset/reports/commands/exceptions.py create mode 100644 superset/reports/commands/update.py create mode 100644 superset/reports/dao.py create mode 100644 superset/reports/logs/__init__.py create mode 100644 superset/reports/logs/api.py create mode 100644 superset/reports/logs/schemas.py create mode 100644 superset/reports/schemas.py diff --git a/superset/app.py b/superset/app.py index dc8d612d144e5..dc0162f17ce1c 100644 --- a/superset/app.py +++ b/superset/app.py @@ -148,6 +148,8 @@ def init_views(self) -> None: from superset.datasets.api import DatasetRestApi from superset.queries.api import QueryRestApi from superset.queries.saved_queries.api import SavedQueryRestApi + from 
superset.reports.api import ReportScheduleRestApi + from superset.reports.logs.api import ReportExecutionLogRestApi from superset.views.access_requests import AccessRequestsModelView from superset.views.alerts import ( AlertLogModelView, @@ -206,6 +208,8 @@ def init_views(self) -> None: appbuilder.add_api(DatasetRestApi) appbuilder.add_api(QueryRestApi) appbuilder.add_api(SavedQueryRestApi) + appbuilder.add_api(ReportScheduleRestApi) + appbuilder.add_api(ReportExecutionLogRestApi) # # Setup regular views # diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index f32700014539f..34b2e56d4cbe9 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -81,7 +81,6 @@ def upgrade(): sa.Column("scheduled_dttm", sa.DateTime(), nullable=False), sa.Column("start_dttm", sa.DateTime(), nullable=True), sa.Column("end_dttm", sa.DateTime(), nullable=True), - sa.Column("observation_dttm", sa.DateTime(), nullable=True), sa.Column("value", sa.Float(), nullable=True), sa.Column("value_row_json", sa.Text(), nullable=True), sa.Column("state", sa.String(length=50), nullable=False), diff --git a/superset/models/reports.py b/superset/models/reports.py index 5cb1d08129f91..4794fdf4089e6 100644 --- a/superset/models/reports.py +++ b/superset/models/reports.py @@ -44,7 +44,13 @@ class ReportScheduleType(str, enum.Enum): ALERT = "Alert" - REPORT = "Report" + REPORT_DASHBOARD = "ReportDashboard" + REPORT_CHART = "ReportChart" + + +class ReportScheduleValidatorType(str, enum.Enum): + NOT_NULL = "not null" + OPERATOR = "operator" class ReportRecipientType(str, enum.Enum): @@ -88,7 +94,7 @@ class ReportSchedule(Model, AuditMixinNullable): crontab = Column(String(50), nullable=False) sql = Column(Text()) # (Reports) M-O to chart - chart_id = Column(Integer, ForeignKey("slice.id"), nullable=True) + chart_id = 
Column(Integer, ForeignKey("slices.id"), nullable=True) chart = relationship(Slice, backref="report_schedules", foreign_keys=[chart_id]) # (Reports) M-O to dashboard dashboard_id = Column(Integer, ForeignKey("dashboards.id"), nullable=True) @@ -97,7 +103,7 @@ class ReportSchedule(Model, AuditMixinNullable): ) # (Alerts) M-O to database database_id = Column(Integer, ForeignKey("dbs.id"), nullable=True) - database = relationship(Database, foreign_keys=[dashboard_id]) + database = relationship(Database, foreign_keys=[database_id]) owners = relationship(security_manager.user_model, secondary=report_schedule_user) # (Reports) email format @@ -157,7 +163,6 @@ class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods end_dttm = Column(DateTime) # (Alerts) Observed values - observation_dttm = Column(DateTime) value = Column(Float) value_row_json = Column(Text) diff --git a/superset/reports/__init__.py b/superset/reports/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/reports/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/superset/reports/api.py b/superset/reports/api.py new file mode 100644 index 0000000000000..5c2423fb1e226 --- /dev/null +++ b/superset/reports/api.py @@ -0,0 +1,337 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +from typing import Any + +from flask import g, request, Response +from flask_appbuilder.api import expose, permission_name, protect, rison, safe +from flask_appbuilder.models.sqla.interface import SQLAInterface +from flask_babel import ngettext +from marshmallow import ValidationError + +from superset.constants import RouteMethod +from superset.models.reports import ReportSchedule +from superset.reports.commands.bulk_delete import BulkDeleteReportScheduleCommand +from superset.reports.commands.create import CreateReportScheduleCommand +from superset.reports.commands.delete import DeleteReportScheduleCommand +from superset.reports.commands.exceptions import ( + ReportScheduleBulkDeleteFailedError, + ReportScheduleBulkDeleteIntegrityError, + ReportScheduleCreateFailedError, + ReportScheduleDeleteFailedError, + ReportScheduleDeleteIntegrityError, + ReportScheduleInvalidError, + ReportScheduleNotFoundError, + ReportScheduleUpdateFailedError, +) +from superset.reports.commands.update import 
class ReportScheduleRestApi(BaseSupersetModelRestApi):
    """CRUD REST API for report schedules (scheduled reports and alerts)."""

    datamodel = SQLAInterface(ReportSchedule)

    include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
        RouteMethod.RELATED,
        "bulk_delete",  # not using RouteMethod since locally defined
    }
    class_permission_name = "ReportSchedule"
    resource_name = "report"
    allow_browser_login = True

    show_columns = [
        "id",
        "label",
        "recipients.id",
        "recipients.type",
        "recipients.recipient_config_json",
    ]
    list_columns = [
        "active",
        "changed_by.first_name",
        "changed_by.last_name",
        "changed_on",
        "changed_on_delta_humanized",
        "created_by.first_name",
        "created_by.last_name",
        "created_on",
        "id",
        "label",
        "type",
    ]
    # Accept exactly the fields declared on the marshmallow schemas so the
    # API surface and validation cannot drift apart.
    add_columns = ReportSchedulePostSchema._declared_fields.keys()
    edit_columns = ReportSchedulePutSchema._declared_fields.keys()
    add_model_schema = ReportSchedulePostSchema()
    edit_model_schema = ReportSchedulePutSchema()

    order_columns = [
        "active",
        "created_by.first_name",
        "changed_by.first_name",
        "changed_on",
        "changed_on_delta_humanized",
        "created_on",
        "label",
        "type",
    ]

    allowed_rel_fields = {"created_by"}

    apispec_parameter_schemas = {
        "get_delete_ids_schema": get_delete_ids_schema,
    }
    openapi_spec_tag = "Report Schedules"
    openapi_spec_methods = openapi_spec_methods_override

    # Route carries the pk converter so it does not collide with
    # bulk_delete, which is registered on DELETE "/".
    @expose("/<int:pk>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("delete")
    def delete(self, pk: int) -> Response:
        """Delete a Report Schedule
        ---
        delete:
          description: >-
            Delete a Report Schedule
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The report schedule pk
          responses:
            200:
              description: Item deleted
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            DeleteReportScheduleCommand(g.user, pk).run()
            return self.response(200, message="OK")
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleDeleteIntegrityError as ex:
            return self.response_422(message=str(ex))
        except ReportScheduleDeleteFailedError as ex:
            logger.error(
                "Error deleting report schedule %s: %s",
                self.__class__.__name__,
                str(ex),
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["POST"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("post")
    def post(self) -> Response:
        """Creates a new Report Schedule
        ---
        post:
          description: >-
            Create a new Report Schedule
          requestBody:
            description: Report Schedule schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
          responses:
            201:
              description: Report schedule added
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            # Schema-level (marshmallow) validation happens here.
            item = self.add_model_schema.load(request.json)
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = CreateReportScheduleCommand(g.user, item).run()
            return self.response(201, id=new_model.id, result=item)
        except ReportScheduleNotFoundError as ex:
            return self.response_400(message=str(ex))
        except ReportScheduleInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ReportScheduleCreateFailedError as ex:
            logger.error(
                "Error creating report schedule %s: %s",
                self.__class__.__name__,
                str(ex),
            )
            return self.response_422(message=str(ex))

    @expose("/<int:pk>", methods=["PUT"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("put")
    def put(self, pk: int) -> Response:
        """Update a Report Schedule
        ---
        put:
          description: >-
            Updates a Report Schedule
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The Report Schedule pk
          requestBody:
            description: Report Schedule schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
          responses:
            200:
              description: Report Schedule changed
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            # Schema-level (marshmallow) validation happens here.  The pk is
            # handed to the command explicitly; it must NOT be injected into
            # the payload (the original `item["layer"] = pk` was copy-pasted
            # from the annotation-layer API and added a bogus key).
            item = self.edit_model_schema.load(request.json)
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = UpdateReportScheduleCommand(g.user, pk, item).run()
            return self.response(200, id=new_model.id, result=item)
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ReportScheduleUpdateFailedError as ex:
            logger.error(
                "Error updating report %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_delete_ids_schema)
    def bulk_delete(self, **kwargs: Any) -> Response:
        """Bulk delete Report Schedules
        ---
        delete:
          description: >-
            Deletes multiple report schedules in a bulk operation.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_delete_ids_schema'
          responses:
            200:
              description: Report Schedule bulk delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        item_ids = kwargs["rison"]
        try:
            BulkDeleteReportScheduleCommand(g.user, item_ids).run()
            return self.response(
                200,
                message=ngettext(
                    "Deleted %(num)d report schedule",
                    "Deleted %(num)d report schedules",
                    num=len(item_ids),
                ),
            )
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleBulkDeleteIntegrityError as ex:
            return self.response_422(message=str(ex))
        except ReportScheduleBulkDeleteFailedError as ex:
            return self.response_422(message=str(ex))
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/reports/commands/bulk_delete.py b/superset/reports/commands/bulk_delete.py new file mode 100644 index 0000000000000..b93b300e021d3 --- /dev/null +++ b/superset/reports/commands/bulk_delete.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
class BulkDeleteReportScheduleCommand(BaseCommand):
    """Delete several report schedules in one operation.

    Validation requires every requested id to resolve to an existing
    model; a single miss aborts the whole bulk delete.
    """

    def __init__(self, user: User, model_ids: List[int]):
        self._actor = user
        self._model_ids = model_ids
        self._models: Optional[List[ReportSchedule]] = None

    def run(self) -> None:
        self.validate()
        try:
            ReportScheduleDAO.bulk_delete(self._models)
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleBulkDeleteFailedError()

    def validate(self) -> None:
        # Resolve ids to models up front; a length mismatch means at least
        # one id did not exist.
        self._models = ReportScheduleDAO.find_by_ids(self._model_ids)
        if not self._models or len(self._models) != len(self._model_ids):
            raise ReportScheduleNotFoundError()
        # TODO handle recipients and logs
class CreateReportScheduleCommand(BaseCommand):
    """Create a new report schedule after validating its related objects.

    Each schedule type (alert / chart report / dashboard report) requires
    exactly one related object (database / chart / dashboard); the mapping
    is declared in ``_validate_by_type`` and applied generically.
    """

    def __init__(self, user: User, data: Dict[str, Any]):
        self._actor = user
        self._properties = data.copy()

        # Per-type spec: the mandatory related field, how to resolve its id,
        # and the validation error to raise when it is missing or unknown.
        self._validate_by_type = {
            ReportScheduleType.ALERT: {
                "field": "database",
                "find_by_id": DatabaseDAO.find_by_id,
                "exception": DatabaseNotFoundValidationError,
            },
            ReportScheduleType.REPORT_DASHBOARD: {
                "field": "dashboard",
                "find_by_id": DashboardDAO.find_by_id,
                "exception": DashboardNotFoundValidationError,
            },
            ReportScheduleType.REPORT_CHART: {
                "field": "chart",
                "find_by_id": ChartDAO.find_by_id,
                "exception": ChartNotFoundValidationError,
            },
        }

    def run(self) -> Model:
        """Validate and persist the new schedule.

        :raises ReportScheduleInvalidError: On validation failure
        :raises ReportScheduleCreateFailedError: On DAO failure
        """
        self.validate()
        try:
            report_schedule = ReportScheduleDAO.create(self._properties)
        except DAOCreateFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleCreateFailedError()
        return report_schedule

    def validate(self) -> None:
        exceptions: List[ValidationError] = []
        owner_ids: Optional[List[int]] = self._properties.get("owners")
        label = self._properties.get("label", "")
        report_type = self._properties.get("type", ReportScheduleType.ALERT)

        # Validate label uniqueness
        if not ReportScheduleDAO.validate_update_uniqueness(label):
            exceptions.append(ReportScheduleLabelUniquenessValidationError())

        # Generic validation by report schedule type
        type_field = cast(str, self._validate_by_type[report_type]["field"])
        type_find_by_id = cast(
            Callable[[Optional[int]], Optional[Model]],
            self._validate_by_type[report_type]["find_by_id"],
        )
        type_exception = cast(
            Type[Exception], self._validate_by_type[report_type]["exception"]
        )
        type_related_id: Optional[int] = self._properties.get(type_field)
        if not type_related_id:
            # Missing id: report once and skip the lookup (the original
            # appended the same not-found error twice here).
            exceptions.append(type_exception())
        else:
            type_related_obj = type_find_by_id(type_related_id)
            if not type_related_obj:
                exceptions.append(type_exception())
            self._properties[type_field] = type_related_obj
        # Remove related fields that don't belong to this report type
        # ex: If it's an Alert remove chart and dashboard keys
        for type_key in set(self._validate_by_type.keys()) - {report_type}:
            self._properties.pop(
                cast(str, self._validate_by_type[type_key]["field"]), None
            )

        # Serialize the validator config for storage.  Default to an empty
        # dict, not the string "{}": json.dumps of a str double-encodes it
        # into '"{}"', which is not a JSON object.
        self._properties["validator_config_json"] = json.dumps(
            self._properties.get("validator_config_json", {})
        )

        try:
            owners = populate_owners(self._actor, owner_ids)
            self._properties["owners"] = owners
        except ValidationError as ex:
            exceptions.append(ex)
        if exceptions:
            exception = ReportScheduleInvalidError()
            exception.add_list(exceptions)
            raise exception
class DeleteReportScheduleCommand(BaseCommand):
    """Delete a single report schedule identified by primary key."""

    def __init__(self, user: User, model_id: int):
        self._actor = user
        self._model_id = model_id
        self._model: Optional[ReportSchedule] = None

    def run(self) -> Model:
        """Validate existence, then delete; returns the deleted model."""
        self.validate()
        try:
            return ReportScheduleDAO.delete(self._model)
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleDeleteFailedError()

    def validate(self) -> None:
        # Resolve the pk; a miss is a hard 404-style failure.
        self._model = ReportScheduleDAO.find_by_id(self._model_id)
        if self._model is None:
            raise ReportScheduleNotFoundError()
        # TODO check integrity
from flask_babel import lazy_gettext as _

from superset.commands.exceptions import (
    CommandException,
    CommandInvalidError,
    CreateFailedError,
    DeleteFailedError,
    UpdateFailedError,
    ValidationError,
)


class DatabaseNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error for database does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Database does not exist"), field_name="database")


class DashboardNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error for dashboard does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Dashboard does not exist"), field_name="dashboard")


class ChartNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error for chart does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Chart does not exist"), field_name="chart")


class ReportScheduleInvalidError(CommandInvalidError):
    message = _("Report Schedule parameters are invalid.")


class ReportScheduleBulkDeleteFailedError(DeleteFailedError):
    message = _("Report Schedule could not be deleted.")


class ReportScheduleCreateFailedError(CreateFailedError):
    message = _("Report Schedule could not be created.")


# Base fixed: an update failure must derive from UpdateFailedError,
# not CreateFailedError (copy-paste error in the original).
class ReportScheduleUpdateFailedError(UpdateFailedError):
    message = _("Report Schedule could not be updated.")


class ReportScheduleNotFoundError(CommandException):
    message = _("Report Schedule not found.")


# Derives DeleteFailedError for consistency with the bulk variant above
# (DeleteFailedError is itself a CommandException, so callers catching the
# broader type are unaffected).
class ReportScheduleDeleteFailedError(DeleteFailedError):
    message = _("Report Schedule delete failed.")


class ReportScheduleDeleteIntegrityError(CommandException):
    message = _("Report Schedule has associated logs or recipients.")


class ReportScheduleBulkDeleteIntegrityError(CommandException):
    message = _("Report Schedule has associated logs or recipients.")


class ReportScheduleLabelUniquenessValidationError(ValidationError):
    """
    Marshmallow validation error for Report Schedule label already exists
    """

    def __init__(self) -> None:
        super().__init__([_("Label must be unique")], field_name="label")
class UpdateReportScheduleCommand(BaseCommand):
    """Update an existing report schedule from a partial payload."""

    def __init__(self, user: User, model_id: int, data: Dict[str, Any]):
        self._actor = user
        self._model_id = model_id
        self._properties = data.copy()
        self._model: Optional[ReportSchedule] = None

    def run(self) -> Model:
        """Validate, then apply the update; returns the updated model."""
        self.validate()
        try:
            return ReportScheduleDAO.update(self._model, self._properties)
        except DAOUpdateFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleUpdateFailedError()

    def validate(self) -> None:
        exceptions: List[ValidationError] = []
        owner_ids: Optional[List[int]] = self._properties.get("owners")
        label = self._properties.get("label", "")

        # Existence is a hard failure; everything else accumulates.
        self._model = ReportScheduleDAO.find_by_id(self._model_id)
        if not self._model:
            raise ReportScheduleNotFoundError()

        # Exclude this row itself from the uniqueness check.
        unique = ReportScheduleDAO.validate_update_uniqueness(
            label, report_schedule_id=self._model_id
        )
        if not unique:
            exceptions.append(ReportScheduleLabelUniquenessValidationError())

        # When the payload omits owners, keep the current ones.
        if owner_ids is None:
            owner_ids = [owner.id for owner in self._model.owners]
        try:
            self._properties["owners"] = populate_owners(self._actor, owner_ids)
        except ValidationError as ex:
            exceptions.append(ex)

        if exceptions:
            invalid = ReportScheduleInvalidError()
            invalid.add_list(exceptions)
            raise invalid
class ReportScheduleDAO(BaseDAO):
    # Concrete DAO bound to ReportSchedule; generic CRUD (create/update/
    # delete/find_by_id[s]) is inherited from BaseDAO.
    model_cls = ReportSchedule

    @staticmethod
    def bulk_delete(
        models: Optional[List[ReportSchedule]], commit: bool = True
    ) -> None:
        """
        Delete the given report schedules in a single bulk query.

        :param models: Report schedules to delete; ``None`` or an empty list
            deletes nothing
        :param commit: Whether to commit (and roll back on error) here; pass
            ``False`` when the caller manages the transaction
        :raises DAODeleteFailedError: If the underlying delete fails
        """
        item_ids = [model.id for model in models] if models else []
        try:
            db.session.query(ReportSchedule).filter(
                ReportSchedule.id.in_(item_ids)
            ).delete(synchronize_session="fetch")
            if commit:
                db.session.commit()
        except SQLAlchemyError:
            if commit:
                db.session.rollback()
            raise DAODeleteFailedError()

    @staticmethod
    def validate_update_uniqueness(
        label: str, report_schedule_id: Optional[int] = None
    ) -> bool:
        """
        Validate if this label is unique.

        :param label: The report schedule label
        :param report_schedule_id: The report schedule current id
            (only for validating on updates, so the row being updated does
            not collide with itself)
        :return: bool
        """
        query = db.session.query(ReportSchedule).filter(ReportSchedule.label == label)
        if report_schedule_id:
            query = query.filter(ReportSchedule.id != report_schedule_id)
        return not db.session.query(query.exists()).scalar()
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/reports/logs/api.py b/superset/reports/logs/api.py new file mode 100644 index 0000000000000..c7e2e099851d4 --- /dev/null +++ b/superset/reports/logs/api.py @@ -0,0 +1,195 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from typing import Any, Dict + +from flask import Response +from flask_appbuilder.api import expose, permission_name, protect, rison, safe +from flask_appbuilder.api.schemas import get_item_schema, get_list_schema +from flask_appbuilder.models.sqla.interface import SQLAInterface + +from superset.constants import RouteMethod +from superset.models.reports import ReportExecutionLog +from superset.reports.logs.schemas import openapi_spec_methods_override +from superset.views.base_api import BaseSupersetModelRestApi + +logger = logging.getLogger(__name__) + + +class ReportExecutionLogRestApi(BaseSupersetModelRestApi): + datamodel = SQLAInterface(ReportExecutionLog) + + include_route_methods = {RouteMethod.GET, RouteMethod.GET_LIST} + class_permission_name = "ReportSchedule" + resource_name = "report" + allow_browser_login = True + + show_columns = [ + "id", + "scheduled_dttm", + "end_dttm", + "start_dttm", + "value", + "value_row_json", + "state", + "error_message", + ] + list_columns = [ + "id", + "end_dttm", + "start_dttm", + "value", + "value_row_json", + "state", + "error_message", + ] + order_columns = [ + "state" "value", + "error_message", + "end_dttm", + "start_dttm", + ] + openapi_spec_tag = "Report Schedules" + openapi_spec_methods = openapi_spec_methods_override + + @staticmethod + def _apply_layered_relation_to_rison( # pylint: disable=invalid-name + layer_id: int, rison_parameters: Dict[str, Any] + ) -> None: + if "filters" not in rison_parameters: + rison_parameters["filters"] = [] + rison_parameters["filters"].append( + {"col": "report_schedule", "opr": "rel_o_m", "value": layer_id} + ) + + @expose("//log/", methods=["GET"]) + @protect() + @safe + @permission_name("get") + @rison(get_list_schema) + def get_list( # pylint: disable=arguments-differ + self, pk: int, **kwargs: Dict[str, Any] + ) -> Response: + """Get a list of report schedule logs + --- + get: + description: >- + Get a list of report schedule logs + parameters: + - in: path + 
schema: + type: integer + description: The report schedule id for these logs + name: pk + - in: query + name: q + content: + application/json: + schema: + $ref: '#/components/schemas/get_list_schema' + responses: + 200: + description: Items from logs + content: + application/json: + schema: + type: object + properties: + ids: + description: >- + A list of log ids + type: array + items: + type: string + count: + description: >- + The total record count on the backend + type: number + result: + description: >- + The result from the get list query + type: array + items: + $ref: '#/components/schemas/{{self.__class__.__name__}}.get_list' # pylint: disable=line-too-long + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 422: + $ref: '#/components/responses/422' + 500: + $ref: '#/components/responses/500' + """ + self._apply_layered_relation_to_rison(pk, kwargs["rison"]) + return self.get_list_headless(**kwargs) + + @expose("/<int:pk>/log/<int:log_id>", methods=["GET"]) + @protect() + @safe + @permission_name("get") + @rison(get_item_schema) + def get( # pylint: disable=arguments-differ + self, pk: int, log_id: int, **kwargs: Dict[str, Any] + ) -> Response: + """Get a report schedule log + --- + get: + description: >- + Get a report schedule log + parameters: + - in: path + schema: + type: integer + name: pk + description: The report schedule pk for log + - in: path + schema: + type: integer + name: log_id + description: The log pk + - in: query + name: q + content: + application/json: + schema: + $ref: '#/components/schemas/get_item_schema' + responses: + 200: + description: Item log + content: + application/json: + schema: + type: object + properties: + id: + description: The log id + type: string + result: + $ref: '#/components/schemas/{{self.__class__.__name__}}.get' + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 404: + $ref: '#/components/responses/404' + 422: + $ref: '#/components/responses/422' + 500: + 
$ref: '#/components/responses/500' + """ + self._apply_layered_relation_to_rison(pk, kwargs["rison"]) + return self.get_headless(log_id, **kwargs) diff --git a/superset/reports/logs/schemas.py b/superset/reports/logs/schemas.py new file mode 100644 index 0000000000000..78aeb864f6b1d --- /dev/null +++ b/superset/reports/logs/schemas.py @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Union + +from marshmallow import fields, Schema, ValidationError +from marshmallow.validate import Length + +from superset.exceptions import SupersetException +from superset.utils import core as utils + +openapi_spec_methods_override = { + "get": {"get": {"description": "Get a report schedule log"}}, + "get_list": { + "get": { + "description": "Get a list of report schedule logs, use Rison or JSON " + "query parameters for filtering, sorting," + " pagination and for selecting specific" + " columns and metadata.", + } + }, +} diff --git a/superset/reports/schemas.py b/superset/reports/schemas.py new file mode 100644 index 0000000000000..83a850d26b621 --- /dev/null +++ b/superset/reports/schemas.py @@ -0,0 +1,164 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Union + +from croniter import croniter +from flask_babel import lazy_gettext as _ +from marshmallow import fields, Schema, validate +from marshmallow.validate import Length, ValidationError + +from superset.models.reports import ReportScheduleType, ReportScheduleValidatorType + +openapi_spec_methods_override = { + "get": {"get": {"description": "Get a report schedule"}}, + "get_list": { + "get": { + "description": "Get a list of report schedules, use Rison or JSON " + "query parameters for filtering, sorting," + " pagination and for selecting specific" + " columns and metadata.", + } + }, + "post": {"post": {"description": "Create a report schedule"}}, + "put": {"put": {"description": "Update a report schedule"}}, + "delete": {"delete": {"description": "Delete a report schedule"}}, +} + +get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}} + +type_description = "The report schedule type" +label_description = "The report schedule label." +crontab_description = ( + "A CRON-like expression." + "[Crontab Guru](https://crontab.guru/) is " + "a helpful resource that can help you craft a CRON expression." +) +sql_description = ( + "A SQL statement that defines whether the alert should get triggered or " + "not. 
The query is expected to return either NULL or a number value." +) +owners_description = ( + "Owner are users ids allowed to delete or change this chart. " + "If left empty you will be one of the owners of the chart." +) +validator_type_description = ( + "Determines when to trigger alert based off value from alert query. " + "Alerts will be triggered with these validator types:\n" + "- Not Null - When the return value is Not NULL, Empty, or 0\n" + "- Operator - When `sql_return_value comparison_operator threshold`" + " is True e.g. `50 <= 75`
Supports the comparison operators <, <=, " + ">, >=, ==, and !=" +) +validator_config_json_op_description = ( + "The operation to compare with a threshold to apply to the SQL output\n" +) +log_retention_description = "How long to keep the logs around for this report (in days)" +grace_period_description = ( + "Once an alert is triggered, how long, in seconds, before " + "Superset nags you again. (in seconds)" +) + + +def validate_crontab(value: Union[bytes, bytearray, str]) -> None: + if not croniter.is_valid(str(value)): + raise ValidationError("Cron expression is not valid") + + +class ValidatorConfigJSONSchema(Schema): + op = fields.String( + description=validator_config_json_op_description, + validate=validate.OneOf(choices=["<", "<=", ">", ">=", "==", "!="]), + ) + threshold = fields.Integer() + + +class ReportSchedulePostSchema(Schema): + type = fields.String( + description=type_description, + allow_none=False, + validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)), + ) + label = fields.String( + description=label_description, + allow_none=False, + validate=[Length(1, 150)], + example="Daily dashboard email", + ) + active = fields.Boolean() + crontab = fields.String( + description=crontab_description, + validate=[validate_crontab, Length(1, 50)], + example="*/5 * * * * *", + ) + sql = fields.String( + description=sql_description, example="SELECT value FROM time_series_table" + ) + chart = fields.Integer(required=False) + dashboard = fields.Integer(required=False) + database = fields.Integer(required=False) + owners = fields.List(fields.Integer(description=owners_description)) + email_format = fields.String(validate=[Length(1, 50)]) + validator_type = fields.String( + description=validator_type_description, + validate=validate.OneOf( + choices=tuple(key.value for key in ReportScheduleValidatorType) + ), + ) + validator_config_json = fields.Nested(ValidatorConfigJSONSchema) + log_retention = 
fields.Integer(description=log_retention_description, example=90) + grace_period = fields.Integer(description=grace_period_description, example=14400) + + +class ReportSchedulePutSchema(Schema): + type = fields.String( + description=type_description, + required=False, + validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)), + ) + label = fields.String( + description=label_description, required=False, validate=[Length(1, 150)] + ) + active = fields.Boolean(required=False) + crontab = fields.String( + description=crontab_description, + validate=[validate_crontab, Length(1, 50)], + required=False, + ) + sql = fields.String( + description=sql_description, + example="SELECT value FROM time_series_table", + required=False, + ) + chart = fields.Integer(required=False) + dashboard = fields.Integer(required=False) + database = fields.Integer(required=False) + owners = fields.List(fields.Integer(description=owners_description), required=False) + email_format = fields.String(validate=[Length(1, 50)], required=False) + validator_type = fields.String( + description=validator_type_description, + validate=validate.OneOf( + choices=tuple(key.value for key in ReportScheduleValidatorType) + ), + required=False, + ) + validator_config_json = fields.Nested(ValidatorConfigJSONSchema, required=False) + log_retention = fields.Integer( + description=log_retention_description, example=90, required=False + ) + grace_period = fields.Integer( + description=grace_period_description, example=14400, required=False + ) From 6030089aad09958b83a768aa22cc643eee56670a Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 12:06:46 +0000 Subject: [PATCH 07/23] feat: new report schedule models --- .../49b5a32daba5_add_report_schedules.py | 131 ++++++++++++++ superset/models/reports.py | 166 ++++++++++++++++++ 2 files changed, 297 insertions(+) create mode 100644 superset/migrations/versions/49b5a32daba5_add_report_schedules.py create mode 100644 
superset/models/reports.py diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py new file mode 100644 index 0000000000000..a5a48172731d0 --- /dev/null +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -0,0 +1,131 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""add report schedules + +Revision ID: 49b5a32daba5 +Revises: 96e99fb176a0 +Create Date: 2020-11-04 11:06:59.249758 + +""" + +# revision identifiers, used by Alembic. 
+revision = '49b5a32daba5' +down_revision = '96e99fb176a0' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + op.create_table( + "report_schedule", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column( + "type", + sa.Enum("type", "report_schedule", name="reportscheduletype"), + nullable=False, + ), + sa.Column("label", sa.String(length=150), nullable=False), + sa.Column("active", sa.Boolean(), nullable=True), + sa.Column("crontab", sa.String(length=50), nullable=False), + sa.Column("sql", sa.Text(), nullable=True), + sa.Column("chart_id", sa.Integer(), nullable=True), + sa.Column("dashboard_id", sa.Integer(), nullable=True), + sa.Column("database_id", sa.Integer(), nullable=True), + sa.Column( + "email_format", + sa.Enum("visualization", "data", name="reportemailformat"), + nullable=True, + ), + sa.Column("last_eval_dttm", sa.DateTime(), nullable=True), + sa.Column( + "last_state", + sa.Enum("success", "error", name="reportlogstate"), + nullable=True, + ), + sa.Column("last_value", sa.Float(), nullable=True), + sa.Column("last_value_row_json", sa.Text(), nullable=True), + sa.Column("validator_type", sa.String(length=100), nullable=True), + sa.Column("validator_config_json", sa.Text(), nullable=True), + sa.Column("log_retention", sa.Integer(), nullable=False, default=90), + sa.Column("grace_period", sa.Integer(), nullable=False, default=60 * 60 * 4), + sa.ForeignKeyConstraint(["chart_id"], ["slices.id"],), + sa.ForeignKeyConstraint(["dashboard_id"], ["dashboards.id"],), + sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index(op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False) + + op.create_table( + "report_execution_log", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("start_dttm", sa.DateTime(), nullable=True), + sa.Column("end_dttm", sa.DateTime(), nullable=True), + sa.Column("observation_dttm", sa.DateTime(), nullable=True), + 
sa.Column("value", sa.Float(), nullable=True), + sa.Column("value_row_json", sa.Text(), nullable=True), + sa.Column( + "state", + sa.Enum("success", "error", name="reportlogstate"), + nullable=True, + ), + sa.Column("error_message", sa.Text(), nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.PrimaryKeyConstraint("id"), + ) + + op.create_table( + "report_recipient", + sa.Column("id", sa.Integer(), nullable=False), + + sa.Column( + "type", + sa.Enum("email", "slack", name="reportrecipienttype"), + nullable=True, + ), + sa.Column("value_row_json", sa.Text(), nullable=True), + sa.Column("recipient_config_json", sa.Text(), nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.PrimaryKeyConstraint("id"), + ) + + op.create_table( + "report_schedule_user", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"],), + sa.PrimaryKeyConstraint("id"), + ) + + +def downgrade(): + op.drop_index( + op.f("ix_report_schedule_active"), table_name="report_schedule" + ) + op.drop_table("report_execution_log") + op.drop_table("report_recipient") + op.drop_table("report_schedule_user") + op.drop_table("report_schedule") + # https://github.com/miguelgrinberg/Flask-Migrate/issues/48 + sa.Enum(name='reportscheduletype').drop(op.get_bind(), checkfirst=False) + sa.Enum(name='reportemailformat').drop(op.get_bind(), checkfirst=False) + sa.Enum(name='reportrecipienttype').drop(op.get_bind(), checkfirst=False) + sa.Enum(name='reportlogstate').drop(op.get_bind(), checkfirst=False) diff --git a/superset/models/reports.py 
b/superset/models/reports.py new file mode 100644 index 0000000000000..49ab9763b4ba6 --- /dev/null +++ b/superset/models/reports.py @@ -0,0 +1,166 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=line-too-long,unused-argument,ungrouped-imports +"""A collection of ORM sqlalchemy models for Superset""" +import enum + +from flask_appbuilder import Model +from sqlalchemy import ( + Boolean, + Column, + DateTime, + Enum, + Float, + ForeignKey, + Integer, + String, + Table, + Text, +) +from sqlalchemy.orm import relationship +from sqlalchemy.schema import UniqueConstraint + +from superset.extensions import security_manager +from superset.models.core import Database +from superset.models.dashboard import Dashboard +from superset.models.slice import Slice + +metadata = Model.metadata # pylint: disable=no-member + + +class ReportScheduleType(str, enum.Enum): + alert = "alert" + report = "report" + + +class ReportRecipientType(str, enum.Enum): + email = "email" + slack = "slack" + + +class ReportLogState(str, enum.Enum): + success = "success" + error = "error" + + +class ReportEmailFormat(str, enum.Enum): + visualization = "Visualization" + data = "Raw data" + + +report_schedule_user = Table( + "report_schedule_user", + 
metadata, + Column("id", Integer, primary_key=True), + Column("user_id", Integer, ForeignKey("ab_user.id")), + Column("report_schedule_id", Integer, ForeignKey("report_schedule.id")), + UniqueConstraint("user_id", "report_schedule_id"), +) + + +class ReportSchedule(Model): + + """ + Report Schedules, supports alerts and reports + """ + + __tablename__ = "report_schedule" + id = Column(Integer, primary_key=True) + type = Column(Enum(ReportScheduleType, name="report_schedule_type"), nullable=False) + label = Column(String(150), nullable=False) + active = Column(Boolean, default=True, index=True) + crontab = Column(String(50), nullable=False) + sql = Column(Text()) + # (Reports) M-O to chart + chart_id = Column(Integer, ForeignKey("slices.id"), nullable=True) + chart = relationship(Slice, backref="report_schedules", foreign_keys=[chart_id]) + # (Reports) M-O to dashboard + dashboard_id = Column(Integer, ForeignKey("dashboards.id"), nullable=True) + dashboard = relationship( + Dashboard, backref="report_schedules", foreign_keys=[dashboard_id] + ) + # (Alerts) M-O to database + database_id = Column(Integer, ForeignKey("dbs.id"), nullable=True) + database = relationship(Database, foreign_keys=[database_id]) + owners = relationship(security_manager.user_model, secondary=report_schedule_user) + + # (Reports) email format + email_format = Column(Enum(ReportEmailFormat, name="report_email_format")) + + # (Alerts) Stamped last observations + last_eval_dttm = Column(DateTime, nullable=True) + last_state = Column(Enum(ReportLogState, name="report_log_state"), nullable=True) + last_value = Column(Float, nullable=True) + last_value_row_json = Column(Text, nullable=True) + + # (Alerts) Observed value validation related columns + validator_type = Column(String(100), nullable=True) + validator_config_json = Column(Text, default="{}") + + # Log retention + log_retention = Column(Integer, default=90) + grace_period = Column(Integer, default=60 * 60 * 4) + + +class 
ReportRecipients(Model): + + """ + Report Recipients, meant to support multiple notification types, eg: Slack, email + """ + + __tablename__ = "report_recipient" + id = Column(Integer, primary_key=True) + type = Column( + Enum(ReportRecipientType, name="report_recipient_type"), nullable=False + ) + recipient_config_json = Column(Text, default="{}") + report_schedule_id = Column( + Integer, ForeignKey("report_schedule.id"), nullable=True + ) + report_schedule = relationship( + ReportSchedule, backref="recipients", foreign_keys=[report_schedule_id] + ) + + +class ReportExecutionLog(Model): + + """ + Report Execution Log, hold the result of the report execution with timestamps, + last observation and possible error messages + """ + + __tablename__ = "report_execution_log" + id = Column(Integer, primary_key=True) + + # Timestamps + start_dttm = Column(DateTime, nullable=True) + end_dttm = Column(DateTime, nullable=True) + + # (Alerts) Observed values + observation_dttm = Column(DateTime, nullable=True) + value = Column(Float, nullable=True) + value_row_json = Column(Text, nullable=True) + + state = Column(Enum(ReportLogState, name="report_log_state"), nullable=False) + error_message = Column(Text, nullable=True) + + report_schedule_id = Column( + Integer, ForeignKey("report_schedule.id"), nullable=True + ) + report_schedule = relationship( + ReportSchedule, backref="logs", foreign_keys=[report_schedule_id] + ) From 313a3668ee5e2451b3f670498ce4a0f9d7c69c5f Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 15:09:31 +0000 Subject: [PATCH 08/23] lint and unique constraint --- .../49b5a32daba5_add_report_schedules.py | 31 ++++++++++--------- superset/models/reports.py | 9 ++++-- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index a5a48172731d0..7a1b1cf90f2a3 100644 --- 
a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -23,11 +23,11 @@ """ # revision identifiers, used by Alembic. -revision = '49b5a32daba5' -down_revision = '96e99fb176a0' +revision = "49b5a32daba5" +down_revision = "96e99fb176a0" -from alembic import op import sqlalchemy as sa +from alembic import op def upgrade(): @@ -68,7 +68,12 @@ def upgrade(): sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), sa.PrimaryKeyConstraint("id"), ) - op.create_index(op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False) + op.create_unique_constraint( + "uq_report_schedule_label", "report_schedule", ["label"] + ) + op.create_index( + op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False + ) op.create_table( "report_execution_log", @@ -79,9 +84,7 @@ def upgrade(): sa.Column("value", sa.Float(), nullable=True), sa.Column("value_row_json", sa.Text(), nullable=True), sa.Column( - "state", - sa.Enum("success", "error", name="reportlogstate"), - nullable=True, + "state", sa.Enum("success", "error", name="reportlogstate"), nullable=True, ), sa.Column("error_message", sa.Text(), nullable=True), sa.Column("report_schedule_id", sa.Integer(), nullable=True), @@ -92,7 +95,6 @@ def upgrade(): op.create_table( "report_recipient", sa.Column("id", sa.Integer(), nullable=False), - sa.Column( "type", sa.Enum("email", "slack", name="reportrecipienttype"), @@ -117,15 +119,14 @@ def upgrade(): def downgrade(): - op.drop_index( - op.f("ix_report_schedule_active"), table_name="report_schedule" - ) + op.drop_index(op.f("ix_report_schedule_active"), table_name="report_schedule") + op.drop_constraint("uq_report_schedule_label", "report_schedule", type_="unique") op.drop_table("report_execution_log") op.drop_table("report_recipient") op.drop_table("report_schedule_user") op.drop_table("report_schedule") # https://github.com/miguelgrinberg/Flask-Migrate/issues/48 - 
sa.Enum(name='reportscheduletype').drop(op.get_bind(), checkfirst=False) - sa.Enum(name='reportemailformat').drop(op.get_bind(), checkfirst=False) - sa.Enum(name='reportrecipienttype').drop(op.get_bind(), checkfirst=False) - sa.Enum(name='reportlogstate').drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportscheduletype").drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportemailformat").drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportrecipienttype").drop(op.get_bind(), checkfirst=False) + sa.Enum(name="reportlogstate").drop(op.get_bind(), checkfirst=False) diff --git a/superset/models/reports.py b/superset/models/reports.py index 49ab9763b4ba6..e3ed6a4935a3c 100644 --- a/superset/models/reports.py +++ b/superset/models/reports.py @@ -81,7 +81,7 @@ class ReportSchedule(Model): __tablename__ = "report_schedule" id = Column(Integer, primary_key=True) type = Column(Enum(ReportScheduleType, name="report_schedule_type"), nullable=False) - label = Column(String(150), nullable=False) + label = Column(String(150), nullable=False, unique=True) active = Column(Boolean, default=True, index=True) crontab = Column(String(50), nullable=False) sql = Column(Text()) @@ -115,8 +115,11 @@ class ReportSchedule(Model): log_retention = Column(Integer, default=90) grace_period = Column(Integer, default=60 * 60 * 4) + def __repr__(self) -> str: + return str(self.label) -class ReportRecipients(Model): + +class ReportRecipients(Model): # pylint: disable=too-few-public-methods """ Report Recipients, meant to support multiple notification types, eg: Slack, email @@ -136,7 +139,7 @@ class ReportRecipients(Model): ) -class ReportExecutionLog(Model): +class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods """ Report Execution Log, hold the result of the report execution with timestamps, From 0c53b26edc7391138ca77ca74536e079d2480932 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 15:44:39 +0000 Subject: [PATCH 09/23] support sqlite 
--- .../49b5a32daba5_add_report_schedules.py | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index 7a1b1cf90f2a3..e08f1bea69455 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -28,6 +28,7 @@ import sqlalchemy as sa from alembic import op +from sqlalchemy.exc import OperationalError def upgrade(): @@ -68,9 +69,14 @@ def upgrade(): sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), sa.PrimaryKeyConstraint("id"), ) - op.create_unique_constraint( - "uq_report_schedule_label", "report_schedule", ["label"] - ) + try: + op.create_unique_constraint( + "uq_report_schedule_label", "report_schedule", ["label"] + ) + except OperationalError: + # Expected to fail on SQLite + pass + op.create_index( op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False ) @@ -120,7 +126,14 @@ def upgrade(): def downgrade(): op.drop_index(op.f("ix_report_schedule_active"), table_name="report_schedule") - op.drop_constraint("uq_report_schedule_label", "report_schedule", type_="unique") + try: + op.drop_constraint( + "uq_report_schedule_label", "report_schedule", type_="unique" + ) + except OperationalError: + # Expected to fail on SQLite + pass + op.drop_table("report_execution_log") op.drop_table("report_recipient") op.drop_table("report_schedule_user") From eaf79cb94d36f96a76a3b449aa4fe7cc336e2207 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 16:06:56 +0000 Subject: [PATCH 10/23] fix sqlite --- .../migrations/versions/49b5a32daba5_add_report_schedules.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index 
e08f1bea69455..8245d5ff54776 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -73,10 +73,9 @@ def upgrade(): op.create_unique_constraint( "uq_report_schedule_label", "report_schedule", ["label"] ) - except OperationalError: + except Exception: # Expected to fail on SQLite pass - op.create_index( op.f("ix_report_schedule_active"), "report_schedule", ["active"], unique=False ) @@ -130,7 +129,7 @@ def downgrade(): op.drop_constraint( "uq_report_schedule_label", "report_schedule", type_="unique" ) - except OperationalError: + except Exception: # Expected to fail on SQLite pass From ce1a227dc27b3f1d472722fc1847b1e2a02e1e16 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 4 Nov 2020 18:37:57 +0000 Subject: [PATCH 11/23] add audit mixin and minor fixes --- .../49b5a32daba5_add_report_schedules.py | 79 ++++++++----------- superset/models/reports.py | 67 ++++++++-------- 2 files changed, 70 insertions(+), 76 deletions(-) diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index 8245d5ff54776..f32700014539f 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -35,38 +35,33 @@ def upgrade(): op.create_table( "report_schedule", sa.Column("id", sa.Integer(), nullable=False), - sa.Column( - "type", - sa.Enum("type", "report_schedule", name="reportscheduletype"), - nullable=False, - ), - sa.Column("label", sa.String(length=150), nullable=False), - sa.Column("active", sa.Boolean(), nullable=True), + sa.Column("type", sa.String(length=50), nullable=False), + sa.Column("label", sa.String(length=150), nullable=False, unique=True), + sa.Column("active", sa.Boolean(), default=True, nullable=True), sa.Column("crontab", sa.String(length=50), nullable=False), sa.Column("sql", sa.Text(), 
nullable=True), sa.Column("chart_id", sa.Integer(), nullable=True), sa.Column("dashboard_id", sa.Integer(), nullable=True), sa.Column("database_id", sa.Integer(), nullable=True), - sa.Column( - "email_format", - sa.Enum("visualization", "data", name="reportemailformat"), - nullable=True, - ), + sa.Column("email_format", sa.String(length=50), nullable=True), sa.Column("last_eval_dttm", sa.DateTime(), nullable=True), - sa.Column( - "last_state", - sa.Enum("success", "error", name="reportlogstate"), - nullable=True, - ), + sa.Column("last_state", sa.String(length=50), nullable=True), sa.Column("last_value", sa.Float(), nullable=True), sa.Column("last_value_row_json", sa.Text(), nullable=True), sa.Column("validator_type", sa.String(length=100), nullable=True), - sa.Column("validator_config_json", sa.Text(), nullable=True), - sa.Column("log_retention", sa.Integer(), nullable=False, default=90), - sa.Column("grace_period", sa.Integer(), nullable=False, default=60 * 60 * 4), - sa.ForeignKeyConstraint(["chart_id"], ["slices.id"],), - sa.ForeignKeyConstraint(["dashboard_id"], ["dashboards.id"],), - sa.ForeignKeyConstraint(["database_id"], ["dbs.id"],), + sa.Column("validator_config_json", sa.Text(), default="{}", nullable=True), + sa.Column("log_retention", sa.Integer(), nullable=True, default=90), + sa.Column("grace_period", sa.Integer(), nullable=True, default=60 * 60 * 4), + # Audit Mixin + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["chart_id"], ["slices.id"]), + sa.ForeignKeyConstraint(["dashboard_id"], ["dashboards.id"]), + sa.ForeignKeyConstraint(["database_id"], ["dbs.id"]), + sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]), + sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]), sa.PrimaryKeyConstraint("id"), ) try: @@ -83,40 +78,41 @@ def 
upgrade(): op.create_table( "report_execution_log", sa.Column("id", sa.Integer(), nullable=False), + sa.Column("scheduled_dttm", sa.DateTime(), nullable=False), sa.Column("start_dttm", sa.DateTime(), nullable=True), sa.Column("end_dttm", sa.DateTime(), nullable=True), sa.Column("observation_dttm", sa.DateTime(), nullable=True), sa.Column("value", sa.Float(), nullable=True), sa.Column("value_row_json", sa.Text(), nullable=True), - sa.Column( - "state", sa.Enum("success", "error", name="reportlogstate"), nullable=True, - ), + sa.Column("state", sa.String(length=50), nullable=False), sa.Column("error_message", sa.Text(), nullable=True), - sa.Column("report_schedule_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.Column("report_schedule_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"]), sa.PrimaryKeyConstraint("id"), ) op.create_table( "report_recipient", sa.Column("id", sa.Integer(), nullable=False), - sa.Column( - "type", - sa.Enum("email", "slack", name="reportrecipienttype"), - nullable=True, - ), - sa.Column("value_row_json", sa.Text(), nullable=True), - sa.Column("recipient_config_json", sa.Text(), nullable=True), - sa.Column("report_schedule_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"],), + sa.Column("type", sa.String(length=50), nullable=False), + sa.Column("recipient_config_json", sa.Text(), default="{}", nullable=True), + sa.Column("report_schedule_id", sa.Integer(), nullable=False), + # Audit Mixin + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_schedule_id"], ["report_schedule.id"]), + sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]), + 
class ReportScheduleType(str, enum.Enum):
    """Kind of schedule: an alert, or a report anchored to a dashboard or chart."""

    ALERT = "Alert"
    REPORT_DASHBOARD = "ReportDashboard"
    REPORT_CHART = "ReportChart"


class ReportScheduleValidatorType(str, enum.Enum):
    """How an alert's observed value is validated against its threshold config."""

    NOT_NULL = "not null"
    OPERATOR = "operator"


class ReportRecipientType(str, enum.Enum):
    """Delivery channel for a report recipient."""

    EMAIL = "Email"
    SLACK = "Slack"


class ReportLogState(str, enum.Enum):
    """Outcome recorded for one execution in the report execution log."""

    SUCCESS = "Success"
    ERROR = "Error"


class ReportEmailFormat(str, enum.Enum):
    """Payload format for emailed reports: a rendered visualization or raw data."""

    VISUALIZATION = "Visualization"
    DATA = "Raw data"
default="{}") # Log retention @@ -119,7 +121,9 @@ def __repr__(self) -> str: return str(self.label) -class ReportRecipients(Model): # pylint: disable=too-few-public-methods +class ReportRecipients( + Model, AuditMixinNullable +): # pylint: disable=too-few-public-methods """ Report Recipients, meant to support multiple notification types, eg: Slack, email @@ -127,12 +131,10 @@ class ReportRecipients(Model): # pylint: disable=too-few-public-methods __tablename__ = "report_recipient" id = Column(Integer, primary_key=True) - type = Column( - Enum(ReportRecipientType, name="report_recipient_type"), nullable=False - ) + type = Column(String(50), nullable=False) recipient_config_json = Column(Text, default="{}") report_schedule_id = Column( - Integer, ForeignKey("report_schedule.id"), nullable=True + Integer, ForeignKey("report_schedule.id"), nullable=False ) report_schedule = relationship( ReportSchedule, backref="recipients", foreign_keys=[report_schedule_id] @@ -150,19 +152,20 @@ class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods id = Column(Integer, primary_key=True) # Timestamps - start_dttm = Column(DateTime, nullable=True) - end_dttm = Column(DateTime, nullable=True) + scheduled_dttm = Column(DateTime, nullable=False) + start_dttm = Column(DateTime) + end_dttm = Column(DateTime) # (Alerts) Observed values - observation_dttm = Column(DateTime, nullable=True) - value = Column(Float, nullable=True) - value_row_json = Column(Text, nullable=True) + observation_dttm = Column(DateTime) + value = Column(Float) + value_row_json = Column(Text) - state = Column(Enum(ReportLogState, name="report_log_state"), nullable=False) - error_message = Column(Text, nullable=True) + state = Column(String(50), nullable=False) + error_message = Column(Text) report_schedule_id = Column( - Integer, ForeignKey("report_schedule.id"), nullable=True + Integer, ForeignKey("report_schedule.id"), nullable=False ) report_schedule = relationship( ReportSchedule, backref="logs", 
foreign_keys=[report_schedule_id] From 6e75d1c5cb419de23a38659208d0330e8b9f314e Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Thu, 5 Nov 2020 18:09:05 +0000 Subject: [PATCH 12/23] feat(api): alerts and reports REST API --- superset/app.py | 4 + .../49b5a32daba5_add_report_schedules.py | 1 - superset/models/reports.py | 13 +- superset/reports/__init__.py | 16 + superset/reports/api.py | 337 ++++++++++++++++++ superset/reports/commands/__init__.py | 16 + superset/reports/commands/bulk_delete.py | 55 +++ superset/reports/commands/create.py | 123 +++++++ superset/reports/commands/delete.py | 56 +++ superset/reports/commands/exceptions.py | 93 +++++ superset/reports/commands/update.py | 81 +++++ superset/reports/dao.py | 64 ++++ superset/reports/logs/__init__.py | 16 + superset/reports/logs/api.py | 195 ++++++++++ superset/reports/logs/schemas.py | 35 ++ superset/reports/schemas.py | 164 +++++++++ 16 files changed, 1264 insertions(+), 5 deletions(-) create mode 100644 superset/reports/__init__.py create mode 100644 superset/reports/api.py create mode 100644 superset/reports/commands/__init__.py create mode 100644 superset/reports/commands/bulk_delete.py create mode 100644 superset/reports/commands/create.py create mode 100644 superset/reports/commands/delete.py create mode 100644 superset/reports/commands/exceptions.py create mode 100644 superset/reports/commands/update.py create mode 100644 superset/reports/dao.py create mode 100644 superset/reports/logs/__init__.py create mode 100644 superset/reports/logs/api.py create mode 100644 superset/reports/logs/schemas.py create mode 100644 superset/reports/schemas.py diff --git a/superset/app.py b/superset/app.py index 427ac11de721e..ba4c5204b98d0 100644 --- a/superset/app.py +++ b/superset/app.py @@ -148,6 +148,8 @@ def init_views(self) -> None: from superset.datasets.api import DatasetRestApi from superset.queries.api import QueryRestApi from superset.queries.saved_queries.api import SavedQueryRestApi + from 
superset.reports.api import ReportScheduleRestApi + from superset.reports.logs.api import ReportExecutionLogRestApi from superset.views.access_requests import AccessRequestsModelView from superset.views.alerts import ( AlertLogModelView, @@ -206,6 +208,8 @@ def init_views(self) -> None: appbuilder.add_api(DatasetRestApi) appbuilder.add_api(QueryRestApi) appbuilder.add_api(SavedQueryRestApi) + appbuilder.add_api(ReportScheduleRestApi) + appbuilder.add_api(ReportExecutionLogRestApi) # # Setup regular views # diff --git a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py index f32700014539f..34b2e56d4cbe9 100644 --- a/superset/migrations/versions/49b5a32daba5_add_report_schedules.py +++ b/superset/migrations/versions/49b5a32daba5_add_report_schedules.py @@ -81,7 +81,6 @@ def upgrade(): sa.Column("scheduled_dttm", sa.DateTime(), nullable=False), sa.Column("start_dttm", sa.DateTime(), nullable=True), sa.Column("end_dttm", sa.DateTime(), nullable=True), - sa.Column("observation_dttm", sa.DateTime(), nullable=True), sa.Column("value", sa.Float(), nullable=True), sa.Column("value_row_json", sa.Text(), nullable=True), sa.Column("state", sa.String(length=50), nullable=False), diff --git a/superset/models/reports.py b/superset/models/reports.py index 5cb1d08129f91..4794fdf4089e6 100644 --- a/superset/models/reports.py +++ b/superset/models/reports.py @@ -44,7 +44,13 @@ class ReportScheduleType(str, enum.Enum): ALERT = "Alert" - REPORT = "Report" + REPORT_DASHBOARD = "ReportDashboard" + REPORT_CHART = "ReportChart" + + +class ReportScheduleValidatorType(str, enum.Enum): + NOT_NULL = "not null" + OPERATOR = "operator" class ReportRecipientType(str, enum.Enum): @@ -88,7 +94,7 @@ class ReportSchedule(Model, AuditMixinNullable): crontab = Column(String(50), nullable=False) sql = Column(Text()) # (Reports) M-O to chart - chart_id = Column(Integer, ForeignKey("slice.id"), nullable=True) + chart_id = 
Column(Integer, ForeignKey("slices.id"), nullable=True) chart = relationship(Slice, backref="report_schedules", foreign_keys=[chart_id]) # (Reports) M-O to dashboard dashboard_id = Column(Integer, ForeignKey("dashboards.id"), nullable=True) @@ -97,7 +103,7 @@ class ReportSchedule(Model, AuditMixinNullable): ) # (Alerts) M-O to database database_id = Column(Integer, ForeignKey("dbs.id"), nullable=True) - database = relationship(Database, foreign_keys=[dashboard_id]) + database = relationship(Database, foreign_keys=[database_id]) owners = relationship(security_manager.user_model, secondary=report_schedule_user) # (Reports) email format @@ -157,7 +163,6 @@ class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods end_dttm = Column(DateTime) # (Alerts) Observed values - observation_dttm = Column(DateTime) value = Column(Float) value_row_json = Column(Text) diff --git a/superset/reports/__init__.py b/superset/reports/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/reports/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
import logging
from typing import Any

from flask import g, request, Response
from flask_appbuilder.api import expose, permission_name, protect, rison, safe
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
from marshmallow import ValidationError

from superset.constants import RouteMethod
from superset.models.reports import ReportSchedule
from superset.reports.commands.bulk_delete import BulkDeleteReportScheduleCommand
from superset.reports.commands.create import CreateReportScheduleCommand
from superset.reports.commands.delete import DeleteReportScheduleCommand
from superset.reports.commands.exceptions import (
    ReportScheduleBulkDeleteFailedError,
    ReportScheduleBulkDeleteIntegrityError,
    ReportScheduleCreateFailedError,
    ReportScheduleDeleteFailedError,
    ReportScheduleDeleteIntegrityError,
    ReportScheduleInvalidError,
    ReportScheduleNotFoundError,
    ReportScheduleUpdateFailedError,
)
from superset.reports.commands.update import UpdateReportScheduleCommand
from superset.reports.schemas import (
    get_delete_ids_schema,
    openapi_spec_methods_override,
    ReportSchedulePostSchema,
    ReportSchedulePutSchema,
)
from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics

logger = logging.getLogger(__name__)


class ReportScheduleRestApi(BaseSupersetModelRestApi):
    """CRUD REST API for report schedules (alerts and reports)."""

    datamodel = SQLAInterface(ReportSchedule)

    include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
        RouteMethod.RELATED,
        "bulk_delete",  # not using RouteMethod since locally defined
    }
    class_permission_name = "ReportSchedule"
    resource_name = "report"
    allow_browser_login = True

    show_columns = [
        "id",
        "label",
        "recipients.id",
        "recipients.type",
        "recipients.recipient_config_json",
    ]
    list_columns = [
        "active",
        "changed_by.first_name",
        "changed_by.last_name",
        "changed_on",
        "changed_on_delta_humanized",
        "created_by.first_name",
        "created_by.last_name",
        "created_on",
        "id",
        "label",
        "type",
    ]
    add_columns = ReportSchedulePostSchema._declared_fields.keys()
    edit_columns = ReportSchedulePutSchema._declared_fields.keys()
    add_model_schema = ReportSchedulePostSchema()
    edit_model_schema = ReportSchedulePutSchema()

    order_columns = [
        "active",
        "created_by.first_name",
        "changed_by.first_name",
        "changed_on",
        "changed_on_delta_humanized",
        "created_on",
        "label",
        "type",
    ]

    allowed_rel_fields = {"created_by"}

    apispec_parameter_schemas = {
        "get_delete_ids_schema": get_delete_ids_schema,
    }
    openapi_spec_tag = "Report Schedules"
    openapi_spec_methods = openapi_spec_methods_override

    # BUGFIX: the route must carry the pk converter; exposing this at "/"
    # both broke `DELETE /report/<pk>` and collided with bulk_delete's
    # `DELETE /` route.
    @expose("/<int:pk>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("delete")
    def delete(self, pk: int) -> Response:
        """Delete a Report Schedule
        ---
        delete:
          description: >-
            Delete a Report Schedule
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The report schedule pk
          responses:
            200:
              description: Item deleted
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            DeleteReportScheduleCommand(g.user, pk).run()
            return self.response(200, message="OK")
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleDeleteIntegrityError as ex:
            # Schedule still has dependent rows (logs/recipients)
            return self.response_422(message=str(ex))
        except ReportScheduleDeleteFailedError as ex:
            logger.error(
                "Error deleting report schedule %s: %s",
                self.__class__.__name__,
                str(ex),
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["POST"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("post")
    def post(self) -> Response:
        """Creates a new Report Schedule
        ---
        post:
          description: >-
            Create a new Report Schedule
          requestBody:
            description: Report Schedule schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
          responses:
            201:
              description: Report schedule added
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            # This validates custom Schema with custom validations
            item = self.add_model_schema.load(request.json)
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = CreateReportScheduleCommand(g.user, item).run()
            return self.response(201, id=new_model.id, result=item)
        except ReportScheduleNotFoundError as ex:
            return self.response_400(message=str(ex))
        except ReportScheduleInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ReportScheduleCreateFailedError as ex:
            logger.error(
                "Error creating report schedule %s: %s",
                self.__class__.__name__,
                str(ex),
            )
            return self.response_422(message=str(ex))

    # BUGFIX: route must carry the pk converter (was "/", which never
    # received the pk argument).
    @expose("/<int:pk>", methods=["PUT"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("put")
    def put(self, pk: int) -> Response:
        """Updates a Report Schedule
        ---
        put:
          description: >-
            Updates a Report Schedule
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The Report Schedule pk
          requestBody:
            description: Report Schedule schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
          responses:
            200:
              description: Report Schedule changed
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            # This validates custom Schema with custom validations.
            # BUGFIX: removed `item["layer"] = pk`, a copy-paste leftover from
            # the annotation-layer API that injected a bogus "layer" key into
            # the update payload; the pk is passed to the command instead.
            item = self.edit_model_schema.load(request.json)
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = UpdateReportScheduleCommand(g.user, pk, item).run()
            return self.response(200, id=new_model.id, result=item)
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ReportScheduleUpdateFailedError as ex:
            logger.error(
                "Error updating report %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_delete_ids_schema)
    def bulk_delete(self, **kwargs: Any) -> Response:
        """Delete bulk Report Schedule layers
        ---
        delete:
          description: >-
            Deletes multiple report schedules in a bulk operation.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_delete_ids_schema'
          responses:
            200:
              description: Report Schedule bulk delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        item_ids = kwargs["rison"]
        try:
            BulkDeleteReportScheduleCommand(g.user, item_ids).run()
            return self.response(
                200,
                message=ngettext(
                    "Deleted %(num)d report schedule",
                    "Deleted %(num)d report schedules",
                    num=len(item_ids),
                ),
            )
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleBulkDeleteIntegrityError as ex:
            return self.response_422(message=str(ex))
        except ReportScheduleBulkDeleteFailedError as ex:
            return self.response_422(message=str(ex))
import logging
from typing import List, Optional

from flask_appbuilder.security.sqla.models import User

from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.models.reports import ReportSchedule
from superset.reports.commands.exceptions import (
    ReportScheduleBulkDeleteFailedError,
    ReportScheduleBulkDeleteIntegrityError,
    ReportScheduleNotFoundError,
)
from superset.reports.dao import ReportScheduleDAO

logger = logging.getLogger(__name__)


class BulkDeleteReportScheduleCommand(BaseCommand):
    """Delete a batch of report schedules in one operation."""

    def __init__(self, user: User, model_ids: List[int]):
        self._actor = user
        self._model_ids = model_ids
        # Resolved by validate(); None until then.
        self._models: Optional[List[ReportSchedule]] = None

    def run(self) -> None:
        """Validate the requested ids, then delete the schedules in bulk."""
        self.validate()
        try:
            ReportScheduleDAO.bulk_delete(self._models)
        except DAODeleteFailedError as ex:
            # Log the underlying DAO failure, surface the command-level error
            # so the API layer can map it to a 422.
            logger.exception(ex.exception)
            raise ReportScheduleBulkDeleteFailedError()

    def validate(self) -> None:
        """Resolve ``self._models``; raise if any requested id is unknown."""
        self._models = ReportScheduleDAO.find_by_ids(self._model_ids)
        # Every id in the request must resolve to an existing schedule.
        if not self._models or len(self._models) != len(self._model_ids):
            raise ReportScheduleNotFoundError()
        # TODO handle recipients and logs
import json
import logging
from typing import Any, Callable, cast, Dict, List, Optional, Type

from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User
from marshmallow import ValidationError

from superset.charts.dao import ChartDAO
from superset.commands.base import BaseCommand
from superset.commands.utils import populate_owners
from superset.dao.exceptions import DAOCreateFailedError
from superset.dashboards.dao import DashboardDAO
from superset.databases.dao import DatabaseDAO
from superset.models.reports import ReportScheduleType
from superset.reports.commands.exceptions import (
    ChartNotFoundValidationError,
    DashboardNotFoundValidationError,
    DatabaseNotFoundValidationError,
    ReportScheduleCreateFailedError,
    ReportScheduleInvalidError,
    ReportScheduleLabelUniquenessValidationError,
)
from superset.reports.dao import ReportScheduleDAO

logger = logging.getLogger(__name__)


class CreateReportScheduleCommand(BaseCommand):
    """Validate a creation payload and persist a new report schedule."""

    def __init__(self, user: User, data: Dict[str, Any]):
        self._actor = user
        self._properties = data.copy()

        # Each report type is anchored to exactly one related object
        # (alert -> database, report -> dashboard or chart); this table
        # drives the generic existence validation in validate().
        self._validate_by_type = {
            ReportScheduleType.ALERT: {
                "field": "database",
                "find_by_id": DatabaseDAO.find_by_id,
                "exception": DatabaseNotFoundValidationError,
            },
            ReportScheduleType.REPORT_DASHBOARD: {
                "field": "dashboard",
                "find_by_id": DashboardDAO.find_by_id,
                "exception": DashboardNotFoundValidationError,
            },
            ReportScheduleType.REPORT_CHART: {
                "field": "chart",
                "find_by_id": ChartDAO.find_by_id,
                "exception": ChartNotFoundValidationError,
            },
        }

    def run(self) -> Model:
        """Validate then create the schedule; raises on failure.

        :raises ReportScheduleInvalidError: payload failed validation
        :raises ReportScheduleCreateFailedError: DAO-level create failed
        """
        self.validate()
        try:
            report_schedule = ReportScheduleDAO.create(self._properties)
        except DAOCreateFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleCreateFailedError()
        return report_schedule

    def validate(self) -> None:
        """Collect all validation errors and raise them as one invalid error."""
        exceptions: List[ValidationError] = []
        owner_ids: Optional[List[int]] = self._properties.get("owners")
        label = self._properties.get("label", "")
        report_type = self._properties.get("type", ReportScheduleType.ALERT)

        # Validate label uniqueness
        if not ReportScheduleDAO.validate_update_uniqueness(label):
            exceptions.append(ReportScheduleLabelUniquenessValidationError())

        # Generic validation by report schedule type
        type_field = cast(str, self._validate_by_type[report_type]["field"])
        type_find_by_id = cast(
            Callable[[Optional[int]], Optional[Model]],
            self._validate_by_type[report_type]["find_by_id"],
        )
        type_exception = cast(
            Type[Exception], self._validate_by_type[report_type]["exception"]
        )
        type_related_id: Optional[int] = self._properties.get(type_field)
        if not type_related_id:
            # BUGFIX: report the missing-id error once; the original fell
            # through, called find_by_id(None) and appended the same
            # not-found error a second time.
            exceptions.append(type_exception())
        else:
            type_related_obj = type_find_by_id(type_related_id)
            if not type_related_obj:
                exceptions.append(type_exception())
            else:
                # Replace the raw id with the ORM object the DAO expects
                self._properties[type_field] = type_related_obj
        # Remove existing related fields that don't belong to this report type
        # ex: If it's an Alert remove chart and dashboard keys
        for type_key in set(self._validate_by_type.keys()) - {report_type}:
            self._properties.pop(
                cast(str, self._validate_by_type[type_key]["field"]), None
            )

        # Serialize the validator config for storage.
        # BUGFIX: default to the empty dict {} — the previous default of the
        # string "{}" was double-encoded by json.dumps into '"{}"'.
        self._properties["validator_config_json"] = json.dumps(
            self._properties.get("validator_config_json", {})
        )

        try:
            owners = populate_owners(self._actor, owner_ids)
            self._properties["owners"] = owners
        except ValidationError as ex:
            exceptions.append(ex)
        if exceptions:
            exception = ReportScheduleInvalidError()
            exception.add_list(exceptions)
            raise exception
import logging
from typing import Optional

from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User

from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.models.reports import ReportSchedule
from superset.reports.commands.exceptions import (
    ReportScheduleDeleteFailedError,
    ReportScheduleDeleteIntegrityError,
    ReportScheduleNotFoundError,
)
from superset.reports.dao import ReportScheduleDAO

logger = logging.getLogger(__name__)


class DeleteReportScheduleCommand(BaseCommand):
    """Delete a single report schedule identified by primary key."""

    def __init__(self, user: User, model_id: int):
        self._actor = user
        self._model_id = model_id
        # Resolved by validate(); None until then.
        self._model: Optional[ReportSchedule] = None

    def run(self) -> Model:
        """Validate and delete the schedule, returning the deleted model."""
        self.validate()
        try:
            return ReportScheduleDAO.delete(self._model)
        except DAODeleteFailedError as ex:
            # Log the root cause, raise the command-level error for the API.
            logger.exception(ex.exception)
            raise ReportScheduleDeleteFailedError()

    def validate(self) -> None:
        """Resolve ``self._model``; raise if the id does not exist."""
        self._model = ReportScheduleDAO.find_by_id(self._model_id)
        if not self._model:
            raise ReportScheduleNotFoundError()
        # TODO check integrity
from flask_babel import lazy_gettext as _

from superset.commands.exceptions import (
    CommandException,
    CommandInvalidError,
    CreateFailedError,
    DeleteFailedError,
    UpdateFailedError,
    ValidationError,
)


class DatabaseNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error for database does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Database does not exist"), field_name="database")


class DashboardNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error for dashboard does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Dashboard does not exist"), field_name="dashboard")


class ChartNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error for chart does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Chart does not exist"), field_name="chart")


class ReportScheduleInvalidError(CommandInvalidError):
    message = _("Report Schedule parameters are invalid.")


class ReportScheduleBulkDeleteFailedError(DeleteFailedError):
    message = _("Report Schedule could not be deleted.")


class ReportScheduleCreateFailedError(CreateFailedError):
    message = _("Report Schedule could not be created.")


# BUGFIX: was inheriting CreateFailedError, which misclassified update
# failures in generic command-error handling.
class ReportScheduleUpdateFailedError(UpdateFailedError):
    message = _("Report Schedule could not be updated.")


class ReportScheduleNotFoundError(CommandException):
    message = _("Report Schedule not found.")


# NOTE(review): consider inheriting DeleteFailedError for consistency with
# ReportScheduleBulkDeleteFailedError — confirm against generic handlers.
class ReportScheduleDeleteFailedError(CommandException):
    message = _("Report Schedule delete failed.")


class ReportScheduleDeleteIntegrityError(CommandException):
    message = _("Report Schedule has associated logs or recipients.")


class ReportScheduleBulkDeleteIntegrityError(CommandException):
    message = _("Report Schedule has associated logs or recipients.")


class ReportScheduleLabelUniquenessValidationError(ValidationError):
    """
    Marshmallow validation error for Report Schedule label already exists
    """

    def __init__(self) -> None:
        super().__init__([_("Label must be unique")], field_name="label")
+import logging +from typing import Any, Dict, List, Optional + +from flask_appbuilder.models.sqla import Model +from flask_appbuilder.security.sqla.models import User +from marshmallow import ValidationError + +from superset.commands.base import BaseCommand +from superset.commands.utils import populate_owners +from superset.dao.exceptions import DAOUpdateFailedError +from superset.models.reports import ReportSchedule +from superset.reports.commands.exceptions import ( + ReportScheduleInvalidError, + ReportScheduleLabelUniquenessValidationError, + ReportScheduleNotFoundError, + ReportScheduleUpdateFailedError, +) +from superset.reports.dao import ReportScheduleDAO + +logger = logging.getLogger(__name__) + + +class UpdateReportScheduleCommand(BaseCommand): + def __init__(self, user: User, model_id: int, data: Dict[str, Any]): + self._actor = user + self._model_id = model_id + self._properties = data.copy() + self._model: Optional[ReportSchedule] = None + + def run(self) -> Model: + self.validate() + try: + report_schedule = ReportScheduleDAO.update(self._model, self._properties) + except DAOUpdateFailedError as ex: + logger.exception(ex.exception) + raise ReportScheduleUpdateFailedError() + return report_schedule + + def validate(self) -> None: + exceptions: List[ValidationError] = list() + owner_ids: Optional[List[int]] = self._properties.get("owners") + + label = self._properties.get("label", "") + self._model = ReportScheduleDAO.find_by_id(self._model_id) + + if not self._model: + raise ReportScheduleNotFoundError() + + if not ReportScheduleDAO.validate_update_uniqueness( + label, report_schedule_id=self._model_id + ): + exceptions.append(ReportScheduleLabelUniquenessValidationError()) + + # Validate/Populate owner + if owner_ids is None: + owner_ids = [owner.id for owner in self._model.owners] + try: + owners = populate_owners(self._actor, owner_ids) + self._properties["owners"] = owners + except ValidationError as ex: + exceptions.append(ex) + if exceptions: + 
exception = ReportScheduleInvalidError() + exception.add_list(exceptions) + raise exception diff --git a/superset/reports/dao.py b/superset/reports/dao.py new file mode 100644 index 0000000000000..73692b981e08f --- /dev/null +++ b/superset/reports/dao.py @@ -0,0 +1,64 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from typing import List, Optional, Union + +from sqlalchemy.exc import SQLAlchemyError + +from superset.dao.base import BaseDAO +from superset.dao.exceptions import DAODeleteFailedError +from superset.extensions import db +from superset.models.reports import ReportSchedule + +logger = logging.getLogger(__name__) + + +class ReportScheduleDAO(BaseDAO): + model_cls = ReportSchedule + + @staticmethod + def bulk_delete( + models: Optional[List[ReportSchedule]], commit: bool = True + ) -> None: + item_ids = [model.id for model in models] if models else [] + try: + db.session.query(ReportSchedule).filter( + ReportSchedule.id.in_(item_ids) + ).delete(synchronize_session="fetch") + if commit: + db.session.commit() + except SQLAlchemyError: + if commit: + db.session.rollback() + raise DAODeleteFailedError() + + @staticmethod + def validate_update_uniqueness( + label: str, report_schedule_id: Optional[int] = None + ) -> bool: + """ + Validate if this label is unique. + + :param name: The annotation layer name + :param report_schedule_id: The report schedule current id + (only for validating on updates) + :return: bool + """ + query = db.session.query(ReportSchedule).filter(ReportSchedule.label == label) + if report_schedule_id: + query = query.filter(ReportSchedule.id != report_schedule_id) + return not db.session.query(query.exists()).scalar() diff --git a/superset/reports/logs/__init__.py b/superset/reports/logs/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/superset/reports/logs/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/reports/logs/api.py b/superset/reports/logs/api.py new file mode 100644 index 0000000000000..c7e2e099851d4 --- /dev/null +++ b/superset/reports/logs/api.py @@ -0,0 +1,195 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from typing import Any, Dict + +from flask import Response +from flask_appbuilder.api import expose, permission_name, protect, rison, safe +from flask_appbuilder.api.schemas import get_item_schema, get_list_schema +from flask_appbuilder.models.sqla.interface import SQLAInterface + +from superset.constants import RouteMethod +from superset.models.reports import ReportExecutionLog +from superset.reports.logs.schemas import openapi_spec_methods_override +from superset.views.base_api import BaseSupersetModelRestApi + +logger = logging.getLogger(__name__) + + +class ReportExecutionLogRestApi(BaseSupersetModelRestApi): + datamodel = SQLAInterface(ReportExecutionLog) + + include_route_methods = {RouteMethod.GET, RouteMethod.GET_LIST} + class_permission_name = "ReportSchedule" + resource_name = "report" + allow_browser_login = True + + show_columns = [ + "id", + "scheduled_dttm", + "end_dttm", + "start_dttm", + "value", + "value_row_json", + "state", + "error_message", + ] + list_columns = [ + "id", + "end_dttm", + "start_dttm", + "value", + "value_row_json", + "state", + "error_message", + ] + order_columns = [ + "state" "value", + "error_message", + "end_dttm", + "start_dttm", + ] + openapi_spec_tag = "Report Schedules" + openapi_spec_methods = openapi_spec_methods_override + + @staticmethod + def _apply_layered_relation_to_rison( # pylint: disable=invalid-name + layer_id: int, rison_parameters: Dict[str, Any] + ) -> None: + if "filters" not in rison_parameters: + rison_parameters["filters"] = [] + rison_parameters["filters"].append( + {"col": "report_schedule", "opr": "rel_o_m", "value": layer_id} + ) + + @expose("//log/", methods=["GET"]) + @protect() + @safe + @permission_name("get") + @rison(get_list_schema) + def get_list( # pylint: disable=arguments-differ + self, pk: int, **kwargs: Dict[str, Any] + ) -> Response: + """Get a list of report schedule logs + --- + get: + description: >- + Get a list of report schedule logs + parameters: + - in: path + 
schema: + type: integer + description: The report schedule id for these logs + name: pk + - in: query + name: q + content: + application/json: + schema: + $ref: '#/components/schemas/get_list_schema' + responses: + 200: + description: Items from logs + content: + application/json: + schema: + type: object + properties: + ids: + description: >- + A list of log ids + type: array + items: + type: string + count: + description: >- + The total record count on the backend + type: number + result: + description: >- + The result from the get list query + type: array + items: + $ref: '#/components/schemas/{{self.__class__.__name__}}.get_list' # pylint: disable=line-too-long + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 422: + $ref: '#/components/responses/422' + 500: + $ref: '#/components/responses/500' + """ + self._apply_layered_relation_to_rison(pk, kwargs["rison"]) + return self.get_list_headless(**kwargs) + + @expose("//log/", methods=["GET"]) + @protect() + @safe + @permission_name("get") + @rison(get_item_schema) + def get( # pylint: disable=arguments-differ + self, pk: int, log_id: int, **kwargs: Dict[str, Any] + ) -> Response: + """Get a report schedule log + --- + get: + description: >- + Get a report schedule log + parameters: + - in: path + schema: + type: integer + name: pk + description: The report schedule pk for log + - in: path + schema: + type: integer + name: log_id + description: The log pk + - in: query + name: q + content: + application/json: + schema: + $ref: '#/components/schemas/get_item_schema' + responses: + 200: + description: Item log + content: + application/json: + schema: + type: object + properties: + id: + description: The log id + type: string + result: + $ref: '#/components/schemas/{{self.__class__.__name__}}.get' + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 404: + $ref: '#/components/responses/404' + 422: + $ref: '#/components/responses/422' + 500: + 
$ref: '#/components/responses/500' + """ + self._apply_layered_relation_to_rison(pk, kwargs["rison"]) + return self.get_headless(log_id, **kwargs) diff --git a/superset/reports/logs/schemas.py b/superset/reports/logs/schemas.py new file mode 100644 index 0000000000000..78aeb864f6b1d --- /dev/null +++ b/superset/reports/logs/schemas.py @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Union + +from marshmallow import fields, Schema, ValidationError +from marshmallow.validate import Length + +from superset.exceptions import SupersetException +from superset.utils import core as utils + +openapi_spec_methods_override = { + "get": {"get": {"description": "Get a report schedule log"}}, + "get_list": { + "get": { + "description": "Get a list of report schedule logs, use Rison or JSON " + "query parameters for filtering, sorting," + " pagination and for selecting specific" + " columns and metadata.", + } + }, +} diff --git a/superset/reports/schemas.py b/superset/reports/schemas.py new file mode 100644 index 0000000000000..83a850d26b621 --- /dev/null +++ b/superset/reports/schemas.py @@ -0,0 +1,164 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Union + +from croniter import croniter +from flask_babel import lazy_gettext as _ +from marshmallow import fields, Schema, validate +from marshmallow.validate import Length, ValidationError + +from superset.models.reports import ReportScheduleType, ReportScheduleValidatorType + +openapi_spec_methods_override = { + "get": {"get": {"description": "Get a report schedule"}}, + "get_list": { + "get": { + "description": "Get a list of report schedules, use Rison or JSON " + "query parameters for filtering, sorting," + " pagination and for selecting specific" + " columns and metadata.", + } + }, + "post": {"post": {"description": "Create a report schedule"}}, + "put": {"put": {"description": "Update a report schedule"}}, + "delete": {"delete": {"description": "Delete a report schedule"}}, +} + +get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}} + +type_description = "The report schedule type" +label_description = "The report schedule label." +crontab_description = ( + "A CRON-like expression." + "[Crontab Guru](https://crontab.guru/) is " + "a helpful resource that can help you craft a CRON expression." +) +sql_description = ( + "A SQL statement that defines whether the alert should get triggered or " + "not. 
The query is expected to return either NULL or a number value." +) +owners_description = ( + "Owner are users ids allowed to delete or change this chart. " + "If left empty you will be one of the owners of the chart." +) +validator_type_description = ( + "Determines when to trigger alert based off value from alert query. " + "Alerts will be triggered with these validator types:\n" + "- Not Null - When the return value is Not NULL, Empty, or 0\n" + "- Operator - When `sql_return_value comparison_operator threshold`" + " is True e.g. `50 <= 75`
Supports the comparison operators <, <=, " + ">, >=, ==, and !=" +) +validator_config_json_op_description = ( + "The operation to compare with a threshold to apply to the SQL output\n" +) +log_retention_description = "How long to keep the logs around for this report (in days)" +grace_period_description = ( + "Once an alert is triggered, how long, in seconds, before " + "Superset nags you again. (in seconds)" +) + + +def validate_crontab(value: Union[bytes, bytearray, str]) -> None: + if not croniter.is_valid(str(value)): + raise ValidationError("Cron expression is not valid") + + +class ValidatorConfigJSONSchema(Schema): + op = fields.String( + description=validator_config_json_op_description, + validate=validate.OneOf(choices=["<", "<=", ">", ">=", "==", "!="]), + ) + threshold = fields.Integer() + + +class ReportSchedulePostSchema(Schema): + type = fields.String( + description=type_description, + allow_none=False, + validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)), + ) + label = fields.String( + description=label_description, + allow_none=False, + validate=[Length(1, 150)], + example="Daily dashboard email", + ) + active = fields.Boolean() + crontab = fields.String( + description=crontab_description, + validate=[validate_crontab, Length(1, 50)], + example="*/5 * * * * *", + ) + sql = fields.String( + description=sql_description, example="SELECT value FROM time_series_table" + ) + chart = fields.Integer(required=False) + dashboard = fields.Integer(required=False) + database = fields.Integer(required=False) + owners = fields.List(fields.Integer(description=owners_description)) + email_format = fields.String(validate=[Length(1, 50)]) + validator_type = fields.String( + description=validator_type_description, + validate=validate.OneOf( + choices=tuple(key.value for key in ReportScheduleValidatorType) + ), + ) + validator_config_json = fields.Nested(ValidatorConfigJSONSchema) + log_retention = 
fields.Integer(description=log_retention_description, example=90) + grace_period = fields.Integer(description=grace_period_description, example=14400) + + +class ReportSchedulePutSchema(Schema): + type = fields.String( + description=type_description, + required=False, + validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)), + ) + label = fields.String( + description=label_description, required=False, validate=[Length(1, 150)] + ) + active = fields.Boolean(required=False) + crontab = fields.String( + description=crontab_description, + validate=[validate_crontab, Length(1, 50)], + required=False, + ) + sql = fields.String( + description=sql_description, + example="SELECT value FROM time_series_table", + required=False, + ) + chart = fields.Integer(required=False) + dashboard = fields.Integer(required=False) + database = fields.Integer(required=False) + owners = fields.List(fields.Integer(description=owners_description), required=False) + email_format = fields.String(validate=[Length(1, 50)], required=False) + validator_type = fields.String( + description=validator_type_description, + validate=validate.OneOf( + choices=tuple(key.value for key in ReportScheduleValidatorType) + ), + required=False, + ) + validator_config_json = fields.Nested(ValidatorConfigJSONSchema, required=False) + log_retention = fields.Integer( + description=log_retention_description, example=90, required=False + ) + grace_period = fields.Integer( + description=grace_period_description, example=14400, required=False + ) From 089fa26e9aa25f60683e94271c1d405bf43a6117 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Fri, 6 Nov 2020 17:16:46 +0000 Subject: [PATCH 13/23] draft working version --- superset/dao/base.py | 4 +- superset/models/reports.py | 11 ++-- superset/reports/api.py | 30 +++++++-- superset/reports/commands/base.py | 63 +++++++++++++++++++ superset/reports/commands/bulk_delete.py | 2 - superset/reports/commands/create.py | 39 ++++++++---- 
superset/reports/commands/delete.py | 1 - superset/reports/commands/exceptions.py | 24 +++++++- superset/reports/commands/update.py | 34 ++++++++--- superset/reports/dao.py | 77 ++++++++++++++++++++++-- superset/reports/logs/api.py | 3 +- superset/reports/logs/schemas.py | 7 --- superset/reports/schemas.py | 64 ++++++++++++++++---- 13 files changed, 296 insertions(+), 63 deletions(-) create mode 100644 superset/reports/commands/base.py diff --git a/superset/dao/base.py b/superset/dao/base.py index c5db30167b1bb..6b33c4e638d4f 100644 --- a/superset/dao/base.py +++ b/superset/dao/base.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Type from flask_appbuilder.models.filters import BaseFilter from flask_appbuilder.models.sqla import Model @@ -35,7 +35,7 @@ class BaseDAO: Base DAO, implement base CRUD sqlalchemy operations """ - model_cls: Optional[Model] = None + model_cls: Optional[Type[Model]] = None """ Child classes need to state the Model class so they don't need to implement basic create, update and delete methods diff --git a/superset/models/reports.py b/superset/models/reports.py index 62818229cfcb2..731d1f9629166 100644 --- a/superset/models/reports.py +++ b/superset/models/reports.py @@ -14,7 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=line-too-long,unused-argument,ungrouped-imports """A collection of ORM sqlalchemy models for Superset""" import enum @@ -30,7 +29,7 @@ Table, Text, ) -from sqlalchemy.orm import relationship +from sqlalchemy.orm import backref, relationship from sqlalchemy.schema import UniqueConstraint from superset.extensions import security_manager @@ -143,7 +142,9 @@ class ReportRecipients( Integer, ForeignKey("report_schedule.id"), nullable=False ) report_schedule = relationship( - ReportSchedule, backref="recipients", foreign_keys=[report_schedule_id] + ReportSchedule, + backref=backref("recipients", cascade="all,delete,delete-orphan"), + foreign_keys=[report_schedule_id], ) @@ -173,5 +174,7 @@ class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods Integer, ForeignKey("report_schedule.id"), nullable=False ) report_schedule = relationship( - ReportSchedule, backref="logs", foreign_keys=[report_schedule_id] + ReportSchedule, + backref=backref("logs", cascade="all,delete"), + foreign_keys=[report_schedule_id], ) diff --git a/superset/reports/api.py b/superset/reports/api.py index 5c2423fb1e226..e31c5c7dc71f7 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -63,7 +63,7 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): show_columns = [ "id", - "label", + "name", "recipients.id", "recipients.type", "recipients.recipient_config_json", @@ -78,11 +78,30 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): "created_by.last_name", "created_on", "id", - "label", + "name", + "recipients.id", + "recipients.type", + "type", + ] + add_columns = [ + "active", + "chart", + "context_markdown", + "crontab", + "dashboard", + "database", + "description", + "grace_period", + "log_retention", + "name", + "owners", + "recipients", + "sql", "type", + "validator_config_json", + "validator_type", ] - add_columns = ReportSchedulePostSchema._declared_fields.keys() - edit_columns = ReportSchedulePutSchema._declared_fields.keys() + 
edit_columns = add_columns add_model_schema = ReportSchedulePostSchema() edit_model_schema = ReportSchedulePutSchema() @@ -93,9 +112,10 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): "changed_on", "changed_on_delta_humanized", "created_on", - "label", + "name", "type", ] + search_columns = ["name", "active", "created_by", "type"] allowed_rel_fields = {"created_by"} diff --git a/superset/reports/commands/base.py b/superset/reports/commands/base.py new file mode 100644 index 0000000000000..bb4064d22cde7 --- /dev/null +++ b/superset/reports/commands/base.py @@ -0,0 +1,63 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from typing import Any, Dict, List + +from marshmallow import ValidationError + +from superset.charts.dao import ChartDAO +from superset.commands.base import BaseCommand +from superset.dashboards.dao import DashboardDAO +from superset.reports.commands.exceptions import ( + ChartNotFoundValidationError, + DashboardNotFoundValidationError, + ReportScheduleChartOrDashboardValidationError, +) + +logger = logging.getLogger(__name__) + + +class BaseReportScheduleCommand(BaseCommand): + + _properties: Dict[str, Any] + + def run(self) -> Any: + pass + + def validate(self) -> None: + pass + + def validate_chart_dashboard( + self, exceptions: List[ValidationError], update: bool = False + ) -> None: + """ Validate chart or dashboard relation """ + chart_id = self._properties.get("chart") + dashboard_id = self._properties.get("dashboard") + if chart_id and dashboard_id: + exceptions.append(ReportScheduleChartOrDashboardValidationError()) + if chart_id: + chart = ChartDAO.find_by_id(chart_id) + if not chart: + exceptions.append(ChartNotFoundValidationError()) + self._properties["chart"] = chart + elif dashboard_id: + dashboard = DashboardDAO.find_by_id(dashboard_id) + if not dashboard: + exceptions.append(DashboardNotFoundValidationError()) + self._properties["dashboard"] = dashboard + elif not update: + exceptions.append(ReportScheduleChartOrDashboardValidationError()) diff --git a/superset/reports/commands/bulk_delete.py b/superset/reports/commands/bulk_delete.py index b93b300e021d3..b9dd57267555e 100644 --- a/superset/reports/commands/bulk_delete.py +++ b/superset/reports/commands/bulk_delete.py @@ -24,7 +24,6 @@ from superset.models.reports import ReportSchedule from superset.reports.commands.exceptions import ( ReportScheduleBulkDeleteFailedError, - ReportScheduleBulkDeleteIntegrityError, ReportScheduleNotFoundError, ) from superset.reports.dao import ReportScheduleDAO @@ -52,4 +51,3 @@ def validate(self) -> None: self._models = 
ReportScheduleDAO.find_by_ids(self._model_ids) if not self._models or len(self._models) != len(self._model_ids): raise ReportScheduleNotFoundError() - # TODO handle recipients and logs diff --git a/superset/reports/commands/create.py b/superset/reports/commands/create.py index b4bbdf3760c42..226e4efc49138 100644 --- a/superset/reports/commands/create.py +++ b/superset/reports/commands/create.py @@ -16,33 +16,30 @@ # under the License. import json import logging -from typing import Any, Callable, cast, Dict, List, Optional, Type +from typing import Any, Dict, List, Optional from flask_appbuilder.models.sqla import Model from flask_appbuilder.security.sqla.models import User from marshmallow import ValidationError -from superset.charts.dao import ChartDAO -from superset.commands.base import BaseCommand from superset.commands.utils import populate_owners from superset.dao.exceptions import DAOCreateFailedError -from superset.dashboards.dao import DashboardDAO from superset.databases.dao import DatabaseDAO from superset.models.reports import ReportScheduleType +from superset.reports.commands.base import BaseReportScheduleCommand from superset.reports.commands.exceptions import ( - ChartNotFoundValidationError, - DashboardNotFoundValidationError, DatabaseNotFoundValidationError, + ReportScheduleAlertRequiredDatabaseValidationError, ReportScheduleCreateFailedError, ReportScheduleInvalidError, - ReportScheduleLabelUniquenessValidationError, + ReportScheduleNameUniquenessValidationError, ) from superset.reports.dao import ReportScheduleDAO logger = logging.getLogger(__name__) -class CreateReportScheduleCommand(BaseCommand): +class CreateReportScheduleCommand(BaseReportScheduleCommand): def __init__(self, user: User, data: Dict[str, Any]): self._actor = user self._properties = data.copy() @@ -59,14 +56,30 @@ def run(self) -> Model: def validate(self) -> None: exceptions: List[ValidationError] = list() owner_ids: Optional[List[int]] = self._properties.get("owners") - label = 
self._properties.get("label", "") + name = self._properties.get("name", "") report_type = self._properties.get("type", ReportScheduleType.ALERT) - # Validate label uniqueness - if not ReportScheduleDAO.validate_update_uniqueness(label): - exceptions.append(ReportScheduleLabelUniquenessValidationError()) + # Validate name uniqueness + if not ReportScheduleDAO.validate_update_uniqueness(name): + exceptions.append(ReportScheduleNameUniquenessValidationError()) - # TODO validate relations based on the report type + # validate relation by report type + if report_type == ReportScheduleType.ALERT: + database_id = self._properties.get("database") + if not database_id: + exceptions.append(ReportScheduleAlertRequiredDatabaseValidationError()) + else: + database = DatabaseDAO.find_by_id(database_id) + if not database: + exceptions.append(DatabaseNotFoundValidationError()) + self._properties["database"] = database + + # Validate chart or dashboard relations + self.validate_chart_dashboard(exceptions) + + self._properties["validator_config_json"] = json.dumps( + self._properties["validator_config_json"] + ) try: owners = populate_owners(self._actor, owner_ids) diff --git a/superset/reports/commands/delete.py b/superset/reports/commands/delete.py index fe2b66703510a..ab48efe671cfb 100644 --- a/superset/reports/commands/delete.py +++ b/superset/reports/commands/delete.py @@ -25,7 +25,6 @@ from superset.models.reports import ReportSchedule from superset.reports.commands.exceptions import ( ReportScheduleDeleteFailedError, - ReportScheduleDeleteIntegrityError, ReportScheduleNotFoundError, ) from superset.reports.dao import ReportScheduleDAO diff --git a/superset/reports/commands/exceptions.py b/superset/reports/commands/exceptions.py index 15cefd7b86485..f807ef9a263e5 100644 --- a/superset/reports/commands/exceptions.py +++ b/superset/reports/commands/exceptions.py @@ -52,6 +52,24 @@ def __init__(self) -> None: super().__init__(_("Chart does not exist"), field_name="chart") +class 
ReportScheduleAlertRequiredDatabaseValidationError(ValidationError): + """ + Marshmallow validation error for report schedule alert missing database field + """ + + def __init__(self) -> None: + super().__init__(_("Database is required for alerts"), field_name="database") + + +class ReportScheduleChartOrDashboardValidationError(ValidationError): + """ + Marshmallow validation error for report schedule accept exlusive chart or dashboard + """ + + def __init__(self) -> None: + super().__init__(_("Choose a chart or dashboard not both"), field_name="chart") + + class ReportScheduleInvalidError(CommandInvalidError): message = _("Report Schedule parameters are invalid.") @@ -84,10 +102,10 @@ class ReportScheduleBulkDeleteIntegrityError(CommandException): message = _("Report Schedule has associated logs or recipients.") -class ReportScheduleLabelUniquenessValidationError(ValidationError): +class ReportScheduleNameUniquenessValidationError(ValidationError): """ - Marshmallow validation error for Report Schedule label already exists + Marshmallow validation error for Report Schedule name already exists """ def __init__(self) -> None: - super().__init__([_("Label must be unique")], field_name="label") + super().__init__([_("Name must be unique")], field_name="name") diff --git a/superset/reports/commands/update.py b/superset/reports/commands/update.py index 331399283692b..7202b6480cc93 100644 --- a/superset/reports/commands/update.py +++ b/superset/reports/commands/update.py @@ -21,13 +21,15 @@ from flask_appbuilder.security.sqla.models import User from marshmallow import ValidationError -from superset.commands.base import BaseCommand from superset.commands.utils import populate_owners from superset.dao.exceptions import DAOUpdateFailedError -from superset.models.reports import ReportSchedule +from superset.databases.dao import DatabaseDAO +from superset.models.reports import ReportSchedule, ReportScheduleType +from superset.reports.commands.base import 
BaseReportScheduleCommand from superset.reports.commands.exceptions import ( + DatabaseNotFoundValidationError, ReportScheduleInvalidError, - ReportScheduleLabelUniquenessValidationError, + ReportScheduleNameUniquenessValidationError, ReportScheduleNotFoundError, ReportScheduleUpdateFailedError, ) @@ -36,7 +38,7 @@ logger = logging.getLogger(__name__) -class UpdateReportScheduleCommand(BaseCommand): +class UpdateReportScheduleCommand(BaseReportScheduleCommand): def __init__(self, user: User, model_id: int, data: Dict[str, Any]): self._actor = user self._model_id = model_id @@ -55,17 +57,35 @@ def run(self) -> Model: def validate(self) -> None: exceptions: List[ValidationError] = list() owner_ids: Optional[List[int]] = self._properties.get("owners") + report_type = self._properties.get("type", ReportScheduleType.ALERT) - label = self._properties.get("label", "") + name = self._properties.get("name", "") self._model = ReportScheduleDAO.find_by_id(self._model_id) + # Does the report exist? 
if not self._model: raise ReportScheduleNotFoundError() + # Validate name uniqueness if not ReportScheduleDAO.validate_update_uniqueness( - label, report_schedule_id=self._model_id + name, report_schedule_id=self._model_id ): - exceptions.append(ReportScheduleLabelUniquenessValidationError()) + exceptions.append(ReportScheduleNameUniquenessValidationError()) + + # validate relation by report type + if not report_type: + report_type = self._model.type + if report_type == ReportScheduleType.ALERT: + database_id = self._properties.get("database") + # If database_id was sent let's validate it exists + if database_id: + database = DatabaseDAO.find_by_id(database_id) + if not database: + exceptions.append(DatabaseNotFoundValidationError()) + self._properties["database"] = database + + # Validate chart or dashboard relations + self.validate_chart_dashboard(exceptions, update=True) # Validate/Populate owner if owner_ids is None: diff --git a/superset/reports/dao.py b/superset/reports/dao.py index 73692b981e08f..c169b01412464 100644 --- a/superset/reports/dao.py +++ b/superset/reports/dao.py @@ -15,14 +15,15 @@ # specific language governing permissions and limitations # under the License. import logging -from typing import List, Optional, Union +from typing import Any, Dict, List, Optional +from flask_appbuilder import Model from sqlalchemy.exc import SQLAlchemyError from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAODeleteFailedError +from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.extensions import db -from superset.models.reports import ReportSchedule +from superset.models.reports import ReportRecipients, ReportSchedule logger = logging.getLogger(__name__) @@ -48,17 +49,81 @@ def bulk_delete( @staticmethod def validate_update_uniqueness( - label: str, report_schedule_id: Optional[int] = None + name: str, report_schedule_id: Optional[int] = None ) -> bool: """ - Validate if this label is unique. 
+        Validate if this name is unique.
 
         :param name: The annotation layer name
         :param report_schedule_id: The report schedule current id
         (only for validating on updates)
         :return: bool
         """
-        query = db.session.query(ReportSchedule).filter(ReportSchedule.label == label)
+        query = db.session.query(ReportSchedule).filter(ReportSchedule.name == name)
         if report_schedule_id:
             query = query.filter(ReportSchedule.id != report_schedule_id)
         return not db.session.query(query.exists()).scalar()
+
+    @classmethod
+    def create(cls, properties: Dict[str, Any], commit: bool = True) -> Model:
+        """
+        create a report schedule and nested recipients
+        :raises: DAOCreateFailedError
+        """
+        import json
+
+        try:
+            model = ReportSchedule()
+            for key, value in properties.items():
+                if key != "recipients":
+                    setattr(model, key, value)
+            recipients = properties.get("recipients", [])
+            for recipient in recipients:
+                model.recipients.append(  # pylint: disable=no-member
+                    ReportRecipients(
+                        type=recipient["type"],
+                        recipient_config_json=json.dumps(
+                            recipient["recipient_config_json"]
+                        ),
+                    )
+                )
+            db.session.add(model)
+            if commit:
+                db.session.commit()
+            return model
+        except SQLAlchemyError:
+            db.session.rollback()
+            raise DAOCreateFailedError
+
+    @classmethod
+    def update(
+        cls, model: Model, properties: Dict[str, Any], commit: bool = True
+    ) -> Model:
+        """
+        update a report schedule and nested recipients
+        :raises: DAOCreateFailedError
+        """
+        import json
+
+        try:
+            for key, value in properties.items():
+                if key != "recipients":
+                    setattr(model, key, value)
+            recipients = properties.get("recipients", [])
+            model.recipients = [
+                ReportRecipients(
+                    type=recipient["type"],
+                    recipient_config_json=json.dumps(
+                        recipient["recipient_config_json"]
+                    ),
+                    report_schedule=model,
+                )
+                for recipient in recipients
+            ]
+            db.session.merge(model)
+            if commit:
+                db.session.commit()
+            return model
+        except SQLAlchemyError:
+            db.session.rollback()
+            raise DAOCreateFailedError
diff --git
a/superset/reports/logs/api.py b/superset/reports/logs/api.py index c7e2e099851d4..0b026c45a825a 100644 --- a/superset/reports/logs/api.py +++ b/superset/reports/logs/api.py @@ -58,7 +58,8 @@ class ReportExecutionLogRestApi(BaseSupersetModelRestApi): "error_message", ] order_columns = [ - "state" "value", + "state", + "value", "error_message", "end_dttm", "start_dttm", diff --git a/superset/reports/logs/schemas.py b/superset/reports/logs/schemas.py index 78aeb864f6b1d..bec162fbcf578 100644 --- a/superset/reports/logs/schemas.py +++ b/superset/reports/logs/schemas.py @@ -14,13 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from typing import Union - -from marshmallow import fields, Schema, ValidationError -from marshmallow.validate import Length - -from superset.exceptions import SupersetException -from superset.utils import core as utils openapi_spec_methods_override = { "get": {"get": {"description": "Get a report schedule log"}}, diff --git a/superset/reports/schemas.py b/superset/reports/schemas.py index 83a850d26b621..d63f2d81bc63d 100644 --- a/superset/reports/schemas.py +++ b/superset/reports/schemas.py @@ -17,11 +17,14 @@ from typing import Union from croniter import croniter -from flask_babel import lazy_gettext as _ from marshmallow import fields, Schema, validate from marshmallow.validate import Length, ValidationError -from superset.models.reports import ReportScheduleType, ReportScheduleValidatorType +from superset.models.reports import ( + ReportRecipientType, + ReportScheduleType, + ReportScheduleValidatorType, +) openapi_spec_methods_override = { "get": {"get": {"description": "Get a report schedule"}}, @@ -41,7 +44,10 @@ get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}} type_description = "The report schedule type" -label_description = "The report schedule label." +name_description = "The report schedule name." 
+# :) +description_description = "Use a nice description to give context to this Alert/Report" +context_markdown_description = "Markdown description" crontab_description = ( "A CRON-like expression." "[Crontab Guru](https://crontab.guru/) is " @@ -52,8 +58,8 @@ "not. The query is expected to return either NULL or a number value." ) owners_description = ( - "Owner are users ids allowed to delete or change this chart. " - "If left empty you will be one of the owners of the chart." + "Owner are users ids allowed to delete or change this report. " + "If left empty you will be one of the owners of the report." ) validator_type_description = ( "Determines when to trigger alert based off value from alert query. " @@ -79,25 +85,50 @@ def validate_crontab(value: Union[bytes, bytearray, str]) -> None: class ValidatorConfigJSONSchema(Schema): - op = fields.String( + operation = fields.String( description=validator_config_json_op_description, validate=validate.OneOf(choices=["<", "<=", ">", ">=", "==", "!="]), ) threshold = fields.Integer() +class ReportRecipientConfigJSONSchema(Schema): + # TODO if email check validity + target = fields.String() + + +class ReportRecipientSchema(Schema): + type = fields.String( + description="The recipient type, check spec for valid options", + allow_none=False, + validate=validate.OneOf( + choices=tuple(key.value for key in ReportRecipientType) + ), + ) + recipient_config_json = fields.Nested(ReportRecipientConfigJSONSchema) + + class ReportSchedulePostSchema(Schema): type = fields.String( description=type_description, allow_none=False, validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)), ) - label = fields.String( - description=label_description, + name = fields.String( + description=name_description, allow_none=False, validate=[Length(1, 150)], example="Daily dashboard email", ) + description = fields.String( + description=description_description, + allow_none=True, + required=False, + example="Daily sales 
dashboard to marketing", + ) + context_markdown = fields.String( + description=context_markdown_description, allow_none=True, required=False + ) active = fields.Boolean() crontab = fields.String( description=crontab_description, @@ -111,7 +142,6 @@ class ReportSchedulePostSchema(Schema): dashboard = fields.Integer(required=False) database = fields.Integer(required=False) owners = fields.List(fields.Integer(description=owners_description)) - email_format = fields.String(validate=[Length(1, 50)]) validator_type = fields.String( description=validator_type_description, validate=validate.OneOf( @@ -121,6 +151,7 @@ class ReportSchedulePostSchema(Schema): validator_config_json = fields.Nested(ValidatorConfigJSONSchema) log_retention = fields.Integer(description=log_retention_description, example=90) grace_period = fields.Integer(description=grace_period_description, example=14400) + recipients = fields.List(fields.Nested(ReportRecipientSchema)) class ReportSchedulePutSchema(Schema): @@ -129,8 +160,17 @@ class ReportSchedulePutSchema(Schema): required=False, validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)), ) - label = fields.String( - description=label_description, required=False, validate=[Length(1, 150)] + name = fields.String( + description=name_description, required=False, validate=[Length(1, 150)] + ) + description = fields.String( + description=description_description, + allow_none=True, + required=False, + example="Daily sales dashboard to marketing", + ) + context_markdown = fields.String( + description=context_markdown_description, allow_none=True, required=False ) active = fields.Boolean(required=False) crontab = fields.String( @@ -147,7 +187,6 @@ class ReportSchedulePutSchema(Schema): dashboard = fields.Integer(required=False) database = fields.Integer(required=False) owners = fields.List(fields.Integer(description=owners_description), required=False) - email_format = fields.String(validate=[Length(1, 50)], required=False) 
validator_type = fields.String( description=validator_type_description, validate=validate.OneOf( @@ -162,3 +201,4 @@ class ReportSchedulePutSchema(Schema): grace_period = fields.Integer( description=grace_period_description, example=14400, required=False ) + recipients = fields.List(fields.Nested(ReportRecipientSchema), required=False) From 6f57dcad6c012bc5885d9c56b359c278b3d5da68 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Mon, 9 Nov 2020 15:11:50 +0000 Subject: [PATCH 14/23] add tests --- superset/models/slice.py | 1 - superset/reports/api.py | 23 +- superset/reports/commands/create.py | 7 +- superset/reports/dao.py | 12 +- superset/reports/schemas.py | 5 + tests/reports/__init__.py | 16 + tests/reports/api_tests.py | 775 ++++++++++++++++++++++++++++ 7 files changed, 832 insertions(+), 7 deletions(-) create mode 100644 tests/reports/__init__.py create mode 100644 tests/reports/api_tests.py diff --git a/superset/models/slice.py b/superset/models/slice.py index 4ed8ba41fb41c..99d7b92157c72 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -80,7 +80,6 @@ class Slice( primaryjoin="and_(Slice.datasource_id == SqlaTable.id, " "Slice.datasource_type == 'table')", remote_side="SqlaTable.id", - lazy="joined", ) token = "" diff --git a/superset/reports/api.py b/superset/reports/api.py index e31c5c7dc71f7..7953a3317a6f3 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -64,6 +64,25 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): show_columns = [ "id", "name", + "type", + "description", + "context_markdown", + "active", + "crontab", + "chart.id", + "dashboard.id", + "database.id", + "owners.id", + "owners.first_name", + "owners.last_name", + "last_eval_dttm", + "last_state", + "last_value", + "last_value_row_json", + "validator_type", + "validator_config_json", + "log_retention", + "grace_period", "recipients.id", "recipients.type", "recipients.recipient_config_json", @@ -78,6 +97,8 @@ class 
ReportScheduleRestApi(BaseSupersetModelRestApi): "created_by.last_name", "created_on", "id", + "last_eval_dttm", + "last_state", "name", "recipients.id", "recipients.type", @@ -115,7 +136,7 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): "name", "type", ] - search_columns = ["name", "active", "created_by", "type"] + search_columns = ["name", "active", "created_by"] allowed_rel_fields = {"created_by"} diff --git a/superset/reports/commands/create.py b/superset/reports/commands/create.py index 226e4efc49138..e8638f71a4678 100644 --- a/superset/reports/commands/create.py +++ b/superset/reports/commands/create.py @@ -77,9 +77,10 @@ def validate(self) -> None: # Validate chart or dashboard relations self.validate_chart_dashboard(exceptions) - self._properties["validator_config_json"] = json.dumps( - self._properties["validator_config_json"] - ) + if "validator_config_json" in self._properties: + self._properties["validator_config_json"] = json.dumps( + self._properties["validator_config_json"] + ) try: owners = populate_owners(self._actor, owner_ids) diff --git a/superset/reports/dao.py b/superset/reports/dao.py index c169b01412464..2d52c1649761f 100644 --- a/superset/reports/dao.py +++ b/superset/reports/dao.py @@ -23,7 +23,7 @@ from superset.dao.base import BaseDAO from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.extensions import db -from superset.models.reports import ReportRecipients, ReportSchedule +from superset.models.reports import ReportExecutionLog, ReportRecipients, ReportSchedule logger = logging.getLogger(__name__) @@ -37,6 +37,14 @@ def bulk_delete( ) -> None: item_ids = [model.id for model in models] if models else [] try: + db.session.query(ReportRecipients).filter( + ReportRecipients.report_schedule_id.in_(item_ids) + ).delete(synchronize_session="fetch") + + db.session.query(ReportExecutionLog).filter( + ReportExecutionLog.report_schedule_id.in_(item_ids) + ).delete(synchronize_session="fetch") + 
db.session.query(ReportSchedule).filter( ReportSchedule.id.in_(item_ids) ).delete(synchronize_session="fetch") @@ -54,7 +62,7 @@ def validate_update_uniqueness( """ Validate if this name is unique. - :param name: The annotation layer name + :param name: The report schedule name :param report_schedule_id: The report schedule current id (only for validating on updates) :return: bool diff --git a/superset/reports/schemas.py b/superset/reports/schemas.py index d63f2d81bc63d..5da7a1bf0b0b1 100644 --- a/superset/reports/schemas.py +++ b/superset/reports/schemas.py @@ -101,6 +101,7 @@ class ReportRecipientSchema(Schema): type = fields.String( description="The recipient type, check spec for valid options", allow_none=False, + required=True, validate=validate.OneOf( choices=tuple(key.value for key in ReportRecipientType) ), @@ -112,11 +113,13 @@ class ReportSchedulePostSchema(Schema): type = fields.String( description=type_description, allow_none=False, + required=True, validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)), ) name = fields.String( description=name_description, allow_none=False, + required=True, validate=[Length(1, 150)], example="Daily dashboard email", ) @@ -134,6 +137,8 @@ class ReportSchedulePostSchema(Schema): description=crontab_description, validate=[validate_crontab, Length(1, 50)], example="*/5 * * * * *", + allow_none=False, + required=True, ) sql = fields.String( description=sql_description, example="SELECT value FROM time_series_table" diff --git a/tests/reports/__init__.py b/tests/reports/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/reports/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/reports/api_tests.py b/tests/reports/api_tests.py new file mode 100644 index 0000000000000..d76f72ce20398 --- /dev/null +++ b/tests/reports/api_tests.py @@ -0,0 +1,775 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# isort:skip_file +"""Unit tests for Superset""" +from datetime import datetime +from typing import List, Optional +import json + +from flask_appbuilder.security.sqla.models import User +import pytest +import prison +from sqlalchemy.sql import func + +import tests.test_app +from superset import db +from superset.models.core import Database +from superset.models.slice import Slice +from superset.models.dashboard import Dashboard +from superset.models.reports import ( + ReportSchedule, + ReportRecipients, + ReportExecutionLog, + ReportScheduleType, + ReportRecipientType, + ReportLogState, +) + +from tests.base_tests import SupersetTestCase +from superset.utils.core import get_example_database + + +REPORTS_COUNT = 10 +# REPORTS_LOGS_COUNT = 5 +# REPORTS_RECIPIENTS_COUNT = 5 + + +class TestReportSchedulesApi(SupersetTestCase): + def insert_report_schedule( + self, + type: str, + name: str, + crontab: str, + sql: Optional[str] = None, + description: Optional[str] = None, + chart: Optional[Slice] = None, + dashboard: Optional[Dashboard] = None, + database: Optional[Database] = None, + owners: Optional[List[User]] = None, + validator_type: Optional[str] = None, + validator_config_json: Optional[str] = None, + log_retention: Optional[int] = None, + grace_period: Optional[int] = None, + recipients: Optional[List[ReportRecipients]] = None, + logs: Optional[List[ReportExecutionLog]] = None, + ) -> ReportSchedule: + owners = owners or [] + recipients = recipients or [] + logs = logs or [] + report_schedule = ReportSchedule( + type=type, + name=name, + crontab=crontab, + sql=sql, + description=description, + chart=chart, + dashboard=dashboard, + database=database, + owners=owners, + validator_type=validator_type, + validator_config_json=validator_config_json, + log_retention=log_retention, + grace_period=grace_period, + recipients=recipients, + logs=logs, + ) + db.session.add(report_schedule) + db.session.commit() + return report_schedule + + @pytest.fixture() + def 
create_report_schedules(self): + with self.create_app().app_context(): + report_schedules = [] + chart = db.session.query(Slice).first() + example_db = get_example_database() + for cx in range(REPORTS_COUNT): + recipients = [] + logs = [] + for cy in range(cx): + config_json = {"target": f"target{cy}@email.com"} + recipients.append( + ReportRecipients( + type=ReportRecipientType.EMAIL, + recipient_config_json=json.dumps(config_json), + ) + ) + logs.append( + ReportExecutionLog( + scheduled_dttm=datetime(2020, 1, 1), + state=ReportLogState.ERROR, + error_message=f"Error {cy}", + ) + ) + report_schedules.append( + self.insert_report_schedule( + type=ReportScheduleType.ALERT, + name=f"name{cx}", + crontab=f"*/{cx} * * * *", + sql="SELECT value from table1", + description=f"Some description {cx}", + chart=chart, + database=example_db, + recipients=recipients, + logs=logs, + ) + ) + yield report_schedules + + # rollback changes (assuming cascade delete) + for report_schedule in report_schedules: + db.session.delete(report_schedule) + db.session.commit() + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_report_schedule(self): + """ + ReportSchedule Api: Test get report schedule + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name1") + .first() + ) + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}" + rv = self.get_assert_metric(uri, "get") + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 200 + expected_result = { + "active": report_schedule.active, + "chart": {"id": report_schedule.chart.id}, + "context_markdown": report_schedule.context_markdown, + "crontab": report_schedule.crontab, + "dashboard": None, + "database": {"id": report_schedule.database.id}, + "description": report_schedule.description, + "grace_period": report_schedule.grace_period, + "id": report_schedule.id, + "last_eval_dttm": report_schedule.last_eval_dttm, + "last_state": 
report_schedule.last_state, + "last_value": report_schedule.last_value, + "last_value_row_json": report_schedule.last_value_row_json, + "log_retention": report_schedule.log_retention, + "name": report_schedule.name, + "owners": [], + "recipients": [ + { + "id": report_schedule.recipients[0].id, + "recipient_config_json": '{"target": "target0@email.com"}', + "type": "Email", + } + ], + "type": report_schedule.type, + "validator_config_json": report_schedule.validator_config_json, + "validator_type": report_schedule.validator_type, + } + assert data["result"] == expected_result + + def test_info_report_schedule(self): + """ + ReportSchedule API: Test info + """ + self.login(username="admin") + uri = f"api/v1/report/_info" + rv = self.get_assert_metric(uri, "info") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_report_schedule_not_found(self): + """ + ReportSchedule Api: Test get report schedule not found + """ + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + self.login(username="admin") + uri = f"api/v1/report/{max_id + 1}" + rv = self.get_assert_metric(uri, "get") + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule(self): + """ + ReportSchedule Api: Test get list report schedules + """ + self.login(username="admin") + uri = f"api/v1/report/" + rv = self.get_assert_metric(uri, "get_list") + + expected_fields = [ + "active", + "changed_by", + "changed_on", + "changed_on_delta_humanized", + "created_by", + "created_on", + "id", + "last_eval_dttm", + "last_state", + "name", + "recipients", + "type", + ] + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == REPORTS_COUNT + for expected_field in expected_fields: + assert expected_field in data["result"][0] + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_sorting(self): + """ + 
ReportSchedule Api: Test sorting on get list report schedules + """ + self.login(username="admin") + uri = f"api/v1/report/" + + order_columns = [ + "active", + "created_by.first_name", + "changed_by.first_name", + "changed_on", + "changed_on_delta_humanized", + "created_on", + "name", + "type", + ] + + for order_column in order_columns: + arguments = {"order_column": order_column, "order_direction": "asc"} + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter(self): + """ + ReportSchedule Api: Test filters on get list report schedules + """ + self.login(username="admin") + arguments = { + "columns": ["name"], + "filters": [{"col": "name", "opr": "ct", "value": "2"}], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + expected_result = { + "name": "name2", + } + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + assert data["result"][0] == expected_result + + arguments = { + "columns": ["name"], + "filters": [{"col": "name", "opr": "active", "value": True}], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == REPORTS_COUNT + + @pytest.mark.usefixtures("create_report_schedules") + def test_create_report_schedule(self): + """ + ReportSchedule Api: Test create report schedule + """ + self.login(username="admin") + + chart = db.session.query(Slice).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": 
"target@superset.org"}, + }, + { + "type": ReportRecipientType.SLACK, + "recipient_config_json": {"target": "channel"}, + }, + ], + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.client.post(uri, json=report_schedule_data) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 201 + created_model = db.session.query(ReportSchedule).get(data.get("id")) + assert created_model is not None + assert created_model.name == report_schedule_data["name"] + assert created_model.description == report_schedule_data["description"] + assert created_model.crontab == report_schedule_data["crontab"] + assert created_model.chart.id == report_schedule_data["chart"] + assert created_model.database.id == report_schedule_data["database"] + # Rollback changes + db.session.delete(created_model) + db.session.commit() + + @pytest.mark.usefixtures("create_report_schedules") + def test_create_report_schedule_uniqueness(self): + """ + ReportSchedule Api: Test create report schedule uniqueness + """ + self.login(username="admin") + + chart = db.session.query(Slice).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "name3", + "description": "description", + "crontab": "0 9 * * *", + "chart": chart.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.client.post(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"name": ["Name must be unique"]}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_create_report_schedule_chart_dash_validation(self): + """ + ReportSchedule Api: Test create report schedule chart and dashboard validation + """ + self.login(username="admin") + + # Test we can submit a chart or a dashboard not both + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() 
+ report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "chart": chart.id, + "dashboard": dashboard.id, + "database": example_db.id, + } + uri = "api/v1/report/" + rv = self.client.post(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"chart": "Choose a chart or dashboard not both"}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_create_report_schedule_chart_db_validation(self): + """ + ReportSchedule Api: Test create report schedule chart and database validation + """ + self.login(username="admin") + + # Test database required for alerts + chart = db.session.query(Slice).first() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "chart": chart.id, + } + uri = "api/v1/report/" + rv = self.client.post(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"database": "Database is required for alerts"}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_create_report_schedule_relations_exist(self): + """ + ReportSchedule Api: Test create report schedule + relations (chart, dash, db) exist + """ + self.login(username="admin") + + # Test chart and database do not exist + chart_max_id = db.session.query(func.max(Slice.id)).scalar() + database_max_id = db.session.query(func.max(Database.id)).scalar() + examples_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "chart": chart_max_id + 1, + "database": database_max_id + 1, + } + uri = "api/v1/report/" + rv = self.client.post(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) 
+ assert data == { + "message": { + "chart": "Chart does not exist", + "database": "Database does not exist", + } + } + + # Test dashboard does not exist + dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "dashboard": dashboard_max_id + 1, + "database": examples_db.id, + } + uri = "api/v1/report/" + rv = self.client.post(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"dashboard": "Dashboard does not exist"}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule(self): + """ + ReportSchedule Api: Test update report schedule + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + self.login(username="admin") + chart = db.session.query(Slice).first() + example_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "changed", + "description": "description", + "crontab": "0 10 * * *", + "recipients": [ + { + "type": ReportRecipientType.EMAIL, + "recipient_config_json": {"target": "target@superset.org"}, + } + ], + "chart": chart.id, + "database": example_db.id, + } + + uri = f"api/v1/report/{report_schedule.id}" + + rv = self.client.put(uri, json=report_schedule_data) + assert rv.status_code == 200 + updated_model = db.session.query(ReportSchedule).get(report_schedule.id) + assert updated_model is not None + assert updated_model.name == report_schedule_data["name"] + assert updated_model.description == report_schedule_data["description"] + assert len(updated_model.recipients) == 1 + assert updated_model.crontab == report_schedule_data["crontab"] + assert updated_model.chart_id == report_schedule_data["chart"] + assert updated_model.database_id == 
report_schedule_data["database"] + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule_uniqueness(self): + """ + ReportSchedule Api: Test update report schedule uniqueness + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + self.login(username="admin") + report_schedule_data = {"name": "name3", "description": "changed_description"} + uri = f"api/v1/report/{report_schedule.id}" + rv = self.client.put(uri, json=report_schedule_data) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": {"name": ["Name must be unique"]}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule_not_found(self): + """ + ReportSchedule Api: Test update report schedule not found + """ + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + + self.login(username="admin") + annotation_layer_data = {"name": "changed"} + uri = f"api/v1/report/{max_id + 1}" + rv = self.client.put(uri, json=annotation_layer_data) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule_chart_dash_validation(self): + """ + ReportSchedule Api: Test update report schedule chart and dashboard validation + """ + self.login(username="admin") + + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + # Test we can submit a chart or a dashboard not both + chart = db.session.query(Slice).first() + dashboard = db.session.query(Dashboard).first() + example_db = get_example_database() + report_schedule_data = { + "chart": chart.id, + "dashboard": dashboard.id, + "database": example_db.id, + } + uri = f"api/v1/report/{report_schedule.id}" + rv = self.client.put(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == 
{"message": {"chart": "Choose a chart or dashboard not both"}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_update_report_schedule_relations_exist(self): + """ + ReportSchedule Api: Test update report schedule relations exist + relations (chart, dash, db) exist + """ + self.login(username="admin") + + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name2") + .one_or_none() + ) + + # Test chart and database do not exist + chart_max_id = db.session.query(func.max(Slice.id)).scalar() + database_max_id = db.session.query(func.max(Database.id)).scalar() + examples_db = get_example_database() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "chart": chart_max_id + 1, + "database": database_max_id + 1, + } + uri = f"api/v1/report/{report_schedule.id}" + rv = self.client.put(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == { + "message": { + "chart": "Chart does not exist", + "database": "Database does not exist", + } + } + + # Test dashboard does not exist + dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar() + report_schedule_data = { + "type": ReportScheduleType.ALERT, + "name": "new3", + "description": "description", + "crontab": "0 9 * * *", + "dashboard": dashboard_max_id + 1, + "database": examples_db.id, + } + uri = f"api/v1/report/{report_schedule.id}" + rv = self.client.put(uri, json=report_schedule_data) + assert rv.status_code == 422 + data = json.loads(rv.data.decode("utf-8")) + assert data == {"message": {"dashboard": "Dashboard does not exist"}} + + @pytest.mark.usefixtures("create_report_schedules") + def test_delete_report_schedule(self): + """ + ReportSchedule Api: Test update report schedule + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name1") + .one_or_none() 
+ ) + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + deleted_report_schedule = db.session.query(ReportSchedule).get( + report_schedule.id + ) + assert deleted_report_schedule is None + deleted_recipients = ( + db.session.query(ReportRecipients) + .filter(ReportRecipients.report_schedule_id == report_schedule.id) + .all() + ) + assert deleted_recipients == [] + deleted_logs = ( + db.session.query(ReportExecutionLog) + .filter(ReportExecutionLog.report_schedule_id == report_schedule.id) + .all() + ) + assert deleted_logs == [] + + @pytest.mark.usefixtures("create_report_schedules") + def test_delete_report_schedule_not_found(self): + """ + ReportSchedule Api: Test delete report schedule not found + """ + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + self.login(username="admin") + uri = f"api/v1/report/{max_id + 1}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + def test_bulk_delete_report_schedule(self): + """ + ReportSchedule Api: Test bulk delete report schedules + """ + query_report_schedules = db.session.query(ReportSchedule) + report_schedules = query_report_schedules.all() + + report_schedules_ids = [ + report_schedule.id for report_schedule in report_schedules + ] + self.login(username="admin") + uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + deleted_report_schedules = query_report_schedules.all() + assert deleted_report_schedules == [] + response = json.loads(rv.data.decode("utf-8")) + expected_response = { + "message": f"Deleted {len(report_schedules_ids)} report schedules" + } + assert response == expected_response + + @pytest.mark.usefixtures("create_report_schedules") + def test_bulk_delete_report_schedule_not_found(self): + """ + ReportSchedule Api: Test bulk delete report schedule not found + 
""" + report_schedules = db.session.query(ReportSchedule).all() + report_schedules_ids = [ + report_schedule.id for report_schedule in report_schedules + ] + max_id = db.session.query(func.max(ReportSchedule.id)).scalar() + report_schedules_ids.append(max_id + 1) + self.login(username="admin") + uri = f"api/v1/report/?q={prison.dumps(report_schedules_ids)}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_logs(self): + """ + ReportSchedule Api: Test get list report schedules logs + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}/log/" + rv = self.client.get(uri) + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 3 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_logs_sorting(self): + """ + ReportSchedule Api: Test get list report schedules logs + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}/log/" + + order_columns = [ + "state", + "value", + "error_message", + "end_dttm", + "start_dttm", + ] + + for order_column in order_columns: + arguments = {"order_column": order_column, "order_direction": "asc"} + uri = f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + assert rv.status_code == 200 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_logs_filters(self): + """ + ReportSchedule Api: Test get list report schedules log filters + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + 
self.login(username="admin") + arguments = { + "columns": ["name"], + "filters": [{"col": "state", "opr": "eq", "value": ReportLogState.SUCCESS}], + } + uri = f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + + @pytest.mark.usefixtures("create_report_schedules") + def test_report_schedule_logs_no_mutations(self): + """ + ReportSchedule Api: Test assert there's no way to alter logs + """ + report_schedule = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.name == "name3") + .one_or_none() + ) + + data = {"state": ReportLogState.ERROR, "error_message": "New error changed"} + + self.login(username="admin") + uri = f"api/v1/report/{report_schedule.id}/log/" + rv = self.client.post(uri, json=data) + assert rv.status_code == 405 + uri = f"api/v1/report/{report_schedule.id}/log/{report_schedule.logs[0].id}" + rv = self.client.put(uri, json=data) + assert rv.status_code == 405 + rv = self.client.delete(uri) + assert rv.status_code == 405 From 5a0f7c3e4f2f3ac36da629fd6135cfb2e69f51e1 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Mon, 9 Nov 2020 16:05:37 +0000 Subject: [PATCH 15/23] test --- superset/models/slice.py | 1 + superset/reports/api.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/superset/models/slice.py b/superset/models/slice.py index 99d7b92157c72..1ba16ca20d21e 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -80,6 +80,7 @@ class Slice( primaryjoin="and_(Slice.datasource_id == SqlaTable.id, " "Slice.datasource_type == 'table')", remote_side="SqlaTable.id", + lazy="subquery" ) token = "" diff --git a/superset/reports/api.py b/superset/reports/api.py index 7953a3317a6f3..892b4396f38b7 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -87,6 +87,10 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): 
"recipients.type", "recipients.recipient_config_json", ] + show_select_columns = show_columns + [ + "chart.datasource_id", + "chart.datasource_type", + ] list_columns = [ "active", "changed_by.first_name", From 4633fa5693f6f384c7d5c80319e5e88f75779fb6 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Mon, 9 Nov 2020 16:16:41 +0000 Subject: [PATCH 16/23] black --- superset/models/slice.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset/models/slice.py b/superset/models/slice.py index 1ba16ca20d21e..dc4dc468229d0 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -80,7 +80,7 @@ class Slice( primaryjoin="and_(Slice.datasource_id == SqlaTable.id, " "Slice.datasource_type == 'table')", remote_side="SqlaTable.id", - lazy="subquery" + lazy="subquery", ) token = "" From 63bc5eecf30afa3ea54c0e780971093a04fe236d Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Mon, 9 Nov 2020 16:32:22 +0000 Subject: [PATCH 17/23] remove copy pasta --- tests/reports/api_tests.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/reports/api_tests.py b/tests/reports/api_tests.py index d76f72ce20398..2c08a10cb2482 100644 --- a/tests/reports/api_tests.py +++ b/tests/reports/api_tests.py @@ -516,9 +516,9 @@ def test_update_report_schedule_not_found(self): max_id = db.session.query(func.max(ReportSchedule.id)).scalar() self.login(username="admin") - annotation_layer_data = {"name": "changed"} + report_schedule_data = {"name": "changed"} uri = f"api/v1/report/{max_id + 1}" - rv = self.client.put(uri, json=annotation_layer_data) + rv = self.client.put(uri, json=report_schedule_data) assert rv.status_code == 404 @pytest.mark.usefixtures("create_report_schedules") From 746db04caf8620b95867372e1dee1ad3abecc2eb Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Mon, 9 Nov 2020 18:59:31 +0000 Subject: [PATCH 18/23] solve dashboard object representation being used on cache --- superset/reports/api.py | 12 +++++++++++- 
superset/views/base_api.py | 22 ++++++++++++++++++++-- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/superset/reports/api.py b/superset/reports/api.py index 892b4396f38b7..426bb50f5703f 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -23,7 +23,9 @@ from flask_babel import ngettext from marshmallow import ValidationError +from superset.charts.filters import ChartFilter from superset.constants import RouteMethod +from superset.dashboards.filters import DashboardFilter from superset.models.reports import ReportSchedule from superset.reports.commands.bulk_delete import BulkDeleteReportScheduleCommand from superset.reports.commands.create import CreateReportScheduleCommand @@ -104,6 +106,9 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): "last_eval_dttm", "last_state", "name", + "owners.id", + "owners.first_name", + "owners.last_name", "recipients.id", "recipients.type", "type", @@ -142,7 +147,12 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): ] search_columns = ["name", "active", "created_by"] - allowed_rel_fields = {"created_by"} + allowed_rel_fields = {"created_by", "chart", "dashboard"} + filter_rel_fields = { + "chart": [["id", ChartFilter, lambda: []]], + "dashboard": [["id", DashboardFilter, lambda: []]], + } + text_field_rel_fields = {"dashboard": "dashboard_title"} apispec_parameter_schemas = { "get_delete_ids_schema": get_delete_ids_schema, diff --git a/superset/views/base_api.py b/superset/views/base_api.py index d507d4ace8c90..c1dedcd311305 100644 --- a/superset/views/base_api.py +++ b/superset/views/base_api.py @@ -21,7 +21,7 @@ from apispec import APISpec from apispec.exceptions import DuplicateComponentNameError from flask import Blueprint, g, Response -from flask_appbuilder import AppBuilder, ModelRestApi +from flask_appbuilder import AppBuilder, Model, ModelRestApi from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.models.filters import BaseFilter, Filters from 
flask_appbuilder.models.sqla.filters import FilterStartsWith @@ -170,6 +170,18 @@ class BaseSupersetModelRestApi(ModelRestApi): } """ # pylint: disable=pointless-string-statement allowed_rel_fields: Set[str] = set() + """ + Declare a set of allowed related fields that the `related` endpoint supports + """ # pylint: disable=pointless-string-statement + + text_field_rel_fields: Dict[str, str] = {} + """ + Declare an alternative for the human readable representation of the Model object:: + + text_field_rel_fields = { + "": "" + } + """ # pylint: disable=pointless-string-statement allowed_distinct_fields: Set[str] = set() @@ -380,6 +392,12 @@ def related(self, column_name: str, **kwargs: Any) -> FlaskResponse: 500: $ref: '#/components/responses/500' """ + + def get_text_for_model(model: Model) -> str: + if column_name in self.text_field_rel_fields: + return getattr(model, self.text_field_rel_fields.get(column_name)) + return str(model) + if column_name not in self.allowed_rel_fields: self.incr_stats("error", self.related.__name__) return self.response_404() @@ -405,7 +423,7 @@ def related(self, column_name: str, **kwargs: Any) -> FlaskResponse: ) # produce response result = [ - {"value": datamodel.get_pk_value(value), "text": str(value)} + {"value": datamodel.get_pk_value(value), "text": get_text_for_model(value)} for value in values ] return self.response(200, count=count, result=result) From 1fcb5dfa3deb22331b60b5d3e70df3905f25bb41 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Tue, 10 Nov 2020 11:44:39 +0000 Subject: [PATCH 19/23] tests and custom filter --- superset/reports/api.py | 5 +- superset/reports/dao.py | 26 +++++---- superset/reports/filters.py | 41 ++++++++++++++ superset/views/base_api.py | 4 +- tests/reports/api_tests.py | 107 +++++++++++++++++++++++++++++++++--- 5 files changed, 159 insertions(+), 24 deletions(-) create mode 100644 superset/reports/filters.py diff --git a/superset/reports/api.py b/superset/reports/api.py index 
426bb50f5703f..fbb25916ef505 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -41,6 +41,7 @@ ReportScheduleUpdateFailedError, ) from superset.reports.commands.update import UpdateReportScheduleCommand +from superset.reports.filters import ReportScheduleAllTextFilter from superset.reports.schemas import ( get_delete_ids_schema, openapi_spec_methods_override, @@ -145,8 +146,8 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi): "name", "type", ] - search_columns = ["name", "active", "created_by"] - + search_columns = ["name", "active", "created_by", "type"] + search_filters = {"name": [ReportScheduleAllTextFilter]} allowed_rel_fields = {"created_by", "chart", "dashboard"} filter_rel_fields = { "chart": [["id", ChartFilter, lambda: []]], diff --git a/superset/reports/dao.py b/superset/reports/dao.py index 2d52c1649761f..23d799eeb7138 100644 --- a/superset/reports/dao.py +++ b/superset/reports/dao.py @@ -23,7 +23,10 @@ from superset.dao.base import BaseDAO from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.extensions import db -from superset.models.reports import ReportExecutionLog, ReportRecipients, ReportSchedule +from superset.models.reports import ( + ReportRecipients, + ReportSchedule, +) logger = logging.getLogger(__name__) @@ -37,17 +40,16 @@ def bulk_delete( ) -> None: item_ids = [model.id for model in models] if models else [] try: - db.session.query(ReportRecipients).filter( - ReportRecipients.report_schedule_id.in_(item_ids) - ).delete(synchronize_session="fetch") - - db.session.query(ReportExecutionLog).filter( - ReportExecutionLog.report_schedule_id.in_(item_ids) - ).delete(synchronize_session="fetch") - - db.session.query(ReportSchedule).filter( - ReportSchedule.id.in_(item_ids) - ).delete(synchronize_session="fetch") + # Clean owners secondary table + report_schedules = ( + db.session.query(ReportSchedule) + .filter(ReportSchedule.id.in_(item_ids)) + .all() + ) + for report_schedule in 
report_schedules: + report_schedule.owners = [] + for report_schedule in report_schedules: + db.session.delete(report_schedule) if commit: db.session.commit() except SQLAlchemyError: diff --git a/superset/reports/filters.py b/superset/reports/filters.py new file mode 100644 index 0000000000000..82ea73a91a9f0 --- /dev/null +++ b/superset/reports/filters.py @@ -0,0 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from typing import Any + +from flask_babel import lazy_gettext as _ +from sqlalchemy import or_ +from sqlalchemy.orm.query import Query + +from superset.models.reports import ReportSchedule +from superset.views.base import BaseFilter + + +class ReportScheduleAllTextFilter(BaseFilter): # pylint: disable=too-few-public-methods + name = _("All Text") + arg_name = "report_all_text" + + def apply(self, query: Query, value: Any) -> Query: + if not value: + return query + ilike_value = f"%{value}%" + return query.filter( + or_( + ReportSchedule.name.ilike(ilike_value), + ReportSchedule.description.ilike(ilike_value), + ReportSchedule.sql.ilike((ilike_value)), + ) + ) diff --git a/superset/views/base_api.py b/superset/views/base_api.py index c1dedcd311305..a83fcc671de4a 100644 --- a/superset/views/base_api.py +++ b/superset/views/base_api.py @@ -395,7 +395,9 @@ def related(self, column_name: str, **kwargs: Any) -> FlaskResponse: def get_text_for_model(model: Model) -> str: if column_name in self.text_field_rel_fields: - return getattr(model, self.text_field_rel_fields.get(column_name)) + model_column_name = self.text_field_rel_fields.get(column_name) + if model_column_name: + return getattr(model, model_column_name) return str(model) if column_name not in self.allowed_rel_fields: diff --git a/tests/reports/api_tests.py b/tests/reports/api_tests.py index 2c08a10cb2482..eb5425dade989 100644 --- a/tests/reports/api_tests.py +++ b/tests/reports/api_tests.py @@ -44,8 +44,6 @@ REPORTS_COUNT = 10 -# REPORTS_LOGS_COUNT = 5 -# REPORTS_RECIPIENTS_COUNT = 5 class TestReportSchedulesApi(SupersetTestCase): @@ -95,6 +93,8 @@ def insert_report_schedule( def create_report_schedules(self): with self.create_app().app_context(): report_schedules = [] + admin_user = self.get_user("admin") + alpha_user = self.get_user("alpha") chart = db.session.query(Slice).first() example_db = get_example_database() for cx in range(REPORTS_COUNT): @@ -120,16 +120,18 @@ def create_report_schedules(self): 
type=ReportScheduleType.ALERT, name=f"name{cx}", crontab=f"*/{cx} * * * *", - sql="SELECT value from table1", + sql=f"SELECT value from table{cx}", description=f"Some description {cx}", chart=chart, database=example_db, + owners=[admin_user, alpha_user], recipients=recipients, logs=logs, ) ) yield report_schedules + report_schedules = db.session.query(ReportSchedule).all() # rollback changes (assuming cascade delete) for report_schedule in report_schedules: db.session.delete(report_schedule) @@ -167,7 +169,10 @@ def test_get_report_schedule(self): "last_value_row_json": report_schedule.last_value_row_json, "log_retention": report_schedule.log_retention, "name": report_schedule.name, - "owners": [], + "owners": [ + {"first_name": "admin", "id": 1, "last_name": "user"}, + {"first_name": "alpha", "id": 5, "last_name": "user"}, + ], "recipients": [ { "id": report_schedule.recipients[0].id, @@ -221,14 +226,24 @@ def test_get_list_report_schedule(self): "last_eval_dttm", "last_state", "name", + "owners", "recipients", "type", ] assert rv.status_code == 200 data = json.loads(rv.data.decode("utf-8")) assert data["count"] == REPORTS_COUNT - for expected_field in expected_fields: - assert expected_field in data["result"][0] + data_keys = sorted(list(data["result"][0].keys())) + assert expected_fields == data_keys + + # Assert nested fields + expected_owners_fields = ["first_name", "id", "last_name"] + data_keys = sorted(list(data["result"][0]["owners"][0].keys())) + assert expected_owners_fields == data_keys + + expected_recipients_fields = ["id", "type"] + data_keys = sorted(list(data["result"][1]["recipients"][0].keys())) + assert expected_recipients_fields == data_keys @pytest.mark.usefixtures("create_report_schedules") def test_get_list_report_schedule_sorting(self): @@ -256,11 +271,12 @@ def test_get_list_report_schedule_sorting(self): assert rv.status_code == 200 @pytest.mark.usefixtures("create_report_schedules") - def test_get_list_report_schedule_filter(self): + def 
test_get_list_report_schedule_filter_name(self): """ - ReportSchedule Api: Test filters on get list report schedules + ReportSchedule Api: Test filter name on get list report schedules """ self.login(username="admin") + # Test normal contains filter arguments = { "columns": ["name"], "filters": [{"col": "name", "opr": "ct", "value": "2"}], @@ -276,9 +292,37 @@ def test_get_list_report_schedule_filter(self): assert data["count"] == 1 assert data["result"][0] == expected_result + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter_custom(self): + """ + ReportSchedule Api: Test custom filter on get list report schedules + """ + self.login(username="admin") + # Test custom all text filter arguments = { "columns": ["name"], - "filters": [{"col": "name", "opr": "active", "value": True}], + "filters": [{"col": "name", "opr": "report_all_text", "value": "table3"}], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + expected_result = { + "name": "name3", + } + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 1 + assert data["result"][0] == expected_result + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter_active(self): + """ + ReportSchedule Api: Test active filter on get list report schedules + """ + self.login(username="admin") + arguments = { + "columns": ["name"], + "filters": [{"col": "active", "opr": "eq", "value": True}], } uri = f"api/v1/report/?q={prison.dumps(arguments)}" rv = self.get_assert_metric(uri, "get_list") @@ -287,6 +331,51 @@ def test_get_list_report_schedule_filter(self): data = json.loads(rv.data.decode("utf-8")) assert data["count"] == REPORTS_COUNT + @pytest.mark.usefixtures("create_report_schedules") + def test_get_list_report_schedule_filter_type(self): + """ + ReportSchedule Api: Test type filter on get list report schedules + """ + 
self.login(username="admin") + arguments = { + "columns": ["name"], + "filters": [ + {"col": "type", "opr": "eq", "value": ReportScheduleType.ALERT} + ], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == REPORTS_COUNT + + # Test type filter + arguments = { + "columns": ["name"], + "filters": [ + {"col": "type", "opr": "eq", "value": ReportScheduleType.REPORT} + ], + } + uri = f"api/v1/report/?q={prison.dumps(arguments)}" + rv = self.get_assert_metric(uri, "get_list") + + assert rv.status_code == 200 + data = json.loads(rv.data.decode("utf-8")) + assert data["count"] == 0 + + @pytest.mark.usefixtures("create_report_schedules") + def test_get_related_report_schedule(self): + """ + ReportSchedule Api: Test get related report schedule + """ + self.login(username="admin") + related_columns = ["created_by", "chart", "dashboard"] + for related_column in related_columns: + uri = f"api/v1/report/related/{related_column}" + rv = self.client.get(uri) + assert rv.status_code == 200 + @pytest.mark.usefixtures("create_report_schedules") def test_create_report_schedule(self): """ From ecfcf11e84338dea57f188fd2656f5817e49caab Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Tue, 10 Nov 2020 11:54:49 +0000 Subject: [PATCH 20/23] fix PUT has PATCH on active field --- superset/reports/api.py | 3 +-- superset/reports/dao.py | 28 +++++++++++++---------------- 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/superset/reports/api.py b/superset/reports/api.py index fbb25916ef505..6c5b27ef3e8c9 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -320,14 +320,13 @@ def put(self, pk: int) -> Response: return self.response_400(message="Request is not JSON") try: item = self.edit_model_schema.load(request.json) - item["layer"] = pk # This validates custom Schema with custom validations except 
ValidationError as error: return self.response_400(message=error.messages) try: new_model = UpdateReportScheduleCommand(g.user, pk, item).run() return self.response(200, id=new_model.id, result=item) - except (ReportScheduleNotFoundError) as ex: + except ReportScheduleNotFoundError: return self.response_404() except ReportScheduleInvalidError as ex: return self.response_422(message=ex.normalized_messages()) diff --git a/superset/reports/dao.py b/superset/reports/dao.py index 23d799eeb7138..e02770af90044 100644 --- a/superset/reports/dao.py +++ b/superset/reports/dao.py @@ -23,10 +23,7 @@ from superset.dao.base import BaseDAO from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.extensions import db -from superset.models.reports import ( - ReportRecipients, - ReportSchedule, -) +from superset.models.reports import ReportRecipients, ReportSchedule logger = logging.getLogger(__name__) @@ -119,17 +116,18 @@ def update( for key, value in properties.items(): if key != "recipients": setattr(model, key, value) - recipients = properties.get("recipients", []) - model.recipients = [ - ReportRecipients( - type=recipient["type"], - recipient_config_json=json.dumps( - recipient["recipient_config_json"] - ), - report_schedule=model, - ) - for recipient in recipients - ] + if "recipients" in properties: + recipients = properties["recipients"] + model.recipients = [ + ReportRecipients( + type=recipient["type"], + recipient_config_json=json.dumps( + recipient["recipient_config_json"] + ), + report_schedule=model, + ) + for recipient in recipients + ] db.session.merge(model) if commit: db.session.commit() From 039bb36bfd4a10cf16da013efe6b0a88781754dc Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Tue, 10 Nov 2020 15:12:44 +0000 Subject: [PATCH 21/23] create feature flag --- superset/app.py | 5 +++-- superset/config.py | 2 ++ tests/superset_test_config.py | 1 + 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/superset/app.py 
b/superset/app.py index ba4c5204b98d0..2396e639ed0ad 100644 --- a/superset/app.py +++ b/superset/app.py @@ -208,8 +208,9 @@ def init_views(self) -> None: appbuilder.add_api(DatasetRestApi) appbuilder.add_api(QueryRestApi) appbuilder.add_api(SavedQueryRestApi) - appbuilder.add_api(ReportScheduleRestApi) - appbuilder.add_api(ReportExecutionLogRestApi) + if feature_flag_manager.is_feature_enabled("ALERT_REPORTS"): + appbuilder.add_api(ReportScheduleRestApi) + appbuilder.add_api(ReportExecutionLogRestApi) # # Setup regular views # diff --git a/superset/config.py b/superset/config.py index 5af1bc220109f..d54e39ee71338 100644 --- a/superset/config.py +++ b/superset/config.py @@ -332,6 +332,8 @@ def _try_json_readsha( # pylint: disable=unused-argument # a custom security config could potentially give access to setting filters on # tables that users do not have access to. "ROW_LEVEL_SECURITY": False, + # Enables Alerts and reports new implementation + "ALERT_REPORTS": False, } # Set the default view to card/grid view if thumbnail support is enabled. 
diff --git a/tests/superset_test_config.py b/tests/superset_test_config.py index f74272e61f165..012cea8c53716 100644 --- a/tests/superset_test_config.py +++ b/tests/superset_test_config.py @@ -57,6 +57,7 @@ "ENABLE_TEMPLATE_PROCESSING": True, "ENABLE_REACT_CRUD_VIEWS": os.environ.get("ENABLE_REACT_CRUD_VIEWS", False), "ROW_LEVEL_SECURITY": True, + "ALERT_REPORTS": True, } From 3dfe0352f8faae74fa515ff2975f8c1cbfee89a8 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Tue, 10 Nov 2020 15:51:32 +0000 Subject: [PATCH 22/23] fix lint --- superset/app.py | 1 + 1 file changed, 1 insertion(+) diff --git a/superset/app.py b/superset/app.py index 2396e639ed0ad..806dbfd482179 100644 --- a/superset/app.py +++ b/superset/app.py @@ -125,6 +125,7 @@ def init_views(self) -> None: # # pylint: disable=too-many-locals # pylint: disable=too-many-statements + # pylint: disable=too-many-branches from superset.annotation_layers.api import AnnotationLayerRestApi from superset.annotation_layers.annotations.api import AnnotationRestApi from superset.cachekeys.api import CacheRestApi From d07e27f52e4ceea04e55ad49b41f4e90de4d5721 Mon Sep 17 00:00:00 2001 From: Daniel Gaspar Date: Wed, 11 Nov 2020 10:51:03 +0000 Subject: [PATCH 23/23] address comments --- superset/reports/api.py | 6 ------ superset/reports/commands/create.py | 7 ++++++- superset/reports/commands/delete.py | 1 - superset/reports/commands/exceptions.py | 17 +++++++++-------- superset/reports/schemas.py | 2 +- 5 files changed, 16 insertions(+), 17 deletions(-) diff --git a/superset/reports/api.py b/superset/reports/api.py index 6c5b27ef3e8c9..808cbc28f7fbd 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -32,10 +32,8 @@ from superset.reports.commands.delete import DeleteReportScheduleCommand from superset.reports.commands.exceptions import ( ReportScheduleBulkDeleteFailedError, - ReportScheduleBulkDeleteIntegrityError, ReportScheduleCreateFailedError, ReportScheduleDeleteFailedError, 
ReportScheduleDeleteIntegrityError, ReportScheduleInvalidError, ReportScheduleNotFoundError, ReportScheduleUpdateFailedError, @@ -200,8 +198,6 @@ def delete(self, pk: int) -> Response: return self.response(200, message="OK") except ReportScheduleNotFoundError as ex: return self.response_404() - except ReportScheduleDeleteIntegrityError as ex: - return self.response_422(message=str(ex)) except ReportScheduleDeleteFailedError as ex: logger.error( "Error deleting report schedule %s: %s", @@ -386,7 +382,5 @@ def bulk_delete(self, **kwargs: Any) -> Response: ) except ReportScheduleNotFoundError: return self.response_404() - except ReportScheduleBulkDeleteIntegrityError as ex: - return self.response_422(message=str(ex)) except ReportScheduleBulkDeleteFailedError as ex: return self.response_422(message=str(ex)) diff --git a/superset/reports/commands/create.py b/superset/reports/commands/create.py index e8638f71a4678..ce4cc1c0b949e 100644 --- a/superset/reports/commands/create.py +++ b/superset/reports/commands/create.py @@ -33,6 +33,7 @@ ReportScheduleCreateFailedError, ReportScheduleInvalidError, ReportScheduleNameUniquenessValidationError, + ReportScheduleRequiredTypeValidationError, ) from superset.reports.dao import ReportScheduleDAO @@ -57,7 +58,11 @@ def validate(self) -> None: exceptions: List[ValidationError] = list() owner_ids: Optional[List[int]] = self._properties.get("owners") name = self._properties.get("name", "") - report_type = self._properties.get("type", ReportScheduleType.ALERT) + report_type = self._properties.get("type") + + # Validate type is required + if not report_type: + exceptions.append(ReportScheduleRequiredTypeValidationError()) # Validate name uniqueness if not ReportScheduleDAO.validate_update_uniqueness(name): diff --git a/superset/reports/commands/delete.py b/superset/reports/commands/delete.py index ab48efe671cfb..79a0f4455b740 100644 --- a/superset/reports/commands/delete.py +++ b/superset/reports/commands/delete.py @@ -52,4 +52,3 @@ 
def validate(self) -> None: self._model = ReportScheduleDAO.find_by_id(self._model_id) if not self._model: raise ReportScheduleNotFoundError() - # TODO check integrity diff --git a/superset/reports/commands/exceptions.py b/superset/reports/commands/exceptions.py index f807ef9a263e5..23a21425bd92b 100644 --- a/superset/reports/commands/exceptions.py +++ b/superset/reports/commands/exceptions.py @@ -61,6 +61,15 @@ def __init__(self) -> None: super().__init__(_("Database is required for alerts"), field_name="database") +class ReportScheduleRequiredTypeValidationError(ValidationError): + """ + Marshmallow type validation error for report schedule missing type field + """ + + def __init__(self) -> None: + super().__init__(_("Type is required"), field_name="type") + + class ReportScheduleChartOrDashboardValidationError(ValidationError): """ Marshmallow validation error for report schedule accept exlusive chart or dashboard @@ -94,14 +103,6 @@ class ReportScheduleDeleteFailedError(CommandException): message = _("Report Schedule delete failed.") -class ReportScheduleDeleteIntegrityError(CommandException): - message = _("Report Schedule has associated logs or recipients.") - - -class ReportScheduleBulkDeleteIntegrityError(CommandException): - message = _("Report Schedule has associated logs or recipients.") - - class ReportScheduleNameUniquenessValidationError(ValidationError): """ Marshmallow validation error for Report Schedule name already exists diff --git a/superset/reports/schemas.py b/superset/reports/schemas.py index 5da7a1bf0b0b1..7613278addcfb 100644 --- a/superset/reports/schemas.py +++ b/superset/reports/schemas.py @@ -49,7 +49,7 @@ description_description = "Use a nice description to give context to this Alert/Report" context_markdown_description = "Markdown description" crontab_description = ( - "A CRON-like expression." + "A CRON expression." "[Crontab Guru](https://crontab.guru/) is " "a helpful resource that can help you craft a CRON expression." )