feat: new reports models api (#11606)

* feat: new report schedule models

* lint and unique constraint

* support sqlite

* fix sqlite

* add audit mixin and minor fixes

* feat(api): alerts and reports REST API

* feat: new report schedule models

* lint and unique constraint

* support sqlite

* fix sqlite

* add audit mixin and minor fixes

* feat(api): alerts and reports REST API

* draft working version

* add tests

* test

* black

* remove copy pasta

* solve dashboard object representation being used on cache

* tests and custom filter

* fix PUT behaving as PATCH on active field

* create feature flag

* fix lint

* address comments
This commit is contained in:
Daniel Vaz Gaspar 2020-11-12 21:21:01 +00:00 committed by GitHub
parent 1dc4c4746e
commit 12cb27f5cb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
24 changed files with 2449 additions and 11 deletions

View File

@ -125,6 +125,7 @@ class SupersetAppInitializer:
# #
# pylint: disable=too-many-locals # pylint: disable=too-many-locals
# pylint: disable=too-many-statements # pylint: disable=too-many-statements
# pylint: disable=too-many-branches
from superset.annotation_layers.api import AnnotationLayerRestApi from superset.annotation_layers.api import AnnotationLayerRestApi
from superset.annotation_layers.annotations.api import AnnotationRestApi from superset.annotation_layers.annotations.api import AnnotationRestApi
from superset.cachekeys.api import CacheRestApi from superset.cachekeys.api import CacheRestApi
@ -148,6 +149,8 @@ class SupersetAppInitializer:
from superset.datasets.api import DatasetRestApi from superset.datasets.api import DatasetRestApi
from superset.queries.api import QueryRestApi from superset.queries.api import QueryRestApi
from superset.queries.saved_queries.api import SavedQueryRestApi from superset.queries.saved_queries.api import SavedQueryRestApi
from superset.reports.api import ReportScheduleRestApi
from superset.reports.logs.api import ReportExecutionLogRestApi
from superset.views.access_requests import AccessRequestsModelView from superset.views.access_requests import AccessRequestsModelView
from superset.views.alerts import ( from superset.views.alerts import (
AlertLogModelView, AlertLogModelView,
@ -206,6 +209,9 @@ class SupersetAppInitializer:
appbuilder.add_api(DatasetRestApi) appbuilder.add_api(DatasetRestApi)
appbuilder.add_api(QueryRestApi) appbuilder.add_api(QueryRestApi)
appbuilder.add_api(SavedQueryRestApi) appbuilder.add_api(SavedQueryRestApi)
if feature_flag_manager.is_feature_enabled("ALERT_REPORTS"):
appbuilder.add_api(ReportScheduleRestApi)
appbuilder.add_api(ReportExecutionLogRestApi)
# #
# Setup regular views # Setup regular views
# #

View File

@ -332,6 +332,8 @@ DEFAULT_FEATURE_FLAGS: Dict[str, bool] = {
# a custom security config could potentially give access to setting filters on # a custom security config could potentially give access to setting filters on
# tables that users do not have access to. # tables that users do not have access to.
"ROW_LEVEL_SECURITY": False, "ROW_LEVEL_SECURITY": False,
# Enables Alerts and reports new implementation
"ALERT_REPORTS": False,
} }
# Set the default view to card/grid view if thumbnail support is enabled. # Set the default view to card/grid view if thumbnail support is enabled.

View File

@ -14,7 +14,7 @@
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional, Type
from flask_appbuilder.models.filters import BaseFilter from flask_appbuilder.models.filters import BaseFilter
from flask_appbuilder.models.sqla import Model from flask_appbuilder.models.sqla import Model
@ -35,7 +35,7 @@ class BaseDAO:
Base DAO, implement base CRUD sqlalchemy operations Base DAO, implement base CRUD sqlalchemy operations
""" """
model_cls: Optional[Model] = None model_cls: Optional[Type[Model]] = None
""" """
Child classes need to state the Model class so they don't need to implement basic Child classes need to state the Model class so they don't need to implement basic
create, update and delete methods create, update and delete methods

View File

@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
# pylint: disable=line-too-long,unused-argument,ungrouped-imports
"""A collection of ORM sqlalchemy models for Superset""" """A collection of ORM sqlalchemy models for Superset"""
import enum import enum
@ -30,7 +29,7 @@ from sqlalchemy import (
Table, Table,
Text, Text,
) )
from sqlalchemy.orm import relationship from sqlalchemy.orm import backref, relationship
from sqlalchemy.schema import UniqueConstraint from sqlalchemy.schema import UniqueConstraint
from superset.extensions import security_manager from superset.extensions import security_manager
@ -50,8 +49,8 @@ class ReportScheduleType(str, enum.Enum):
class ReportScheduleValidatorType(str, enum.Enum): class ReportScheduleValidatorType(str, enum.Enum):
""" Validator types for alerts """ """ Validator types for alerts """
not_null = "not null" NOT_NULL = "not null"
operator = "operator" OPERATOR = "operator"
class ReportRecipientType(str, enum.Enum): class ReportRecipientType(str, enum.Enum):
@ -143,7 +142,9 @@ class ReportRecipients(
Integer, ForeignKey("report_schedule.id"), nullable=False Integer, ForeignKey("report_schedule.id"), nullable=False
) )
report_schedule = relationship( report_schedule = relationship(
ReportSchedule, backref="recipients", foreign_keys=[report_schedule_id] ReportSchedule,
backref=backref("recipients", cascade="all,delete,delete-orphan"),
foreign_keys=[report_schedule_id],
) )
@ -173,5 +174,7 @@ class ReportExecutionLog(Model): # pylint: disable=too-few-public-methods
Integer, ForeignKey("report_schedule.id"), nullable=False Integer, ForeignKey("report_schedule.id"), nullable=False
) )
report_schedule = relationship( report_schedule = relationship(
ReportSchedule, backref="logs", foreign_keys=[report_schedule_id] ReportSchedule,
backref=backref("logs", cascade="all,delete"),
foreign_keys=[report_schedule_id],
) )

View File

@ -80,7 +80,7 @@ class Slice(
primaryjoin="and_(Slice.datasource_id == SqlaTable.id, " primaryjoin="and_(Slice.datasource_id == SqlaTable.id, "
"Slice.datasource_type == 'table')", "Slice.datasource_type == 'table')",
remote_side="SqlaTable.id", remote_side="SqlaTable.id",
lazy="joined", lazy="subquery",
) )
token = "" token = ""

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

386
superset/reports/api.py Normal file
View File

@ -0,0 +1,386 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Any
from flask import g, request, Response
from flask_appbuilder.api import expose, permission_name, protect, rison, safe
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
from marshmallow import ValidationError
from superset.charts.filters import ChartFilter
from superset.constants import RouteMethod
from superset.dashboards.filters import DashboardFilter
from superset.models.reports import ReportSchedule
from superset.reports.commands.bulk_delete import BulkDeleteReportScheduleCommand
from superset.reports.commands.create import CreateReportScheduleCommand
from superset.reports.commands.delete import DeleteReportScheduleCommand
from superset.reports.commands.exceptions import (
ReportScheduleBulkDeleteFailedError,
ReportScheduleCreateFailedError,
ReportScheduleDeleteFailedError,
ReportScheduleInvalidError,
ReportScheduleNotFoundError,
ReportScheduleUpdateFailedError,
)
from superset.reports.commands.update import UpdateReportScheduleCommand
from superset.reports.filters import ReportScheduleAllTextFilter
from superset.reports.schemas import (
get_delete_ids_schema,
openapi_spec_methods_override,
ReportSchedulePostSchema,
ReportSchedulePutSchema,
)
from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
logger = logging.getLogger(__name__)
class ReportScheduleRestApi(BaseSupersetModelRestApi):
    """REST API for CRUD operations on Report Schedules (alerts and reports)."""

    datamodel = SQLAInterface(ReportSchedule)

    include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
        RouteMethod.RELATED,
        "bulk_delete",  # not using RouteMethod since locally defined
    }
    class_permission_name = "ReportSchedule"
    resource_name = "report"
    allow_browser_login = True

    show_columns = [
        "id",
        "name",
        "type",
        "description",
        "context_markdown",
        "active",
        "crontab",
        "chart.id",
        "dashboard.id",
        "database.id",
        "owners.id",
        "owners.first_name",
        "owners.last_name",
        "last_eval_dttm",
        "last_state",
        "last_value",
        "last_value_row_json",
        "validator_type",
        "validator_config_json",
        "log_retention",
        "grace_period",
        "recipients.id",
        "recipients.type",
        "recipients.recipient_config_json",
    ]
    # Extra columns selected (but not serialized) so the chart's datasource
    # can be resolved when showing a single schedule.
    show_select_columns = show_columns + [
        "chart.datasource_id",
        "chart.datasource_type",
    ]
    list_columns = [
        "active",
        "changed_by.first_name",
        "changed_by.last_name",
        "changed_on",
        "changed_on_delta_humanized",
        "created_by.first_name",
        "created_by.last_name",
        "created_on",
        "id",
        "last_eval_dttm",
        "last_state",
        "name",
        "owners.id",
        "owners.first_name",
        "owners.last_name",
        "recipients.id",
        "recipients.type",
        "type",
    ]
    add_columns = [
        "active",
        "chart",
        "context_markdown",
        "crontab",
        "dashboard",
        "database",
        "description",
        "grace_period",
        "log_retention",
        "name",
        "owners",
        "recipients",
        "sql",
        "type",
        "validator_config_json",
        "validator_type",
    ]
    edit_columns = add_columns
    add_model_schema = ReportSchedulePostSchema()
    edit_model_schema = ReportSchedulePutSchema()
    order_columns = [
        "active",
        "created_by.first_name",
        "changed_by.first_name",
        "changed_on",
        "changed_on_delta_humanized",
        "created_on",
        "name",
        "type",
    ]
    search_columns = ["name", "active", "created_by", "type"]
    search_filters = {"name": [ReportScheduleAllTextFilter]}
    allowed_rel_fields = {"created_by", "chart", "dashboard"}
    # Related endpoints honor the standard chart/dashboard access filters.
    filter_rel_fields = {
        "chart": [["id", ChartFilter, lambda: []]],
        "dashboard": [["id", DashboardFilter, lambda: []]],
    }
    text_field_rel_fields = {"dashboard": "dashboard_title"}
    apispec_parameter_schemas = {
        "get_delete_ids_schema": get_delete_ids_schema,
    }
    openapi_spec_tag = "Report Schedules"
    openapi_spec_methods = openapi_spec_methods_override

    @expose("/<int:pk>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("delete")
    def delete(self, pk: int) -> Response:
        """Delete a Report Schedule
        ---
        delete:
          description: >-
            Delete a Report Schedule
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The report schedule pk
          responses:
            200:
              description: Item deleted
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            DeleteReportScheduleCommand(g.user, pk).run()
            return self.response(200, message="OK")
        # fix: previously bound the exception to an unused `ex` variable
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleDeleteFailedError as ex:
            logger.error(
                "Error deleting report schedule %s: %s",
                self.__class__.__name__,
                str(ex),
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["POST"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("post")
    def post(self) -> Response:
        """Creates a new Report Schedule
        ---
        post:
          description: >-
            Create a new Report Schedule
          requestBody:
            description: Report Schedule schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
          responses:
            201:
              description: Report schedule added
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.post'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            item = self.add_model_schema.load(request.json)
        # This validates custom Schema with custom validations
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = CreateReportScheduleCommand(g.user, item).run()
            return self.response(201, id=new_model.id, result=item)
        # A referenced chart/dashboard/database id that does not exist is a
        # client error on create, hence 400 rather than 404.
        except ReportScheduleNotFoundError as ex:
            return self.response_400(message=str(ex))
        except ReportScheduleInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ReportScheduleCreateFailedError as ex:
            logger.error(
                "Error creating report schedule %s: %s",
                self.__class__.__name__,
                str(ex),
            )
            return self.response_422(message=str(ex))

    @expose("/<int:pk>", methods=["PUT"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("put")
    def put(self, pk: int) -> Response:
        """Updates a Report Schedule
        ---
        put:
          description: >-
            Updates a Report Schedule
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The Report Schedule pk
          requestBody:
            description: Report Schedule schema
            required: true
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
          responses:
            200:
              description: Report Schedule changed
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        type: number
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.put'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            500:
              $ref: '#/components/responses/500'
        """
        if not request.is_json:
            return self.response_400(message="Request is not JSON")
        try:
            item = self.edit_model_schema.load(request.json)
        # This validates custom Schema with custom validations
        except ValidationError as error:
            return self.response_400(message=error.messages)
        try:
            new_model = UpdateReportScheduleCommand(g.user, pk, item).run()
            return self.response(200, id=new_model.id, result=item)
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleInvalidError as ex:
            return self.response_422(message=ex.normalized_messages())
        except ReportScheduleUpdateFailedError as ex:
            logger.error(
                "Error updating report %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

    @expose("/", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @rison(get_delete_ids_schema)
    def bulk_delete(self, **kwargs: Any) -> Response:
        """Bulk delete Report Schedules
        ---
        delete:
          description: >-
            Deletes multiple report schedules in a bulk operation.
          parameters:
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_delete_ids_schema'
          responses:
            200:
              description: Report Schedule bulk delete
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        item_ids = kwargs["rison"]
        try:
            BulkDeleteReportScheduleCommand(g.user, item_ids).run()
            return self.response(
                200,
                message=ngettext(
                    "Deleted %(num)d report schedule",
                    "Deleted %(num)d report schedules",
                    num=len(item_ids),
                ),
            )
        except ReportScheduleNotFoundError:
            return self.response_404()
        except ReportScheduleBulkDeleteFailedError as ex:
            return self.response_422(message=str(ex))

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -0,0 +1,63 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Any, Dict, List
from marshmallow import ValidationError
from superset.charts.dao import ChartDAO
from superset.commands.base import BaseCommand
from superset.dashboards.dao import DashboardDAO
from superset.reports.commands.exceptions import (
ChartNotFoundValidationError,
DashboardNotFoundValidationError,
ReportScheduleChartOrDashboardValidationError,
)
logger = logging.getLogger(__name__)
class BaseReportScheduleCommand(BaseCommand):
    """Shared validation helpers for report schedule commands."""

    _properties: Dict[str, Any]

    def run(self) -> Any:
        pass

    def validate(self) -> None:
        pass

    def validate_chart_dashboard(
        self, exceptions: List[ValidationError], update: bool = False
    ) -> None:
        """Validate that the schedule targets a chart or a dashboard (not both),
        resolving the supplied id into its model instance in ``_properties``."""
        chart_pk = self._properties.get("chart")
        dashboard_pk = self._properties.get("dashboard")

        # Supplying both targets at once is never allowed.
        if chart_pk and dashboard_pk:
            exceptions.append(ReportScheduleChartOrDashboardValidationError())

        if chart_pk:
            resolved_chart = ChartDAO.find_by_id(chart_pk)
            if not resolved_chart:
                exceptions.append(ChartNotFoundValidationError())
            self._properties["chart"] = resolved_chart
        elif dashboard_pk:
            resolved_dashboard = DashboardDAO.find_by_id(dashboard_pk)
            if not resolved_dashboard:
                exceptions.append(DashboardNotFoundValidationError())
            self._properties["dashboard"] = resolved_dashboard
        elif not update:
            # On create, one of the two targets is mandatory.
            exceptions.append(ReportScheduleChartOrDashboardValidationError())

View File

@ -0,0 +1,53 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import List, Optional
from flask_appbuilder.security.sqla.models import User
from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.models.reports import ReportSchedule
from superset.reports.commands.exceptions import (
ReportScheduleBulkDeleteFailedError,
ReportScheduleNotFoundError,
)
from superset.reports.dao import ReportScheduleDAO
logger = logging.getLogger(__name__)
class BulkDeleteReportScheduleCommand(BaseCommand):
    """Delete several report schedules in a single operation."""

    def __init__(self, user: User, model_ids: List[int]):
        self._actor = user
        self._model_ids = model_ids
        self._models: Optional[List[ReportSchedule]] = None

    def run(self) -> None:
        self.validate()
        try:
            ReportScheduleDAO.bulk_delete(self._models)
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleBulkDeleteFailedError()
        return None

    def validate(self) -> None:
        """Populate the target models; every requested id must exist."""
        self._models = ReportScheduleDAO.find_by_ids(self._model_ids)
        all_found = self._models and len(self._models) == len(self._model_ids)
        if not all_found:
            raise ReportScheduleNotFoundError()

View File

@ -0,0 +1,98 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from typing import Any, Dict, List, Optional
from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User
from marshmallow import ValidationError
from superset.commands.utils import populate_owners
from superset.dao.exceptions import DAOCreateFailedError
from superset.databases.dao import DatabaseDAO
from superset.models.reports import ReportScheduleType
from superset.reports.commands.base import BaseReportScheduleCommand
from superset.reports.commands.exceptions import (
DatabaseNotFoundValidationError,
ReportScheduleAlertRequiredDatabaseValidationError,
ReportScheduleCreateFailedError,
ReportScheduleInvalidError,
ReportScheduleNameUniquenessValidationError,
ReportScheduleRequiredTypeValidationError,
)
from superset.reports.dao import ReportScheduleDAO
logger = logging.getLogger(__name__)
class CreateReportScheduleCommand(BaseReportScheduleCommand):
    """Command that validates and persists a new report schedule."""

    def __init__(self, user: User, data: Dict[str, Any]):
        self._actor = user
        self._properties = data.copy()

    def run(self) -> Model:
        """Validate the payload and create the report schedule.

        :raises ReportScheduleInvalidError: if any field validation fails
        :raises ReportScheduleCreateFailedError: if the DAO layer fails
        """
        self.validate()
        try:
            report_schedule = ReportScheduleDAO.create(self._properties)
        except DAOCreateFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleCreateFailedError()
        return report_schedule

    def validate(self) -> None:
        # idiom: [] over list()
        exceptions: List[ValidationError] = []
        owner_ids: Optional[List[int]] = self._properties.get("owners")
        name = self._properties.get("name", "")
        report_type = self._properties.get("type")

        # Validate type is required
        if not report_type:
            exceptions.append(ReportScheduleRequiredTypeValidationError())

        # Validate name uniqueness
        if not ReportScheduleDAO.validate_update_uniqueness(name):
            exceptions.append(ReportScheduleNameUniquenessValidationError())

        # Alerts must reference a database to run their SQL against
        if report_type == ReportScheduleType.ALERT:
            database_id = self._properties.get("database")
            if not database_id:
                exceptions.append(ReportScheduleAlertRequiredDatabaseValidationError())
            else:
                database = DatabaseDAO.find_by_id(database_id)
                if not database:
                    exceptions.append(DatabaseNotFoundValidationError())
                self._properties["database"] = database

        # Validate chart or dashboard relations
        self.validate_chart_dashboard(exceptions)

        # The validator config is persisted as serialized JSON text
        if "validator_config_json" in self._properties:
            self._properties["validator_config_json"] = json.dumps(
                self._properties["validator_config_json"]
            )
        try:
            owners = populate_owners(self._actor, owner_ids)
            self._properties["owners"] = owners
        except ValidationError as ex:
            exceptions.append(ex)
        if exceptions:
            exception = ReportScheduleInvalidError()
            exception.add_list(exceptions)
            raise exception

View File

@ -0,0 +1,54 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Optional
from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User
from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.models.reports import ReportSchedule
from superset.reports.commands.exceptions import (
ReportScheduleDeleteFailedError,
ReportScheduleNotFoundError,
)
from superset.reports.dao import ReportScheduleDAO
logger = logging.getLogger(__name__)
class DeleteReportScheduleCommand(BaseCommand):
    """Delete a single report schedule by primary key."""

    def __init__(self, user: User, model_id: int):
        self._actor = user
        self._model_id = model_id
        self._model: Optional[ReportSchedule] = None

    def run(self) -> Model:
        self.validate()
        try:
            deleted_model = ReportScheduleDAO.delete(self._model)
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            raise ReportScheduleDeleteFailedError()
        return deleted_model

    def validate(self) -> None:
        """Populate the target model; a missing id surfaces as a 404 upstream."""
        self._model = ReportScheduleDAO.find_by_id(self._model_id)
        if self._model is None:
            raise ReportScheduleNotFoundError()

View File

@ -0,0 +1,112 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask_babel import lazy_gettext as _

from superset.commands.exceptions import (
    CommandException,
    CommandInvalidError,
    CreateFailedError,
    DeleteFailedError,
    UpdateFailedError,
    ValidationError,
)
class DatabaseNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error raised when the referenced database does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Database does not exist"), field_name="database")
class DashboardNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error raised when the referenced dashboard does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Dashboard does not exist"), field_name="dashboard")
class ChartNotFoundValidationError(ValidationError):
    """
    Marshmallow validation error raised when the referenced chart does not exist
    """

    def __init__(self) -> None:
        super().__init__(_("Chart does not exist"), field_name="chart")
class ReportScheduleAlertRequiredDatabaseValidationError(ValidationError):
    """
    Marshmallow validation error raised when an alert schedule is missing the
    database field
    """

    def __init__(self) -> None:
        super().__init__(_("Database is required for alerts"), field_name="database")
class ReportScheduleRequiredTypeValidationError(ValidationError):
    """
    Marshmallow validation error raised when a report schedule is missing the
    type field
    """

    def __init__(self) -> None:
        super().__init__(_("Type is required"), field_name="type")
class ReportScheduleChartOrDashboardValidationError(ValidationError):
    """
    Marshmallow validation error raised when a report schedule targets both a
    chart and a dashboard; exactly one of the two is allowed
    """

    def __init__(self) -> None:
        super().__init__(_("Choose a chart or dashboard not both"), field_name="chart")
class ReportScheduleInvalidError(CommandInvalidError):
    """Raised when one or more report schedule fields fail validation."""

    message = _("Report Schedule parameters are invalid.")
class ReportScheduleBulkDeleteFailedError(DeleteFailedError):
    """Raised when the DAO layer fails to bulk-delete report schedules."""

    message = _("Report Schedule could not be deleted.")
class ReportScheduleCreateFailedError(CreateFailedError):
    """Raised when the DAO layer fails to create a report schedule."""

    message = _("Report Schedule could not be created.")
class ReportScheduleUpdateFailedError(UpdateFailedError):
    """Raised when the DAO layer fails to update a report schedule.

    Fix: previously inherited ``CreateFailedError`` (copy-paste error); an
    update failure must derive from ``UpdateFailedError``.
    """

    message = _("Report Schedule could not be updated.")
class ReportScheduleNotFoundError(CommandException):
    """Raised when a report schedule id cannot be resolved to a model."""

    message = _("Report Schedule not found.")
class ReportScheduleDeleteFailedError(DeleteFailedError):
    """Raised when the DAO layer fails to delete a report schedule.

    Fix: previously inherited bare ``CommandException``; derive from
    ``DeleteFailedError`` for consistency with the bulk-delete error.
    """

    message = _("Report Schedule delete failed.")
class ReportScheduleNameUniquenessValidationError(ValidationError):
    """
    Marshmallow validation error raised when a report schedule name is already
    taken by another schedule
    """

    def __init__(self) -> None:
        super().__init__([_("Name must be unique")], field_name="name")

View File

@ -0,0 +1,101 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Any, Dict, List, Optional
from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User
from marshmallow import ValidationError
from superset.commands.utils import populate_owners
from superset.dao.exceptions import DAOUpdateFailedError
from superset.databases.dao import DatabaseDAO
from superset.models.reports import ReportSchedule, ReportScheduleType
from superset.reports.commands.base import BaseReportScheduleCommand
from superset.reports.commands.exceptions import (
DatabaseNotFoundValidationError,
ReportScheduleInvalidError,
ReportScheduleNameUniquenessValidationError,
ReportScheduleNotFoundError,
ReportScheduleUpdateFailedError,
)
from superset.reports.dao import ReportScheduleDAO
logger = logging.getLogger(__name__)
class UpdateReportScheduleCommand(BaseReportScheduleCommand):
    """Command that validates and persists changes to an existing report schedule."""

    def __init__(self, user: User, model_id: int, data: Dict[str, Any]):
        self._actor = user
        self._model_id = model_id
        # Copy so validation can mutate the payload without side effects on the caller
        self._properties = data.copy()
        self._model: Optional[ReportSchedule] = None

    def run(self) -> Model:
        """
        Validate the payload and update the report schedule.

        :returns: the updated ``ReportSchedule`` model
        :raises ReportScheduleNotFoundError: if the schedule does not exist
        :raises ReportScheduleInvalidError: if the payload fails validation
        :raises ReportScheduleUpdateFailedError: if the DAO update fails
        """
        self.validate()
        try:
            report_schedule = ReportScheduleDAO.update(self._model, self._properties)
        except DAOUpdateFailedError as ex:
            logger.exception(ex.exception)
            # Chain the DAO error so the root cause stays inspectable
            raise ReportScheduleUpdateFailedError() from ex
        return report_schedule

    def validate(self) -> None:
        """
        Collect all validation errors for the update payload and raise them
        together as a single ``ReportScheduleInvalidError``.
        """
        exceptions: List[ValidationError] = []
        owner_ids: Optional[List[int]] = self._properties.get("owners")
        report_type = self._properties.get("type", ReportScheduleType.ALERT)
        name = self._properties.get("name", "")
        self._model = ReportScheduleDAO.find_by_id(self._model_id)

        # Does the report exist?
        if not self._model:
            raise ReportScheduleNotFoundError()

        # Validate name uniqueness (excluding the schedule being updated)
        if not ReportScheduleDAO.validate_update_uniqueness(
            name, report_schedule_id=self._model_id
        ):
            exceptions.append(ReportScheduleNameUniquenessValidationError())

        # validate relation by report type; fall back to the stored type
        if not report_type:
            report_type = self._model.type
        if report_type == ReportScheduleType.ALERT:
            database_id = self._properties.get("database")
            # If database_id was sent let's validate it exists
            if database_id:
                database = DatabaseDAO.find_by_id(database_id)
                if not database:
                    exceptions.append(DatabaseNotFoundValidationError())
                # Replace the id with the model instance for the DAO layer
                self._properties["database"] = database

        # Validate chart or dashboard relations
        self.validate_chart_dashboard(exceptions, update=True)

        # Validate/Populate owner; default to the current owners when omitted
        if owner_ids is None:
            owner_ids = [owner.id for owner in self._model.owners]
        try:
            owners = populate_owners(self._actor, owner_ids)
            self._properties["owners"] = owners
        except ValidationError as ex:
            exceptions.append(ex)
        if exceptions:
            exception = ReportScheduleInvalidError()
            exception.add_list(exceptions)
            raise exception

137
superset/reports/dao.py Normal file
View File

@ -0,0 +1,137 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from typing import Any, Dict, List, Optional

from flask_appbuilder import Model
from sqlalchemy.exc import SQLAlchemyError

from superset.dao.base import BaseDAO
from superset.dao.exceptions import (
    DAOCreateFailedError,
    DAODeleteFailedError,
    DAOUpdateFailedError,
)
from superset.extensions import db
from superset.models.reports import ReportRecipients, ReportSchedule
logger = logging.getLogger(__name__)
class ReportScheduleDAO(BaseDAO):
    """Data access layer for report schedules and their nested recipients."""

    model_cls = ReportSchedule

    @staticmethod
    def bulk_delete(
        models: Optional[List[ReportSchedule]], commit: bool = True
    ) -> None:
        """
        Delete a list of report schedules.

        Owner associations are cleared first so the secondary table does not
        keep dangling rows.

        :raises DAODeleteFailedError: on any database error (session rolled back)
        """
        item_ids = [model.id for model in models] if models else []
        try:
            # Clean owners secondary table
            report_schedules = (
                db.session.query(ReportSchedule)
                .filter(ReportSchedule.id.in_(item_ids))
                .all()
            )
            for report_schedule in report_schedules:
                report_schedule.owners = []
            for report_schedule in report_schedules:
                db.session.delete(report_schedule)
            if commit:
                db.session.commit()
        except SQLAlchemyError as ex:
            if commit:
                db.session.rollback()
            # Chain the original error so callers can inspect the root cause
            raise DAODeleteFailedError() from ex

    @staticmethod
    def validate_update_uniqueness(
        name: str, report_schedule_id: Optional[int] = None
    ) -> bool:
        """
        Validate if this name is unique.

        :param name: The report schedule name
        :param report_schedule_id: The report schedule current id
            (only for validating on updates)
        :return: True when no other schedule uses this name
        """
        query = db.session.query(ReportSchedule).filter(ReportSchedule.name == name)
        if report_schedule_id:
            query = query.filter(ReportSchedule.id != report_schedule_id)
        return not db.session.query(query.exists()).scalar()

    @classmethod
    def create(cls, properties: Dict[str, Any], commit: bool = True) -> Model:
        """
        Create a report schedule and nested recipients.

        :raises DAOCreateFailedError: on any database error (session rolled back)
        """
        try:
            model = ReportSchedule()
            for key, value in properties.items():
                if key != "recipients":
                    setattr(model, key, value)
            recipients = properties.get("recipients", [])
            for recipient in recipients:
                model.recipients.append(  # pylint: disable=no-member
                    ReportRecipients(
                        type=recipient["type"],
                        recipient_config_json=json.dumps(
                            recipient["recipient_config_json"]
                        ),
                    )
                )
            db.session.add(model)
            if commit:
                db.session.commit()
            return model
        except SQLAlchemyError as ex:
            db.session.rollback()
            raise DAOCreateFailedError() from ex

    @classmethod
    def update(
        cls, model: Model, properties: Dict[str, Any], commit: bool = True
    ) -> Model:
        """
        Update a report schedule, replacing nested recipients when provided.

        :raises DAOUpdateFailedError: on any database error (session rolled back)
        """
        try:
            for key, value in properties.items():
                if key != "recipients":
                    setattr(model, key, value)
            if "recipients" in properties:
                recipients = properties["recipients"]
                model.recipients = [
                    ReportRecipients(
                        type=recipient["type"],
                        recipient_config_json=json.dumps(
                            recipient["recipient_config_json"]
                        ),
                        report_schedule=model,
                    )
                    for recipient in recipients
                ]
            db.session.merge(model)
            if commit:
                db.session.commit()
            return model
        except SQLAlchemyError as ex:
            db.session.rollback()
            # Bug fix: update must raise an *update* error, not a create error,
            # so UpdateReportScheduleCommand can catch DAOUpdateFailedError.
            raise DAOUpdateFailedError() from ex

View File

@ -0,0 +1,41 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any
from flask_babel import lazy_gettext as _
from sqlalchemy import or_
from sqlalchemy.orm.query import Query
from superset.models.reports import ReportSchedule
from superset.views.base import BaseFilter
class ReportScheduleAllTextFilter(BaseFilter):  # pylint: disable=too-few-public-methods
    """Custom list filter matching name, description or SQL in a single search box."""

    name = _("All Text")
    arg_name = "report_all_text"

    def apply(self, query: Query, value: Any) -> Query:
        # An empty search term leaves the query untouched
        if not value:
            return query
        pattern = f"%{value}%"
        conditions = [
            ReportSchedule.name.ilike(pattern),
            ReportSchedule.description.ilike(pattern),
            ReportSchedule.sql.ilike(pattern),
        ]
        return query.filter(or_(*conditions))

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -0,0 +1,196 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Any, Dict
from flask import Response
from flask_appbuilder.api import expose, permission_name, protect, rison, safe
from flask_appbuilder.api.schemas import get_item_schema, get_list_schema
from flask_appbuilder.models.sqla.interface import SQLAInterface
from superset.constants import RouteMethod
from superset.models.reports import ReportExecutionLog
from superset.reports.logs.schemas import openapi_spec_methods_override
from superset.views.base_api import BaseSupersetModelRestApi
logger = logging.getLogger(__name__)
class ReportExecutionLogRestApi(BaseSupersetModelRestApi):
    """Read-only REST API for report schedule execution logs.

    Exposed as nested endpoints under a report schedule:
    ``/api/v1/report/<pk>/log/`` and ``/api/v1/report/<pk>/log/<log_id>``.
    """
    datamodel = SQLAInterface(ReportExecutionLog)

    # Read-only resource: only GET and GET_LIST are routed
    include_route_methods = {RouteMethod.GET, RouteMethod.GET_LIST}
    # Reuse the parent resource's permissions — log access follows schedule access
    class_permission_name = "ReportSchedule"
    resource_name = "report"
    allow_browser_login = True

    show_columns = [
        "id",
        "scheduled_dttm",
        "end_dttm",
        "start_dttm",
        "value",
        "value_row_json",
        "state",
        "error_message",
    ]
    list_columns = [
        "id",
        "end_dttm",
        "start_dttm",
        "value",
        "value_row_json",
        "state",
        "error_message",
    ]
    order_columns = [
        "state",
        "value",
        "error_message",
        "end_dttm",
        "start_dttm",
    ]
    openapi_spec_tag = "Report Schedules"
    openapi_spec_methods = openapi_spec_methods_override

    @staticmethod
    def _apply_layered_relation_to_rison(  # pylint: disable=invalid-name
        layer_id: int, rison_parameters: Dict[str, Any]
    ) -> None:
        """Inject a filter into the rison args so results are scoped to one schedule."""
        if "filters" not in rison_parameters:
            rison_parameters["filters"] = []
        rison_parameters["filters"].append(
            {"col": "report_schedule", "opr": "rel_o_m", "value": layer_id}
        )

    @expose("/<int:pk>/log/", methods=["GET"])
    @protect()
    @safe
    @permission_name("get")
    @rison(get_list_schema)
    def get_list(  # pylint: disable=arguments-differ
        self, pk: int, **kwargs: Dict[str, Any]
    ) -> Response:
        """Get a list of report schedule logs
        ---
        get:
          description: >-
            Get a list of report schedule logs
          parameters:
          - in: path
            schema:
              type: integer
            description: The report schedule id for these logs
            name: pk
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_list_schema'
          responses:
            200:
              description: Items from logs
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      ids:
                        description: >-
                          A list of log ids
                        type: array
                        items:
                          type: string
                      count:
                        description: >-
                          The total record count on the backend
                        type: number
                      result:
                        description: >-
                          The result from the get list query
                        type: array
                        items:
                          $ref: '#/components/schemas/{{self.__class__.__name__}}.get_list'  # pylint: disable=line-too-long
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        # Scope the list to the schedule in the URL, then delegate to FAB
        self._apply_layered_relation_to_rison(pk, kwargs["rison"])
        return self.get_list_headless(**kwargs)

    @expose("/<int:pk>/log/<int:log_id>", methods=["GET"])
    @protect()
    @safe
    @permission_name("get")
    @rison(get_item_schema)
    def get(  # pylint: disable=arguments-differ
        self, pk: int, log_id: int, **kwargs: Dict[str, Any]
    ) -> Response:
        """Get a report schedule log
        ---
        get:
          description: >-
            Get a report schedule log
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The report schedule pk for log
          - in: path
            schema:
              type: integer
            name: log_id
            description: The log pk
          - in: query
            name: q
            content:
              application/json:
                schema:
                  $ref: '#/components/schemas/get_item_schema'
          responses:
            200:
              description: Item log
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      id:
                        description: The log id
                        type: string
                      result:
                        $ref: '#/components/schemas/{{self.__class__.__name__}}.get'
            400:
              $ref: '#/components/responses/400'
            401:
              $ref: '#/components/responses/401'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        # Scope the lookup to the schedule in the URL, then delegate to FAB
        self._apply_layered_relation_to_rison(pk, kwargs["rison"])
        return self.get_headless(log_id, **kwargs)

View File

@ -0,0 +1,28 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Overrides for the auto-generated OpenAPI method descriptions of the
# report schedule log endpoints (consumed by ReportExecutionLogRestApi).
openapi_spec_methods_override = {
    "get": {"get": {"description": "Get a report schedule log"}},
    "get_list": {
        "get": {
            "description": "Get a list of report schedule logs, use Rison or JSON "
            "query parameters for filtering, sorting,"
            " pagination and for selecting specific"
            " columns and metadata.",
        }
    },
}

209
superset/reports/schemas.py Normal file
View File

@ -0,0 +1,209 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Union
from croniter import croniter
from marshmallow import fields, Schema, validate
from marshmallow.validate import Length, ValidationError
from superset.models.reports import (
ReportRecipientType,
ReportScheduleType,
ReportScheduleValidatorType,
)
# Overrides for the auto-generated OpenAPI method descriptions of the
# report schedule CRUD endpoints.
openapi_spec_methods_override = {
    "get": {"get": {"description": "Get a report schedule"}},
    "get_list": {
        "get": {
            "description": "Get a list of report schedules, use Rison or JSON "
            "query parameters for filtering, sorting,"
            " pagination and for selecting specific"
            " columns and metadata.",
        }
    },
    "post": {"post": {"description": "Create a report schedule"}},
    "put": {"put": {"description": "Update a report schedule"}},
    "delete": {"delete": {"description": "Delete a report schedule"}},
}

# Rison schema for the bulk-delete endpoint's list of ids
get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}}

# Field descriptions shared by the POST and PUT schemas below
type_description = "The report schedule type"
name_description = "The report schedule name."
description_description = "Use a nice description to give context to this Alert/Report"
context_markdown_description = "Markdown description"
crontab_description = (
    "A CRON expression."
    "[Crontab Guru](https://crontab.guru/) is "
    "a helpful resource that can help you craft a CRON expression."
)
sql_description = (
    "A SQL statement that defines whether the alert should get triggered or "
    "not. The query is expected to return either NULL or a number value."
)
owners_description = (
    "Owner are users ids allowed to delete or change this report. "
    "If left empty you will be one of the owners of the report."
)
validator_type_description = (
    "Determines when to trigger alert based off value from alert query. "
    "Alerts will be triggered with these validator types:\n"
    "- Not Null - When the return value is Not NULL, Empty, or 0\n"
    "- Operator - When `sql_return_value comparison_operator threshold`"
    " is True e.g. `50 <= 75`<br>Supports the comparison operators <, <=, "
    ">, >=, ==, and !="
)
validator_config_json_op_description = (
    "The operation to compare with a threshold to apply to the SQL output\n"
)
log_retention_description = "How long to keep the logs around for this report (in days)"
grace_period_description = (
    "Once an alert is triggered, how long, in seconds, before "
    "Superset nags you again. (in seconds)"
)
def validate_crontab(value: Union[bytes, bytearray, str]) -> None:
    """Raise a marshmallow ValidationError when *value* is not a valid CRON expression."""
    expression = str(value)
    if croniter.is_valid(expression):
        return
    raise ValidationError("Cron expression is not valid")
class ValidatorConfigJSONSchema(Schema):
    """Schema for an alert's validator configuration (comparison operator + threshold)."""
    operation = fields.String(
        description=validator_config_json_op_description,
        validate=validate.OneOf(choices=["<", "<=", ">", ">=", "==", "!="]),
    )
    threshold = fields.Integer()
class ReportRecipientConfigJSONSchema(Schema):
    """Schema for a recipient's configuration payload (e.g. target address)."""
    # TODO if email check validity
    target = fields.String()
class ReportRecipientSchema(Schema):
    """Schema for a single report recipient (type + nested configuration)."""
    type = fields.String(
        description="The recipient type, check spec for valid options",
        allow_none=False,
        required=True,
        validate=validate.OneOf(
            choices=tuple(key.value for key in ReportRecipientType)
        ),
    )
    recipient_config_json = fields.Nested(ReportRecipientConfigJSONSchema)
class ReportSchedulePostSchema(Schema):
    """Schema validating the payload for creating a report schedule (POST)."""
    type = fields.String(
        description=type_description,
        allow_none=False,
        required=True,
        validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)),
    )
    name = fields.String(
        description=name_description,
        allow_none=False,
        required=True,
        validate=[Length(1, 150)],
        example="Daily dashboard email",
    )
    description = fields.String(
        description=description_description,
        allow_none=True,
        required=False,
        example="Daily sales dashboard to marketing",
    )
    context_markdown = fields.String(
        description=context_markdown_description, allow_none=True, required=False
    )
    active = fields.Boolean()
    crontab = fields.String(
        description=crontab_description,
        validate=[validate_crontab, Length(1, 50)],
        example="*/5 * * * * *",
        allow_none=False,
        required=True,
    )
    sql = fields.String(
        description=sql_description, example="SELECT value FROM time_series_table"
    )
    # Related object ids; relation validity is checked by the create command
    chart = fields.Integer(required=False)
    dashboard = fields.Integer(required=False)
    database = fields.Integer(required=False)
    owners = fields.List(fields.Integer(description=owners_description))
    validator_type = fields.String(
        description=validator_type_description,
        validate=validate.OneOf(
            choices=tuple(key.value for key in ReportScheduleValidatorType)
        ),
    )
    validator_config_json = fields.Nested(ValidatorConfigJSONSchema)
    log_retention = fields.Integer(description=log_retention_description, example=90)
    grace_period = fields.Integer(description=grace_period_description, example=14400)
    recipients = fields.List(fields.Nested(ReportRecipientSchema))
class ReportSchedulePutSchema(Schema):
    """Schema validating the payload for updating a report schedule (PUT).

    All fields are optional so partial updates are possible.
    """
    type = fields.String(
        description=type_description,
        required=False,
        validate=validate.OneOf(choices=tuple(key.value for key in ReportScheduleType)),
    )
    name = fields.String(
        description=name_description, required=False, validate=[Length(1, 150)]
    )
    description = fields.String(
        description=description_description,
        allow_none=True,
        required=False,
        example="Daily sales dashboard to marketing",
    )
    context_markdown = fields.String(
        description=context_markdown_description, allow_none=True, required=False
    )
    active = fields.Boolean(required=False)
    crontab = fields.String(
        description=crontab_description,
        validate=[validate_crontab, Length(1, 50)],
        required=False,
    )
    sql = fields.String(
        description=sql_description,
        example="SELECT value FROM time_series_table",
        required=False,
    )
    # Related object ids; relation validity is checked by the update command
    chart = fields.Integer(required=False)
    dashboard = fields.Integer(required=False)
    database = fields.Integer(required=False)
    owners = fields.List(fields.Integer(description=owners_description), required=False)
    validator_type = fields.String(
        description=validator_type_description,
        validate=validate.OneOf(
            choices=tuple(key.value for key in ReportScheduleValidatorType)
        ),
        required=False,
    )
    validator_config_json = fields.Nested(ValidatorConfigJSONSchema, required=False)
    log_retention = fields.Integer(
        description=log_retention_description, example=90, required=False
    )
    grace_period = fields.Integer(
        description=grace_period_description, example=14400, required=False
    )
    recipients = fields.List(fields.Nested(ReportRecipientSchema), required=False)

View File

@ -21,7 +21,7 @@ from typing import Any, Callable, cast, Dict, List, Optional, Set, Tuple, Type,
from apispec import APISpec from apispec import APISpec
from apispec.exceptions import DuplicateComponentNameError from apispec.exceptions import DuplicateComponentNameError
from flask import Blueprint, g, Response from flask import Blueprint, g, Response
from flask_appbuilder import AppBuilder, ModelRestApi from flask_appbuilder import AppBuilder, Model, ModelRestApi
from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.api import expose, protect, rison, safe
from flask_appbuilder.models.filters import BaseFilter, Filters from flask_appbuilder.models.filters import BaseFilter, Filters
from flask_appbuilder.models.sqla.filters import FilterStartsWith from flask_appbuilder.models.sqla.filters import FilterStartsWith
@ -170,6 +170,18 @@ class BaseSupersetModelRestApi(ModelRestApi):
} }
""" # pylint: disable=pointless-string-statement """ # pylint: disable=pointless-string-statement
allowed_rel_fields: Set[str] = set() allowed_rel_fields: Set[str] = set()
"""
Declare a set of allowed related fields that the `related` endpoint supports
""" # pylint: disable=pointless-string-statement
text_field_rel_fields: Dict[str, str] = {}
"""
Declare an alternative for the human readable representation of the Model object::
text_field_rel_fields = {
"<RELATED_FIELD>": "<RELATED_OBJECT_FIELD>"
}
""" # pylint: disable=pointless-string-statement
allowed_distinct_fields: Set[str] = set() allowed_distinct_fields: Set[str] = set()
@ -380,6 +392,14 @@ class BaseSupersetModelRestApi(ModelRestApi):
500: 500:
$ref: '#/components/responses/500' $ref: '#/components/responses/500'
""" """
def get_text_for_model(model: Model) -> str:
if column_name in self.text_field_rel_fields:
model_column_name = self.text_field_rel_fields.get(column_name)
if model_column_name:
return getattr(model, model_column_name)
return str(model)
if column_name not in self.allowed_rel_fields: if column_name not in self.allowed_rel_fields:
self.incr_stats("error", self.related.__name__) self.incr_stats("error", self.related.__name__)
return self.response_404() return self.response_404()
@ -405,7 +425,7 @@ class BaseSupersetModelRestApi(ModelRestApi):
) )
# produce response # produce response
result = [ result = [
{"value": datamodel.get_pk_value(value), "text": str(value)} {"value": datamodel.get_pk_value(value), "text": get_text_for_model(value)}
for value in values for value in values
] ]
return self.response(200, count=count, result=result) return self.response(200, count=count, result=result)

16
tests/reports/__init__.py Normal file
View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

864
tests/reports/api_tests.py Normal file
View File

@ -0,0 +1,864 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
from datetime import datetime
from typing import List, Optional
import json
from flask_appbuilder.security.sqla.models import User
import pytest
import prison
from sqlalchemy.sql import func
import tests.test_app
from superset import db
from superset.models.core import Database
from superset.models.slice import Slice
from superset.models.dashboard import Dashboard
from superset.models.reports import (
ReportSchedule,
ReportRecipients,
ReportExecutionLog,
ReportScheduleType,
ReportRecipientType,
ReportLogState,
)
from tests.base_tests import SupersetTestCase
from superset.utils.core import get_example_database
REPORTS_COUNT = 10
class TestReportSchedulesApi(SupersetTestCase):
    def insert_report_schedule(
        self,
        type: str,  # NOTE: shadows the builtin; kept for keyword-arg compatibility
        name: str,
        crontab: str,
        sql: Optional[str] = None,
        description: Optional[str] = None,
        chart: Optional[Slice] = None,
        dashboard: Optional[Dashboard] = None,
        database: Optional[Database] = None,
        owners: Optional[List[User]] = None,
        validator_type: Optional[str] = None,
        validator_config_json: Optional[str] = None,
        log_retention: Optional[int] = None,
        grace_period: Optional[int] = None,
        recipients: Optional[List[ReportRecipients]] = None,
        logs: Optional[List[ReportExecutionLog]] = None,
    ) -> ReportSchedule:
        """Insert and commit a ReportSchedule row for test fixtures."""
        owners = owners or []
        recipients = recipients or []
        logs = logs or []
        report_schedule = ReportSchedule(
            type=type,
            name=name,
            crontab=crontab,
            sql=sql,
            description=description,
            chart=chart,
            dashboard=dashboard,
            database=database,
            owners=owners,
            validator_type=validator_type,
            validator_config_json=validator_config_json,
            log_retention=log_retention,
            grace_period=grace_period,
            recipients=recipients,
            logs=logs,
        )
        db.session.add(report_schedule)
        db.session.commit()
        return report_schedule
    @pytest.fixture()
    def create_report_schedules(self):
        """Fixture creating REPORTS_COUNT alert schedules.

        Schedule ``cx`` gets ``cx`` recipients and ``cx`` error logs, so list
        tests can assert on varying nested-collection sizes. Everything is
        deleted again on teardown.
        """
        with self.create_app().app_context():
            report_schedules = []
            admin_user = self.get_user("admin")
            alpha_user = self.get_user("alpha")
            chart = db.session.query(Slice).first()
            example_db = get_example_database()
            for cx in range(REPORTS_COUNT):
                recipients = []
                logs = []
                for cy in range(cx):
                    config_json = {"target": f"target{cy}@email.com"}
                    recipients.append(
                        ReportRecipients(
                            type=ReportRecipientType.EMAIL,
                            recipient_config_json=json.dumps(config_json),
                        )
                    )
                    logs.append(
                        ReportExecutionLog(
                            scheduled_dttm=datetime(2020, 1, 1),
                            state=ReportLogState.ERROR,
                            error_message=f"Error {cy}",
                        )
                    )
                report_schedules.append(
                    self.insert_report_schedule(
                        type=ReportScheduleType.ALERT,
                        name=f"name{cx}",
                        crontab=f"*/{cx} * * * *",
                        sql=f"SELECT value from table{cx}",
                        description=f"Some description {cx}",
                        chart=chart,
                        database=example_db,
                        owners=[admin_user, alpha_user],
                        recipients=recipients,
                        logs=logs,
                    )
                )
            yield report_schedules

            report_schedules = db.session.query(ReportSchedule).all()
            # rollback changes (assuming cascade delete)
            for report_schedule in report_schedules:
                db.session.delete(report_schedule)
            db.session.commit()
    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_report_schedule(self):
        """
        ReportSchedule Api: Test get report schedule
        """
        report_schedule = (
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.name == "name1")
            .first()
        )
        self.login(username="admin")
        uri = f"api/v1/report/{report_schedule.id}"
        rv = self.get_assert_metric(uri, "get")
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 200
        expected_result = {
            "active": report_schedule.active,
            "chart": {"id": report_schedule.chart.id},
            "context_markdown": report_schedule.context_markdown,
            "crontab": report_schedule.crontab,
            "dashboard": None,
            "database": {"id": report_schedule.database.id},
            "description": report_schedule.description,
            "grace_period": report_schedule.grace_period,
            "id": report_schedule.id,
            "last_eval_dttm": report_schedule.last_eval_dttm,
            "last_state": report_schedule.last_state,
            "last_value": report_schedule.last_value,
            "last_value_row_json": report_schedule.last_value_row_json,
            "log_retention": report_schedule.log_retention,
            "name": report_schedule.name,
            "owners": [
                # NOTE(review): user ids 1 and 5 are hard-coded — assumes a
                # freshly seeded test database; confirm against the fixtures
                {"first_name": "admin", "id": 1, "last_name": "user"},
                {"first_name": "alpha", "id": 5, "last_name": "user"},
            ],
            "recipients": [
                {
                    "id": report_schedule.recipients[0].id,
                    "recipient_config_json": '{"target": "target0@email.com"}',
                    "type": "Email",
                }
            ],
            "type": report_schedule.type,
            "validator_config_json": report_schedule.validator_config_json,
            "validator_type": report_schedule.validator_type,
        }
        assert data["result"] == expected_result
def test_info_report_schedule(self):
"""
ReportSchedule API: Test info
"""
self.login(username="admin")
uri = f"api/v1/report/_info"
rv = self.get_assert_metric(uri, "info")
assert rv.status_code == 200
    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_report_schedule_not_found(self):
        """
        ReportSchedule Api: Test get report schedule not found
        """
        # One past the highest existing id is guaranteed not to exist
        max_id = db.session.query(func.max(ReportSchedule.id)).scalar()
        self.login(username="admin")
        uri = f"api/v1/report/{max_id + 1}"
        rv = self.get_assert_metric(uri, "get")
        assert rv.status_code == 404
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule(self):
"""
ReportSchedule Api: Test get list report schedules
"""
self.login(username="admin")
uri = f"api/v1/report/"
rv = self.get_assert_metric(uri, "get_list")
expected_fields = [
"active",
"changed_by",
"changed_on",
"changed_on_delta_humanized",
"created_by",
"created_on",
"id",
"last_eval_dttm",
"last_state",
"name",
"owners",
"recipients",
"type",
]
assert rv.status_code == 200
data = json.loads(rv.data.decode("utf-8"))
assert data["count"] == REPORTS_COUNT
data_keys = sorted(list(data["result"][0].keys()))
assert expected_fields == data_keys
# Assert nested fields
expected_owners_fields = ["first_name", "id", "last_name"]
data_keys = sorted(list(data["result"][0]["owners"][0].keys()))
assert expected_owners_fields == data_keys
expected_recipients_fields = ["id", "type"]
data_keys = sorted(list(data["result"][1]["recipients"][0].keys()))
assert expected_recipients_fields == data_keys
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule_sorting(self):
"""
ReportSchedule Api: Test sorting on get list report schedules
"""
self.login(username="admin")
uri = f"api/v1/report/"
order_columns = [
"active",
"created_by.first_name",
"changed_by.first_name",
"changed_on",
"changed_on_delta_humanized",
"created_on",
"name",
"type",
]
for order_column in order_columns:
arguments = {"order_column": order_column, "order_direction": "asc"}
uri = f"api/v1/report/?q={prison.dumps(arguments)}"
rv = self.get_assert_metric(uri, "get_list")
assert rv.status_code == 200
    @pytest.mark.usefixtures("create_report_schedules")
    def test_get_list_report_schedule_filter_name(self):
        """
        ReportSchedule Api: Test filter name on get list report schedules
        """
        self.login(username="admin")
        # Test normal contains filter; only "name2" contains the substring "2"
        arguments = {
            "columns": ["name"],
            "filters": [{"col": "name", "opr": "ct", "value": "2"}],
        }
        uri = f"api/v1/report/?q={prison.dumps(arguments)}"
        rv = self.get_assert_metric(uri, "get_list")

        expected_result = {
            "name": "name2",
        }
        assert rv.status_code == 200
        data = json.loads(rv.data.decode("utf-8"))
        assert data["count"] == 1
        assert data["result"][0] == expected_result
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule_filter_custom(self):
    """
    ReportSchedule Api: Test custom filter on get list report schedules
    """
    self.login(username="admin")
    # The custom "all text" filter searches across several text columns.
    query = {
        "columns": ["name"],
        "filters": [{"col": "name", "opr": "report_all_text", "value": "table3"}],
    }
    response = self.get_assert_metric(
        f"api/v1/report/?q={prison.dumps(query)}", "get_list"
    )
    assert response.status_code == 200
    payload = json.loads(response.data.decode("utf-8"))
    assert payload["count"] == 1
    assert payload["result"][0] == {"name": "name3"}
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule_filter_active(self):
    """
    ReportSchedule Api: Test active filter on get list report schedules
    """
    self.login(username="admin")
    # All fixture schedules are active, so the filter returns every row.
    query = {
        "columns": ["name"],
        "filters": [{"col": "active", "opr": "eq", "value": True}],
    }
    response = self.get_assert_metric(
        f"api/v1/report/?q={prison.dumps(query)}", "get_list"
    )
    assert response.status_code == 200
    payload = json.loads(response.data.decode("utf-8"))
    assert payload["count"] == REPORTS_COUNT
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule_filter_type(self):
    """
    ReportSchedule Api: Test type filter on get list report schedules
    """
    self.login(username="admin")
    # The fixtures only create alerts: filtering on ALERT matches every
    # row, filtering on REPORT matches none.
    cases = [
        (ReportScheduleType.ALERT, REPORTS_COUNT),
        (ReportScheduleType.REPORT, 0),
    ]
    for schedule_type, expected_count in cases:
        query = {
            "columns": ["name"],
            "filters": [{"col": "type", "opr": "eq", "value": schedule_type}],
        }
        response = self.get_assert_metric(
            f"api/v1/report/?q={prison.dumps(query)}", "get_list"
        )
        assert response.status_code == 200
        payload = json.loads(response.data.decode("utf-8"))
        assert payload["count"] == expected_count
@pytest.mark.usefixtures("create_report_schedules")
def test_get_related_report_schedule(self):
    """
    ReportSchedule Api: Test get related report schedule

    The /related/<column> endpoint must respond 200 for every
    relation the API declares filterable.
    """
    self.login(username="admin")
    related_columns = ["created_by", "chart", "dashboard"]
    for related_column in related_columns:
        uri = f"api/v1/report/related/{related_column}"
        rv = self.client.get(uri)
        assert rv.status_code == 200
@pytest.mark.usefixtures("create_report_schedules")
def test_create_report_schedule(self):
    """
    ReportSchedule Api: Test create report schedule
    """
    self.login(username="admin")
    chart = db.session.query(Slice).first()
    example_db = get_example_database()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "new3",
        "description": "description",
        "crontab": "0 9 * * *",
        "recipients": [
            {
                "type": ReportRecipientType.EMAIL,
                "recipient_config_json": {"target": "target@superset.org"},
            },
            {
                "type": ReportRecipientType.SLACK,
                "recipient_config_json": {"target": "channel"},
            },
        ],
        "chart": chart.id,
        "database": example_db.id,
    }
    rv = self.client.post("api/v1/report/", json=payload)
    response_data = json.loads(rv.data.decode("utf-8"))
    assert rv.status_code == 201
    # The persisted model must mirror the submitted payload.
    created_model = db.session.query(ReportSchedule).get(response_data.get("id"))
    assert created_model is not None
    assert created_model.name == payload["name"]
    assert created_model.description == payload["description"]
    assert created_model.crontab == payload["crontab"]
    assert created_model.chart.id == payload["chart"]
    assert created_model.database.id == payload["database"]
    # Rollback changes
    db.session.delete(created_model)
    db.session.commit()
@pytest.mark.usefixtures("create_report_schedules")
def test_create_report_schedule_uniqueness(self):
    """
    ReportSchedule Api: Test create report schedule uniqueness
    """
    self.login(username="admin")
    chart = db.session.query(Slice).first()
    example_db = get_example_database()
    # "name3" already exists in the fixture data, so creation must fail.
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "name3",
        "description": "description",
        "crontab": "0 9 * * *",
        "chart": chart.id,
        "database": example_db.id,
    }
    rv = self.client.post("api/v1/report/", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {"name": ["Name must be unique"]}
    }
@pytest.mark.usefixtures("create_report_schedules")
def test_create_report_schedule_chart_dash_validation(self):
    """
    ReportSchedule Api: Test create report schedule chart and dashboard validation
    """
    self.login(username="admin")
    # Submitting both a chart and a dashboard must be rejected.
    chart = db.session.query(Slice).first()
    dashboard = db.session.query(Dashboard).first()
    example_db = get_example_database()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "new3",
        "description": "description",
        "crontab": "0 9 * * *",
        "chart": chart.id,
        "dashboard": dashboard.id,
        "database": example_db.id,
    }
    rv = self.client.post("api/v1/report/", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {"chart": "Choose a chart or dashboard not both"}
    }
@pytest.mark.usefixtures("create_report_schedules")
def test_create_report_schedule_chart_db_validation(self):
    """
    ReportSchedule Api: Test create report schedule chart and database validation
    """
    self.login(username="admin")
    # Alerts require a database; omitting it must be rejected.
    chart = db.session.query(Slice).first()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "new3",
        "description": "description",
        "crontab": "0 9 * * *",
        "chart": chart.id,
    }
    rv = self.client.post("api/v1/report/", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {"database": "Database is required for alerts"}
    }
@pytest.mark.usefixtures("create_report_schedules")
def test_create_report_schedule_relations_exist(self):
    """
    ReportSchedule Api: Test create report schedule
    relations (chart, dash, db) exist
    """
    self.login(username="admin")
    # Ids one past the current maxima are guaranteed not to exist.
    chart_max_id = db.session.query(func.max(Slice.id)).scalar()
    database_max_id = db.session.query(func.max(Database.id)).scalar()
    examples_db = get_example_database()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "new3",
        "description": "description",
        "crontab": "0 9 * * *",
        "chart": chart_max_id + 1,
        "database": database_max_id + 1,
    }
    rv = self.client.post("api/v1/report/", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {
            "chart": "Chart does not exist",
            "database": "Database does not exist",
        }
    }
    # A dashboard id past the current maximum must be rejected as well.
    dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "new3",
        "description": "description",
        "crontab": "0 9 * * *",
        "dashboard": dashboard_max_id + 1,
        "database": examples_db.id,
    }
    rv = self.client.post("api/v1/report/", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {"dashboard": "Dashboard does not exist"}
    }
@pytest.mark.usefixtures("create_report_schedules")
def test_update_report_schedule(self):
    """
    ReportSchedule Api: Test update report schedule
    """
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name2")
        .one_or_none()
    )
    self.login(username="admin")
    chart = db.session.query(Slice).first()
    example_db = get_example_database()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "changed",
        "description": "description",
        "crontab": "0 10 * * *",
        "recipients": [
            {
                "type": ReportRecipientType.EMAIL,
                "recipient_config_json": {"target": "target@superset.org"},
            }
        ],
        "chart": chart.id,
        "database": example_db.id,
    }
    rv = self.client.put(f"api/v1/report/{report_schedule.id}", json=payload)
    assert rv.status_code == 200
    updated_model = db.session.query(ReportSchedule).get(report_schedule.id)
    assert updated_model is not None
    assert updated_model.name == payload["name"]
    assert updated_model.description == payload["description"]
    # The single submitted recipient replaces the previous recipient list.
    assert len(updated_model.recipients) == 1
    assert updated_model.crontab == payload["crontab"]
    assert updated_model.chart_id == payload["chart"]
    assert updated_model.database_id == payload["database"]
@pytest.mark.usefixtures("create_report_schedules")
def test_update_report_schedule_uniqueness(self):
    """
    ReportSchedule Api: Test update report schedule uniqueness
    """
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name2")
        .one_or_none()
    )
    self.login(username="admin")
    # Renaming to an existing schedule name ("name3") must be rejected.
    payload = {"name": "name3", "description": "changed_description"}
    rv = self.client.put(f"api/v1/report/{report_schedule.id}", json=payload)
    response_data = json.loads(rv.data.decode("utf-8"))
    assert rv.status_code == 422
    assert response_data == {"message": {"name": ["Name must be unique"]}}
@pytest.mark.usefixtures("create_report_schedules")
def test_update_report_schedule_not_found(self):
    """
    ReportSchedule Api: Test update report schedule not found
    """
    # One past the highest existing id is guaranteed not to exist.
    highest_id = db.session.query(func.max(ReportSchedule.id)).scalar()
    self.login(username="admin")
    rv = self.client.put(f"api/v1/report/{highest_id + 1}", json={"name": "changed"})
    assert rv.status_code == 404
@pytest.mark.usefixtures("create_report_schedules")
def test_update_report_schedule_chart_dash_validation(self):
    """
    ReportSchedule Api: Test update report schedule chart and dashboard validation
    """
    self.login(username="admin")
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name2")
        .one_or_none()
    )
    # Submitting both a chart and a dashboard must be rejected.
    chart = db.session.query(Slice).first()
    dashboard = db.session.query(Dashboard).first()
    example_db = get_example_database()
    payload = {
        "chart": chart.id,
        "dashboard": dashboard.id,
        "database": example_db.id,
    }
    rv = self.client.put(f"api/v1/report/{report_schedule.id}", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {"chart": "Choose a chart or dashboard not both"}
    }
@pytest.mark.usefixtures("create_report_schedules")
def test_update_report_schedule_relations_exist(self):
    """
    ReportSchedule Api: Test update report schedule relations exist
    relations (chart, dash, db) exist
    """
    self.login(username="admin")
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name2")
        .one_or_none()
    )
    # Ids one past the current maxima are guaranteed not to exist.
    chart_max_id = db.session.query(func.max(Slice.id)).scalar()
    database_max_id = db.session.query(func.max(Database.id)).scalar()
    examples_db = get_example_database()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "new3",
        "description": "description",
        "crontab": "0 9 * * *",
        "chart": chart_max_id + 1,
        "database": database_max_id + 1,
    }
    rv = self.client.put(f"api/v1/report/{report_schedule.id}", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {
            "chart": "Chart does not exist",
            "database": "Database does not exist",
        }
    }
    # A dashboard id past the current maximum must be rejected as well.
    dashboard_max_id = db.session.query(func.max(Dashboard.id)).scalar()
    payload = {
        "type": ReportScheduleType.ALERT,
        "name": "new3",
        "description": "description",
        "crontab": "0 9 * * *",
        "dashboard": dashboard_max_id + 1,
        "database": examples_db.id,
    }
    rv = self.client.put(f"api/v1/report/{report_schedule.id}", json=payload)
    assert rv.status_code == 422
    assert json.loads(rv.data.decode("utf-8")) == {
        "message": {"dashboard": "Dashboard does not exist"}
    }
@pytest.mark.usefixtures("create_report_schedules")
def test_delete_report_schedule(self):
    """
    ReportSchedule Api: Test delete report schedule

    Deleting a schedule must also remove its recipients and
    execution logs (cascade).
    """
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name1")
        .one_or_none()
    )
    self.login(username="admin")
    uri = f"api/v1/report/{report_schedule.id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 200
    deleted_report_schedule = db.session.query(ReportSchedule).get(
        report_schedule.id
    )
    assert deleted_report_schedule is None
    # The delete must cascade to the schedule's recipients...
    deleted_recipients = (
        db.session.query(ReportRecipients)
        .filter(ReportRecipients.report_schedule_id == report_schedule.id)
        .all()
    )
    assert deleted_recipients == []
    # ...and to its execution logs.
    deleted_logs = (
        db.session.query(ReportExecutionLog)
        .filter(ReportExecutionLog.report_schedule_id == report_schedule.id)
        .all()
    )
    assert deleted_logs == []
@pytest.mark.usefixtures("create_report_schedules")
def test_delete_report_schedule_not_found(self):
    """
    ReportSchedule Api: Test delete report schedule not found
    """
    # One past the highest existing id is guaranteed not to exist.
    highest_id = db.session.query(func.max(ReportSchedule.id)).scalar()
    self.login(username="admin")
    response = self.client.delete(f"api/v1/report/{highest_id + 1}")
    assert response.status_code == 404
@pytest.mark.usefixtures("create_report_schedules")
def test_bulk_delete_report_schedule(self):
    """
    ReportSchedule Api: Test bulk delete report schedules
    """
    query_report_schedules = db.session.query(ReportSchedule)
    # Collect every existing schedule id for the bulk delete request.
    report_schedules_ids = [
        report_schedule.id for report_schedule in query_report_schedules.all()
    ]
    self.login(username="admin")
    rv = self.client.delete(f"api/v1/report/?q={prison.dumps(report_schedules_ids)}")
    assert rv.status_code == 200
    # Nothing should remain after the bulk delete.
    assert query_report_schedules.all() == []
    response = json.loads(rv.data.decode("utf-8"))
    assert response == {
        "message": f"Deleted {len(report_schedules_ids)} report schedules"
    }
@pytest.mark.usefixtures("create_report_schedules")
def test_bulk_delete_report_schedule_not_found(self):
    """
    ReportSchedule Api: Test bulk delete report schedule not found
    """
    # Include one id that cannot exist alongside the valid ones; the
    # whole bulk delete must then fail with 404.
    report_schedules_ids = [
        report_schedule.id
        for report_schedule in db.session.query(ReportSchedule).all()
    ]
    highest_id = db.session.query(func.max(ReportSchedule.id)).scalar()
    report_schedules_ids.append(highest_id + 1)
    self.login(username="admin")
    rv = self.client.delete(f"api/v1/report/?q={prison.dumps(report_schedules_ids)}")
    assert rv.status_code == 404
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule_logs(self):
    """
    ReportSchedule Api: Test get list report schedules logs
    """
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name3")
        .one_or_none()
    )
    self.login(username="admin")
    response = self.client.get(f"api/v1/report/{report_schedule.id}/log/")
    assert response.status_code == 200
    payload = json.loads(response.data.decode("utf-8"))
    # The fixture creates three execution logs per schedule.
    assert payload["count"] == 3
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule_logs_sorting(self):
    """
    ReportSchedule Api: Test sorting on get list report schedules logs

    Every sortable log column must be accepted by the log list endpoint.
    """
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name3")
        .one_or_none()
    )
    self.login(username="admin")
    uri = f"api/v1/report/{report_schedule.id}/log/"
    order_columns = [
        "state",
        "value",
        "error_message",
        "end_dttm",
        "start_dttm",
    ]
    for order_column in order_columns:
        arguments = {"order_column": order_column, "order_direction": "asc"}
        uri = f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(arguments)}"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
@pytest.mark.usefixtures("create_report_schedules")
def test_get_list_report_schedule_logs_filters(self):
    """
    ReportSchedule Api: Test get list report schedules log filters
    """
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name3")
        .one_or_none()
    )
    self.login(username="admin")
    # No fixture log is in the SUCCESS state, so the filter matches nothing.
    query = {
        "columns": ["name"],
        "filters": [{"col": "state", "opr": "eq", "value": ReportLogState.SUCCESS}],
    }
    response = self.get_assert_metric(
        f"api/v1/report/{report_schedule.id}/log/?q={prison.dumps(query)}",
        "get_list",
    )
    assert response.status_code == 200
    payload = json.loads(response.data.decode("utf-8"))
    assert payload["count"] == 0
@pytest.mark.usefixtures("create_report_schedules")
def test_report_schedule_logs_no_mutations(self):
    """
    ReportSchedule Api: Test assert there's no way to alter logs
    """
    report_schedule = (
        db.session.query(ReportSchedule)
        .filter(ReportSchedule.name == "name3")
        .one_or_none()
    )
    payload = {"state": ReportLogState.ERROR, "error_message": "New error changed"}
    self.login(username="admin")
    # Creating a log via the API must not be allowed.
    response = self.client.post(f"api/v1/report/{report_schedule.id}/log/", json=payload)
    assert response.status_code == 405
    # Neither must updating or deleting an existing log.
    log_uri = f"api/v1/report/{report_schedule.id}/log/{report_schedule.logs[0].id}"
    response = self.client.put(log_uri, json=payload)
    assert response.status_code == 405
    response = self.client.delete(log_uri)
    assert response.status_code == 405

View File

@ -57,6 +57,7 @@ FEATURE_FLAGS = {
"ENABLE_TEMPLATE_PROCESSING": True, "ENABLE_TEMPLATE_PROCESSING": True,
"ENABLE_REACT_CRUD_VIEWS": os.environ.get("ENABLE_REACT_CRUD_VIEWS", False), "ENABLE_REACT_CRUD_VIEWS": os.environ.get("ENABLE_REACT_CRUD_VIEWS", False),
"ROW_LEVEL_SECURITY": True, "ROW_LEVEL_SECURITY": True,
"ALERTS_REPORTS": True,
} }