feat: Backend Validation for Creation Method (#16375)

* backend creation method validation

* added tests

* Update superset/reports/dao.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* Update superset/reports/dao.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* Update tests/integration_tests/reports/api_tests.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* Update tests/integration_tests/reports/api_tests.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* Update superset/reports/dao.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* Update superset/reports/dao.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* Update superset/reports/commands/create.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* Update superset/reports/commands/exceptions.py

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>

* revisions

Co-authored-by: Beto Dealmeida <roberto@dealmeida.net>
AAfghahi 2021-09-08 19:35:18 -04:00, committed by GitHub
parent 788c0c3dae
commit c66f278b42
6 changed files with 157 additions and 4 deletions

superset/models/slice.py

@@ -53,7 +53,7 @@ slice_user = Table(
 logger = logging.getLogger(__name__)
 
-class Slice(  # pylint: disable=too-many-public-methods
+class Slice(  # pylint: disable=too-many-public-methods, too-many-instance-attributes
     Model, AuditMixinNullable, ImportExportMixin
 ):
     """A slice is essentially a report or a view on data"""

superset/reports/api.py

@@ -271,7 +271,6 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi):
     @expose("/", methods=["POST"])
     @protect()
-    @safe
     @statsd_metrics
     @permission_name("post")
     def post(self) -> Response:
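For context, a client exercises this endpoint with a JSON payload like the ones used in the integration tests further down. The sketch below is illustrative only: the host, the bearer token, the chart id, and the literal "Report"/"charts" strings (stand-ins for ReportScheduleType.REPORT and ReportCreationMethodType.CHARTS) are assumptions, not values confirmed by this diff.

# Illustrative client call, not part of the change.
import requests

payload = {
    "type": "Report",
    "name": "weekly chart email",
    "creation_method": "charts",
    "crontab": "0 9 * * *",
    "working_timeout": 3600,
    "chart": 42,
}
resp = requests.post(
    "http://localhost:8088/api/v1/report/",
    json=payload,
    headers={"Authorization": "Bearer <access-token>"},
)
# After this change, repeating the same request for the same chart and user
# is expected to return HTTP 409 instead of creating a second report.
print(resp.status_code)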

superset/reports/commands/create.py

@@ -25,12 +25,13 @@ from marshmallow import ValidationError
 from superset.commands.base import CreateMixin
 from superset.dao.exceptions import DAOCreateFailedError
 from superset.databases.dao import DatabaseDAO
-from superset.models.reports import ReportScheduleType
+from superset.models.reports import ReportCreationMethodType, ReportScheduleType
 from superset.reports.commands.base import BaseReportScheduleCommand
 from superset.reports.commands.exceptions import (
     DatabaseNotFoundValidationError,
     ReportScheduleAlertRequiredDatabaseValidationError,
     ReportScheduleCreateFailedError,
+    ReportScheduleCreationMethodUniquenessValidationError,
     ReportScheduleInvalidError,
     ReportScheduleNameUniquenessValidationError,
     ReportScheduleRequiredTypeValidationError,
@@ -59,6 +60,10 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand):
         owner_ids: Optional[List[int]] = self._properties.get("owners")
         name = self._properties.get("name", "")
         report_type = self._properties.get("type")
+        creation_method = self._properties.get("creation_method")
+        chart_id = self._properties.get("chart")
+        dashboard_id = self._properties.get("dashboard")
+        user_id = self._actor.id
 
         # Validate type is required
         if not report_type:
@@ -84,6 +89,16 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand):
         # Validate chart or dashboard relations
         self.validate_chart_dashboard(exceptions)
 
+        # Validate that each chart or dashboard only has one report with
+        # the respective creation method.
+        if (
+            creation_method != ReportCreationMethodType.ALERTS_REPORTS
+            and not ReportScheduleDAO.validate_unique_creation_method(
+                user_id, dashboard_id, chart_id
+            )
+        ):
+            raise ReportScheduleCreationMethodUniquenessValidationError()
+
         if "validator_config_json" in self._properties:
             self._properties["validator_config_json"] = json.dumps(
                 self._properties["validator_config_json"]

superset/reports/commands/exceptions.py

@@ -167,6 +167,11 @@ class ReportScheduleNameUniquenessValidationError(ValidationError):
         super().__init__([_("Name must be unique")], field_name="name")
 
 
+class ReportScheduleCreationMethodUniquenessValidationError(CommandException):
+    status = 409
+    message = "Resource already has an attached report."
+
+
 class AlertQueryMultipleRowsError(CommandException):
     message = _("Alert query returned more than one row.")
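The exception's status and message line up with the 409 response body asserted in the tests below. The sketch that follows is hypothetical, not Superset's real error handler; it only shows how an exception carrying `status` and `message` could be mapped to that payload shape.

# Hypothetical sketch (not Superset's actual handler).
from typing import Any, Dict, Tuple


class CommandException(Exception):
    status = 500
    message = ""


class ReportScheduleCreationMethodUniquenessValidationError(CommandException):
    status = 409
    message = "Resource already has an attached report."


def command_error_response(ex: CommandException) -> Tuple[Dict[str, Any], int]:
    # Shape mirrors the payload asserted in the integration tests.
    body = {
        "errors": [
            {
                "message": ex.message,
                "error_type": "GENERIC_COMMAND_ERROR",
                "level": "warning",
                "extra": {
                    "issue_codes": [
                        {
                            "code": 1010,
                            "message": "Issue 1010 - Superset encountered an "
                            "error while running a command.",
                        }
                    ]
                },
            }
        ]
    }
    return body, ex.status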

superset/reports/dao.py

@@ -113,6 +113,24 @@ class ReportScheduleDAO(BaseDAO):
             db.session.rollback()
             raise DAODeleteFailedError(str(ex)) from ex
 
+    @staticmethod
+    def validate_unique_creation_method(
+        user_id: int, dashboard_id: Optional[int] = None, chart_id: Optional[int] = None
+    ) -> bool:
+        """
+        Validate whether the user already has a report attached to the given
+        chart or dashboard from the self-subscribe reports flow.
+        """
+        query = db.session.query(ReportSchedule).filter_by(created_by_fk=user_id)
+        if dashboard_id is not None:
+            query = query.filter(ReportSchedule.dashboard_id == dashboard_id)
+        if chart_id is not None:
+            query = query.filter(ReportSchedule.chart_id == chart_id)
+
+        return not db.session.query(query.exists()).scalar()
+
     @staticmethod
     def validate_update_uniqueness(
         name: str, report_type: str, report_schedule_id: Optional[int] = None
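To make the EXISTS-based check concrete, here is a self-contained sketch against an in-memory SQLite database. The model and column names are simplified stand-ins for Superset's ReportSchedule, not its real schema; the function returns True when no matching report exists, i.e. creation is allowed.

# Self-contained illustration of the uniqueness check (simplified stand-in schema).
from typing import Optional

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ReportSchedule(Base):
    __tablename__ = "report_schedule"
    id = Column(Integer, primary_key=True)
    created_by_fk = Column(Integer)
    chart_id = Column(Integer)
    dashboard_id = Column(Integer)


def validate_unique_creation_method(
    session: Session,
    user_id: int,
    dashboard_id: Optional[int] = None,
    chart_id: Optional[int] = None,
) -> bool:
    # True when the user has no report attached to the given chart/dashboard yet.
    query = session.query(ReportSchedule).filter_by(created_by_fk=user_id)
    if dashboard_id is not None:
        query = query.filter(ReportSchedule.dashboard_id == dashboard_id)
    if chart_id is not None:
        query = query.filter(ReportSchedule.chart_id == chart_id)
    return not session.query(query.exists()).scalar()


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(ReportSchedule(created_by_fk=1, chart_id=42))
    session.commit()
    assert validate_unique_creation_method(session, user_id=1, chart_id=42) is False
    assert validate_unique_creation_method(session, user_id=1, chart_id=99) is True
    assert validate_unique_creation_method(session, user_id=2, chart_id=42) is True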

tests/integration_tests/reports/api_tests.py

@@ -768,7 +768,7 @@ class TestReportSchedulesApi(SupersetTestCase):
     )
     def test_no_dashboard_report_schedule_schema(self):
         """
-        ReportSchedule Api: Test create report schedule with not dashboard id
+        ReportSchedule Api: Test create report schedule with no dashboard id
         """
         self.login(username="admin")
         chart = db.session.query(Slice).first()
@@ -790,6 +790,122 @@ class TestReportSchedulesApi(SupersetTestCase):
             == "Please save your dashboard first, then try creating a new email report."
         )
 
+    @pytest.mark.usefixtures(
+        "load_birth_names_dashboard_with_slices", "create_report_schedules"
+    )
+    def test_create_multiple_creation_method_report_schedule_charts(self):
+        """
+        ReportSchedule Api: Test create multiple reports with the same creation method
+        """
+        self.login(username="admin")
+        chart = db.session.query(Slice).first()
+        dashboard = db.session.query(Dashboard).first()
+        example_db = get_example_database()
+        report_schedule_data = {
+            "type": ReportScheduleType.REPORT,
+            "name": "name4",
+            "description": "description",
+            "creation_method": ReportCreationMethodType.CHARTS,
+            "crontab": "0 9 * * *",
+            "working_timeout": 3600,
+            "chart": chart.id,
+        }
+        uri = "api/v1/report/"
+        rv = self.client.post(uri, json=report_schedule_data)
+        data = json.loads(rv.data.decode("utf-8"))
+        assert rv.status_code == 201
+
+        # this second time it should receive an error because the chart has an attached report
+        # with the same creation method from the same user.
+        report_schedule_data = {
+            "type": ReportScheduleType.REPORT,
+            "name": "name5",
+            "description": "description",
+            "creation_method": ReportCreationMethodType.CHARTS,
+            "crontab": "0 9 * * *",
+            "working_timeout": 3600,
+            "chart": chart.id,
+        }
+        uri = "api/v1/report/"
+        rv = self.client.post(uri, json=report_schedule_data)
+        data = json.loads(rv.data.decode("utf-8"))
+        assert rv.status_code == 409
+        assert data == {
+            "errors": [
+                {
+                    "message": "Resource already has an attached report.",
+                    "error_type": "GENERIC_COMMAND_ERROR",
+                    "level": "warning",
+                    "extra": {
+                        "issue_codes": [
+                            {
+                                "code": 1010,
+                                "message": "Issue 1010 - Superset encountered an error while running a command.",
+                            }
+                        ]
+                    },
+                }
+            ]
+        }
+
+    @pytest.mark.usefixtures(
+        "load_birth_names_dashboard_with_slices", "create_report_schedules"
+    )
+    def test_create_multiple_creation_method_report_schedule_dashboards(self):
+        """
+        ReportSchedule Api: Test create multiple reports with the same creation method
+        """
+        self.login(username="admin")
+        chart = db.session.query(Slice).first()
+        dashboard = db.session.query(Dashboard).first()
+        example_db = get_example_database()
+        report_schedule_data = {
+            "type": ReportScheduleType.REPORT,
+            "name": "name4",
+            "description": "description",
+            "creation_method": ReportCreationMethodType.DASHBOARDS,
+            "crontab": "0 9 * * *",
+            "working_timeout": 3600,
+            "dashboard": dashboard.id,
+        }
+        uri = "api/v1/report/"
+        rv = self.client.post(uri, json=report_schedule_data)
+        data = json.loads(rv.data.decode("utf-8"))
+        assert rv.status_code == 201
+
+        # this second time it should receive an error because the dashboard has an attached report
+        # with the same creation method from the same user.
+        report_schedule_data = {
+            "type": ReportScheduleType.REPORT,
+            "name": "name5",
+            "description": "description",
+            "creation_method": ReportCreationMethodType.DASHBOARDS,
+            "crontab": "0 9 * * *",
+            "working_timeout": 3600,
+            "dashboard": dashboard.id,
+        }
+        uri = "api/v1/report/"
+        rv = self.client.post(uri, json=report_schedule_data)
+        data = json.loads(rv.data.decode("utf-8"))
+        assert rv.status_code == 409
+        assert data == {
+            "errors": [
+                {
+                    "message": "Resource already has an attached report.",
+                    "error_type": "GENERIC_COMMAND_ERROR",
+                    "level": "warning",
+                    "extra": {
+                        "issue_codes": [
+                            {
+                                "code": 1010,
+                                "message": "Issue 1010 - Superset encountered an error while running a command.",
+                            }
+                        ]
+                    },
+                }
+            ]
+        }
+
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_create_report_schedule_chart_dash_validation(self):
         """