From d1e93078f45b30b141620b465f6d81b63fd9c2a7 Mon Sep 17 00:00:00 2001 From: Daniel Vaz Gaspar Date: Mon, 15 Mar 2021 18:14:26 +0000 Subject: [PATCH] fix: delete dataset columns and metrics on the REST API (#13389) * feat: delete dataset columns and metrics on the REST API * fix openapi spec * change delete comparison to id * delete columns and metrics on their namespace * add missing licenses * add failed test * address comment --- superset/app.py | 4 + superset/datasets/api.py | 2 +- superset/datasets/columns/__init__.py | 16 ++ superset/datasets/columns/api.py | 104 +++++++++ .../datasets/columns/commands/__init__.py | 16 ++ superset/datasets/columns/commands/delete.py | 65 ++++++ .../datasets/columns/commands/exceptions.py | 31 +++ superset/datasets/dao.py | 141 ++++++++--- superset/datasets/metrics/__init__.py | 16 ++ superset/datasets/metrics/api.py | 104 +++++++++ .../datasets/metrics/commands/__init__.py | 16 ++ superset/datasets/metrics/commands/delete.py | 65 ++++++ .../datasets/metrics/commands/exceptions.py | 31 +++ tests/datasets/api_tests.py | 221 ++++++++++++++++++ 14 files changed, 805 insertions(+), 27 deletions(-) create mode 100644 superset/datasets/columns/__init__.py create mode 100644 superset/datasets/columns/api.py create mode 100644 superset/datasets/columns/commands/__init__.py create mode 100644 superset/datasets/columns/commands/delete.py create mode 100644 superset/datasets/columns/commands/exceptions.py create mode 100644 superset/datasets/metrics/__init__.py create mode 100644 superset/datasets/metrics/api.py create mode 100644 superset/datasets/metrics/commands/__init__.py create mode 100644 superset/datasets/metrics/commands/delete.py create mode 100644 superset/datasets/metrics/commands/exceptions.py diff --git a/superset/app.py b/superset/app.py index 432d3fb463..f0f5dc1510 100644 --- a/superset/app.py +++ b/superset/app.py @@ -148,6 +148,8 @@ class SupersetAppInitializer: from superset.dashboards.api import DashboardRestApi from superset.databases.api import DatabaseRestApi from superset.datasets.api import DatasetRestApi + from superset.datasets.columns.api import DatasetColumnsRestApi + from superset.datasets.metrics.api import DatasetMetricRestApi from superset.queries.api import QueryRestApi from superset.security.api import SecurityRestApi from superset.queries.saved_queries.api import SavedQueryRestApi @@ -213,6 +215,8 @@ class SupersetAppInitializer: appbuilder.add_api(DashboardRestApi) appbuilder.add_api(DatabaseRestApi) appbuilder.add_api(DatasetRestApi) + appbuilder.add_api(DatasetColumnsRestApi) + appbuilder.add_api(DatasetMetricRestApi) appbuilder.add_api(QueryRestApi) appbuilder.add_api(SavedQueryRestApi) if feature_flag_manager.is_feature_enabled("ALERT_REPORTS"): diff --git a/superset/datasets/api.py b/superset/datasets/api.py index 85adae5de7..fce1812c91 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -262,7 +262,7 @@ class DatasetRestApi(BaseSupersetModelRestApi): schema: type: integer name: pk - - in: path + - in: query schema: type: bool name: override_columns diff --git a/superset/datasets/columns/__init__.py b/superset/datasets/columns/__init__.py new file mode 100644 index 0000000000..13a83393a9 --- /dev/null +++ b/superset/datasets/columns/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/datasets/columns/api.py b/superset/datasets/columns/api.py
new file mode 100644
index 0000000000..a459806db6
--- /dev/null
+++ b/superset/datasets/columns/api.py
@@ -0,0 +1,104 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+
+from flask import g, Response
+from flask_appbuilder.api import expose, permission_name, protect, safe
+from flask_appbuilder.models.sqla.interface import SQLAInterface
+
+from superset.connectors.sqla.models import TableColumn
+from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
+from superset.datasets.columns.commands.delete import DeleteDatasetColumnCommand
+from superset.datasets.columns.commands.exceptions import (
+    DatasetColumnDeleteFailedError,
+    DatasetColumnForbiddenError,
+    DatasetColumnNotFoundError,
+)
+from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
+
+logger = logging.getLogger(__name__)
+
+
+class DatasetColumnsRestApi(BaseSupersetModelRestApi):
+    datamodel = SQLAInterface(TableColumn)
+
+    include_route_methods = {"delete"}
+    class_permission_name = "Dataset"
+    method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
+
+    resource_name = "dataset"
+    allow_browser_login = True
+
+    openapi_spec_tag = "Datasets"
+
+    @expose("/<pk>/column/<column_id>", methods=["DELETE"])
+    @protect()
+    @safe
+    @statsd_metrics
+    @permission_name("delete")
+    def delete(  # pylint: disable=arguments-differ
+        self, pk: int, column_id: int
+    ) -> Response:
+        """Deletes a Dataset column
+        ---
+        delete:
+          description: >-
+            Delete a Dataset column
+          parameters:
+          - in: path
+            schema:
+              type: integer
+            name: pk
+            description: The dataset pk for this column
+          - in: path
+            schema:
+              type: integer
+            name: column_id
+            description: The column id for this dataset
+          responses:
+            200:
+              description: Column deleted
+              content:
+                application/json:
+                  schema:
+                    type: object
+                    properties:
+                      message:
+                        type: string
+            401:
+              $ref: '#/components/responses/401'
+            403:
+              $ref: '#/components/responses/403'
+            404:
+              $ref: '#/components/responses/404'
+            422:
+              $ref: '#/components/responses/422'
+            500:
+              $ref: '#/components/responses/500'
+        """
+        try:
+            DeleteDatasetColumnCommand(g.user, pk, column_id).run()
+            return
self.response(200, message="OK") + except DatasetColumnNotFoundError: + return self.response_404() + except DatasetColumnForbiddenError: + return self.response_403() + except DatasetColumnDeleteFailedError as ex: + logger.error( + "Error deleting dataset column %s: %s", self.__class__.__name__, str(ex) + ) + return self.response_422(message=str(ex)) diff --git a/superset/datasets/columns/commands/__init__.py b/superset/datasets/columns/commands/__init__.py new file mode 100644 index 0000000000..13a83393a9 --- /dev/null +++ b/superset/datasets/columns/commands/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/datasets/columns/commands/delete.py b/superset/datasets/columns/commands/delete.py new file mode 100644 index 0000000000..1076c00909 --- /dev/null +++ b/superset/datasets/columns/commands/delete.py @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from typing import Optional + +from flask_appbuilder.models.sqla import Model +from flask_appbuilder.security.sqla.models import User + +from superset.commands.base import BaseCommand +from superset.connectors.sqla.models import TableColumn +from superset.dao.exceptions import DAODeleteFailedError +from superset.datasets.columns.commands.exceptions import ( + DatasetColumnDeleteFailedError, + DatasetColumnForbiddenError, + DatasetColumnNotFoundError, +) +from superset.datasets.dao import DatasetDAO +from superset.exceptions import SupersetSecurityException +from superset.views.base import check_ownership + +logger = logging.getLogger(__name__) + + +class DeleteDatasetColumnCommand(BaseCommand): + def __init__(self, user: User, dataset_id: int, model_id: int): + self._actor = user + self._dataset_id = dataset_id + self._model_id = model_id + self._model: Optional[TableColumn] = None + + def run(self) -> Model: + self.validate() + try: + if not self._model: + raise DatasetColumnNotFoundError() + column = DatasetDAO.delete_column(self._model) + return column + except DAODeleteFailedError as ex: + logger.exception(ex.exception) + raise DatasetColumnDeleteFailedError() + + def validate(self) -> None: + # Validate/populate model exists + self._model = DatasetDAO.find_dataset_column(self._dataset_id, self._model_id) + if not self._model: + raise DatasetColumnNotFoundError() + # Check ownership + try: + check_ownership(self._model) + except SupersetSecurityException: + raise DatasetColumnForbiddenError() diff --git a/superset/datasets/columns/commands/exceptions.py b/superset/datasets/columns/commands/exceptions.py new file mode 100644 index 0000000000..d881a82f80 --- /dev/null +++ b/superset/datasets/columns/commands/exceptions.py @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from flask_babel import lazy_gettext as _
+
+from superset.commands.exceptions import CommandException
+
+
+class DatasetColumnNotFoundError(CommandException):
+    message = _("Dataset column not found.")
+
+
+class DatasetColumnDeleteFailedError(CommandException):
+    message = _("Dataset column delete failed.")
+
+
+class DatasetColumnForbiddenError(CommandException):
+    message = _("Changing this dataset is forbidden.")
diff --git a/superset/datasets/dao.py b/superset/datasets/dao.py
index aaede30f26..08df867e67 100644
--- a/superset/datasets/dao.py
+++ b/superset/datasets/dao.py
@@ -31,7 +31,7 @@ from superset.views.base import DatasourceFilter
 logger = logging.getLogger(__name__)
 
 
-class DatasetDAO(BaseDAO):
+class DatasetDAO(BaseDAO):  # pylint: disable=too-many-public-methods
     model_cls = SqlaTable
     base_filter = DatasourceFilter
 
@@ -154,30 +154,13 @@ class DatasetDAO(BaseDAO):
         Updates a Dataset model on the metadata DB
         """
         if "columns" in properties:
-            new_columns = list()
-            for column in properties.get("columns", []):
-                if column.get("id"):
-                    column_obj = db.session.query(TableColumn).get(column.get("id"))
-                    column_obj = DatasetDAO.update_column(
-                        column_obj, column, commit=commit
-                    )
-                else:
-                    column_obj = DatasetDAO.create_column(column, commit=commit)
-                new_columns.append(column_obj)
-            properties["columns"] = new_columns
-
+            properties["columns"] = cls.update_columns(
+                model, properties.get("columns", []), commit=commit
+            )
         if "metrics" in properties:
-            new_metrics = list()
-            for metric in properties.get("metrics", []):
-                if metric.get("id"):
-                    metric_obj = db.session.query(SqlMetric).get(metric.get("id"))
-                    metric_obj = DatasetDAO.update_metric(
-                        metric_obj, metric, commit=commit
-                    )
-                else:
-                    metric_obj = DatasetDAO.create_metric(metric, commit=commit)
-                new_metrics.append(metric_obj)
-            properties["metrics"] = new_metrics
+            properties["metrics"] = cls.update_metrics(
+                model, properties.get("metrics", []), commit=commit
+            )
 
         if override_columns:
             # remove columns initially for full refresh
@@ -186,9 +169,87 @@
             super().update(model, properties, commit=commit)
             properties["columns"] = original_properties
 
-        super().update(model, properties, commit=False)
+        updated_model = super().update(model, properties, commit=False)
         model.health_check(force=True, commit=False)
-        return super().update(model, properties, commit=commit)
+        return updated_model
+
+    @classmethod
+    def update_columns(
+        cls,
+        model: SqlaTable,
+        property_columns: List[Dict[str, Any]],
+        commit: bool = True,
+    ) -> List[TableColumn]:
+        """
+        Creates/updates and/or deletes a list of columns, based on a
+        list of Dict.
+
+        - If a column Dict has an `id` property then we update.
+        - If a column Dict does not have an `id` then we create a new column.
+        - If there are extra columns on the metadata db that are not defined on the List
+          then we delete.
+ """ + new_columns = [] + for column in property_columns: + column_id = column.get("id") + + if column_id: + column_obj = db.session.query(TableColumn).get(column_id) + column_obj = DatasetDAO.update_column(column_obj, column, commit=commit) + else: + column_obj = DatasetDAO.create_column(column, commit=commit) + new_columns.append(column_obj) + # Checks if an exiting column is missing from properties and delete it + for existing_column in model.columns: + if existing_column.id not in [column.id for column in new_columns]: + DatasetDAO.delete_column(existing_column) + return new_columns + + @classmethod + def update_metrics( + cls, + model: SqlaTable, + property_metrics: List[Dict[str, Any]], + commit: bool = True, + ) -> List[SqlMetric]: + """ + Creates/updates and/or deletes a list of metrics, based on a + list of Dict. + + - If a metric Dict has an `id` property then we update. + - If a metric Dict does not have an `id` then we create a new metric. + - If there are extra metrics on the metadata db that are not defined on the List + then we delete. + """ + new_metrics = list() + for metric in property_metrics: + metric_id = metric.get("id") + if metric.get("id"): + metric_obj = db.session.query(SqlMetric).get(metric_id) + metric_obj = DatasetDAO.update_metric(metric_obj, metric, commit=commit) + else: + metric_obj = DatasetDAO.create_metric(metric, commit=commit) + new_metrics.append(metric_obj) + + # Checks if an exiting column is missing from properties and delete it + for existing_metric in model.metrics: + if existing_metric.id not in [metric.id for metric in new_metrics]: + DatasetDAO.delete_metric(existing_metric) + return new_metrics + + @classmethod + def find_dataset_column( + cls, dataset_id: int, column_id: int + ) -> Optional[TableColumn]: + # We want to apply base dataset filters + dataset = DatasetDAO.find_by_id(dataset_id) + if not dataset: + return None + return ( + db.session.query(TableColumn) + .filter(TableColumn.table_id == dataset_id, TableColumn.id == column_id) + .one_or_none() + ) @classmethod def update_column( @@ -205,6 +266,34 @@ class DatasetDAO(BaseDAO): """ return DatasetColumnDAO.create(properties, commit=commit) + @classmethod + def delete_column( + cls, model: TableColumn, commit: bool = True + ) -> Optional[TableColumn]: + """ + Deletes a Dataset column + """ + return cls.delete(model, commit=commit) + + @classmethod + def find_dataset_metric( + cls, dataset_id: int, metric_id: int + ) -> Optional[SqlMetric]: + # We want to apply base dataset filters + dataset = DatasetDAO.find_by_id(dataset_id) + if not dataset: + return None + return db.session.query(SqlMetric).get(metric_id) + + @classmethod + def delete_metric( + cls, model: SqlMetric, commit: bool = True + ) -> Optional[TableColumn]: + """ + Deletes a Dataset metric + """ + return cls.delete(model, commit=commit) + @classmethod def update_metric( cls, model: SqlMetric, properties: Dict[str, Any], commit: bool = True diff --git a/superset/datasets/metrics/__init__.py b/superset/datasets/metrics/__init__.py new file mode 100644 index 0000000000..13a83393a9 --- /dev/null +++ b/superset/datasets/metrics/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/datasets/metrics/api.py b/superset/datasets/metrics/api.py
new file mode 100644
index 0000000000..948ede0ed7
--- /dev/null
+++ b/superset/datasets/metrics/api.py
@@ -0,0 +1,104 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+
+from flask import g, Response
+from flask_appbuilder.api import expose, permission_name, protect, safe
+from flask_appbuilder.models.sqla.interface import SQLAInterface
+
+from superset.connectors.sqla.models import SqlMetric
+from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
+from superset.datasets.metrics.commands.delete import DeleteDatasetMetricCommand
+from superset.datasets.metrics.commands.exceptions import (
+    DatasetMetricDeleteFailedError,
+    DatasetMetricForbiddenError,
+    DatasetMetricNotFoundError,
+)
+from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
+
+logger = logging.getLogger(__name__)
+
+
+class DatasetMetricRestApi(BaseSupersetModelRestApi):
+    datamodel = SQLAInterface(SqlMetric)
+
+    include_route_methods = {"delete"}
+    class_permission_name = "Dataset"
+    method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
+
+    resource_name = "dataset"
+    allow_browser_login = True
+
+    openapi_spec_tag = "Datasets"
+
+    @expose("/<pk>/metric/<metric_id>", methods=["DELETE"])
+    @protect()
+    @safe
+    @statsd_metrics
+    @permission_name("delete")
+    def delete(  # pylint: disable=arguments-differ
+        self, pk: int, metric_id: int
+    ) -> Response:
+        """Deletes a Dataset metric
+        ---
+        delete:
+          description: >-
+            Delete a Dataset metric
+          parameters:
+          - in: path
+            schema:
+              type: integer
+            name: pk
+            description: The dataset pk for this metric
+          - in: path
+            schema:
+              type: integer
+            name: metric_id
+            description: The metric id for this dataset
+          responses:
+            200:
+              description: Metric deleted
+              content:
+                application/json:
+                  schema:
+                    type: object
+                    properties:
+                      message:
+                        type: string
+            401:
+              $ref: '#/components/responses/401'
+            403:
+              $ref: '#/components/responses/403'
+            404:
+              $ref: '#/components/responses/404'
+            422:
+              $ref: '#/components/responses/422'
+            500:
+              $ref: '#/components/responses/500'
+        """
+        try:
+            DeleteDatasetMetricCommand(g.user, pk, metric_id).run()
+            return self.response(200, message="OK")
+        except DatasetMetricNotFoundError:
+            return self.response_404()
+        except DatasetMetricForbiddenError:
+            return self.response_403()
+        except DatasetMetricDeleteFailedError as ex:
+            logger.error(
+                "Error deleting dataset metric %s: %s", self.__class__.__name__, str(ex)
+            )
+            return self.response_422(message=str(ex))
diff --git a/superset/datasets/metrics/commands/__init__.py b/superset/datasets/metrics/commands/__init__.py
new file mode 100644
index 0000000000..13a83393a9
--- /dev/null
+++ b/superset/datasets/metrics/commands/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/datasets/metrics/commands/delete.py b/superset/datasets/metrics/commands/delete.py
new file mode 100644
index 0000000000..e6f68dc981
--- /dev/null
+++ b/superset/datasets/metrics/commands/delete.py
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging +from typing import Optional + +from flask_appbuilder.models.sqla import Model +from flask_appbuilder.security.sqla.models import User + +from superset.commands.base import BaseCommand +from superset.connectors.sqla.models import SqlMetric +from superset.dao.exceptions import DAODeleteFailedError +from superset.datasets.dao import DatasetDAO +from superset.datasets.metrics.commands.exceptions import ( + DatasetMetricDeleteFailedError, + DatasetMetricForbiddenError, + DatasetMetricNotFoundError, +) +from superset.exceptions import SupersetSecurityException +from superset.views.base import check_ownership + +logger = logging.getLogger(__name__) + + +class DeleteDatasetMetricCommand(BaseCommand): + def __init__(self, user: User, dataset_id: int, model_id: int): + self._actor = user + self._dataset_id = dataset_id + self._model_id = model_id + self._model: Optional[SqlMetric] = None + + def run(self) -> Model: + self.validate() + try: + if not self._model: + raise DatasetMetricNotFoundError() + column = DatasetDAO.delete_metric(self._model) + return column + except DAODeleteFailedError as ex: + logger.exception(ex.exception) + raise DatasetMetricDeleteFailedError() + + def validate(self) -> None: + # Validate/populate model exists + self._model = DatasetDAO.find_dataset_metric(self._dataset_id, self._model_id) + if not self._model: + raise DatasetMetricNotFoundError() + # Check ownership + try: + check_ownership(self._model) + except SupersetSecurityException: + raise DatasetMetricForbiddenError() diff --git a/superset/datasets/metrics/commands/exceptions.py b/superset/datasets/metrics/commands/exceptions.py new file mode 100644 index 0000000000..0d07683c0f --- /dev/null +++ b/superset/datasets/metrics/commands/exceptions.py @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+from flask_babel import lazy_gettext as _ + +from superset.commands.exceptions import CommandException + + +class DatasetMetricNotFoundError(CommandException): + message = _("Dataset metric not found.") + + +class DatasetMetricDeleteFailedError(CommandException): + message = _("Dataset metric delete failed.") + + +class DatasetMetricForbiddenError(CommandException): + message = _("Changing this dataset is forbidden.") diff --git a/tests/datasets/api_tests.py b/tests/datasets/api_tests.py index 00be830fcf..a2809d7ad2 100644 --- a/tests/datasets/api_tests.py +++ b/tests/datasets/api_tests.py @@ -659,6 +659,53 @@ class TestDatasetApi(SupersetTestCase): db.session.delete(dataset) db.session.commit() + def test_update_dataset_delete_column(self): + """ + Dataset API: Test update dataset delete column + """ + # create example dataset by Command + dataset = self.insert_default_dataset() + + new_column_data = { + "column_name": "new_col", + "description": "description", + "expression": "expression", + "type": "INTEGER", + "verbose_name": "New Col", + } + uri = f"api/v1/dataset/{dataset.id}" + # Get current cols and append the new column + self.login(username="admin") + rv = self.get_assert_metric(uri, "get") + data = json.loads(rv.data.decode("utf-8")) + + for column in data["result"]["columns"]: + column.pop("changed_on", None) + column.pop("created_on", None) + + data["result"]["columns"].append(new_column_data) + rv = self.client.put(uri, json={"columns": data["result"]["columns"]}) + + assert rv.status_code == 200 + + # Remove this new column + data["result"]["columns"].remove(new_column_data) + rv = self.client.put(uri, json={"columns": data["result"]["columns"]}) + assert rv.status_code == 200 + + columns = ( + db.session.query(TableColumn) + .filter_by(table_id=dataset.id) + .order_by("column_name") + .all() + ) + assert columns[0].column_name == "id" + assert columns[1].column_name == "name" + assert len(columns) == 2 + + db.session.delete(dataset) + db.session.commit() + def test_update_dataset_update_column(self): """ Dataset API: Test update dataset columns @@ -694,6 +741,49 @@ class TestDatasetApi(SupersetTestCase): db.session.delete(dataset) db.session.commit() + def test_update_dataset_delete_metric(self): + """ + Dataset API: Test update dataset delete metric + """ + dataset = self.insert_default_dataset() + metrics_query = ( + db.session.query(SqlMetric) + .filter_by(table_id=dataset.id) + .order_by("metric_name") + ) + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}" + data = { + "metrics": [ + {"metric_name": "metric1", "expression": "COUNT(*)"}, + {"metric_name": "metric2", "expression": "DIFF_COUNT(*)"}, + ] + } + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 200 + + metrics = metrics_query.all() + assert len(metrics) == 2 + + data = { + "metrics": [ + { + "id": metrics[0].id, + "metric_name": "metric1", + "expression": "COUNT(*)", + }, + ] + } + rv = self.put_assert_metric(uri, data, "put") + assert rv.status_code == 200 + + metrics = metrics_query.all() + assert len(metrics) == 1 + + db.session.delete(dataset) + db.session.commit() + def test_update_dataset_update_column_uniqueness(self): """ Dataset API: Test update dataset columns uniqueness @@ -922,6 +1012,137 @@ class TestDatasetApi(SupersetTestCase): db.session.delete(dataset) db.session.commit() + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_column(self): + """ + Dataset API: Test delete dataset column + """ + dataset = 
self.get_fixture_datasets()[0] + column_id = dataset.columns[0].id + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/column/{column_id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + assert db.session.query(TableColumn).get(column_id) == None + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_column_not_found(self): + """ + Dataset API: Test delete dataset column not found + """ + dataset = self.get_fixture_datasets()[0] + non_id = self.get_nonexistent_numeric_id(TableColumn) + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/column/{non_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + non_id = self.get_nonexistent_numeric_id(SqlaTable) + column_id = dataset.columns[0].id + + self.login(username="admin") + uri = f"api/v1/dataset/{non_id}/column/{column_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_column_not_owned(self): + """ + Dataset API: Test delete dataset column not owned + """ + dataset = self.get_fixture_datasets()[0] + column_id = dataset.columns[0].id + + self.login(username="alpha") + uri = f"api/v1/dataset/{dataset.id}/column/{column_id}" + rv = self.client.delete(uri) + assert rv.status_code == 403 + + @pytest.mark.usefixtures("create_datasets") + @patch("superset.datasets.dao.DatasetDAO.delete") + def test_delete_dataset_column_fail(self, mock_dao_delete): + """ + Dataset API: Test delete dataset column + """ + mock_dao_delete.side_effect = DAODeleteFailedError() + dataset = self.get_fixture_datasets()[0] + column_id = dataset.columns[0].id + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/column/{column_id}" + rv = self.client.delete(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": "Dataset column delete failed."} + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_metric(self): + """ + Dataset API: Test delete dataset metric + """ + dataset = self.get_fixture_datasets()[0] + test_metric = SqlMetric( + metric_name="metric1", expression="COUNT(*)", table=dataset + ) + db.session.add(test_metric) + db.session.commit() + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/metric/{test_metric.id}" + rv = self.client.delete(uri) + assert rv.status_code == 200 + assert db.session.query(SqlMetric).get(test_metric.id) == None + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_metric_not_found(self): + """ + Dataset API: Test delete dataset metric not found + """ + dataset = self.get_fixture_datasets()[0] + non_id = self.get_nonexistent_numeric_id(SqlMetric) + + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/metric/{non_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + non_id = self.get_nonexistent_numeric_id(SqlaTable) + metric_id = dataset.metrics[0].id + + self.login(username="admin") + uri = f"api/v1/dataset/{non_id}/metric/{metric_id}" + rv = self.client.delete(uri) + assert rv.status_code == 404 + + @pytest.mark.usefixtures("create_datasets") + def test_delete_dataset_metric_not_owned(self): + """ + Dataset API: Test delete dataset metric not owned + """ + dataset = self.get_fixture_datasets()[0] + metric_id = dataset.metrics[0].id + + self.login(username="alpha") + uri = f"api/v1/dataset/{dataset.id}/metric/{metric_id}" + rv = self.client.delete(uri) + assert rv.status_code == 403 + + 
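+    # A minimal illustrative sketch of how a client exercises the two DELETE
+    # endpoints added in this patch; the URL shapes come from the @expose
+    # routes above, and dataset_id, column_id and metric_id are placeholder
+    # values, not fixture data:
+    #
+    #   self.client.delete(f"api/v1/dataset/{dataset_id}/column/{column_id}")
+    #   self.client.delete(f"api/v1/dataset/{dataset_id}/metric/{metric_id}")
+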
@pytest.mark.usefixtures("create_datasets") + @patch("superset.datasets.dao.DatasetDAO.delete") + def test_delete_dataset_metric_fail(self, mock_dao_delete): + """ + Dataset API: Test delete dataset metric + """ + mock_dao_delete.side_effect = DAODeleteFailedError() + dataset = self.get_fixture_datasets()[0] + column_id = dataset.metrics[0].id + self.login(username="admin") + uri = f"api/v1/dataset/{dataset.id}/metric/{column_id}" + rv = self.client.delete(uri) + data = json.loads(rv.data.decode("utf-8")) + assert rv.status_code == 422 + assert data == {"message": "Dataset metric delete failed."} + @pytest.mark.usefixtures("create_datasets") def test_bulk_delete_dataset_items(self): """