fix: delete dataset columns and metrics on the REST API (#13389)

* feat: delete dataset columns and metrics on the REST API

* fix openapi spec

* change delete comparison to id

* delete columns and metrics on their namespace

* add missing licenses

* add failed test

* address comment
This commit is contained in:
Daniel Vaz Gaspar 2021-03-15 18:14:26 +00:00 committed by GitHub
parent 4de6040d89
commit d1e93078f4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 805 additions and 27 deletions

View File

@ -148,6 +148,8 @@ class SupersetAppInitializer:
from superset.dashboards.api import DashboardRestApi
from superset.databases.api import DatabaseRestApi
from superset.datasets.api import DatasetRestApi
from superset.datasets.columns.api import DatasetColumnsRestApi
from superset.datasets.metrics.api import DatasetMetricRestApi
from superset.queries.api import QueryRestApi
from superset.security.api import SecurityRestApi
from superset.queries.saved_queries.api import SavedQueryRestApi
@ -213,6 +215,8 @@ class SupersetAppInitializer:
appbuilder.add_api(DashboardRestApi)
appbuilder.add_api(DatabaseRestApi)
appbuilder.add_api(DatasetRestApi)
appbuilder.add_api(DatasetColumnsRestApi)
appbuilder.add_api(DatasetMetricRestApi)
appbuilder.add_api(QueryRestApi)
appbuilder.add_api(SavedQueryRestApi)
if feature_flag_manager.is_feature_enabled("ALERT_REPORTS"):

View File

@ -262,7 +262,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
schema:
type: integer
name: pk
- in: path
- in: query
schema:
type: bool
name: override_columns

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -0,0 +1,104 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from flask import g, Response
from flask_appbuilder.api import expose, permission_name, protect, safe
from flask_appbuilder.models.sqla.interface import SQLAInterface
from superset.connectors.sqla.models import TableColumn
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
from superset.datasets.columns.commands.delete import DeleteDatasetColumnCommand
from superset.datasets.columns.commands.exceptions import (
DatasetColumnDeleteFailedError,
DatasetColumnForbiddenError,
DatasetColumnNotFoundError,
)
from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
logger = logging.getLogger(__name__)
class DatasetColumnsRestApi(BaseSupersetModelRestApi):
    """REST API that exposes deletion of a single column from a dataset."""

    # The model behind this API is a dataset column (TableColumn).
    datamodel = SQLAInterface(TableColumn)
    # Only the DELETE route is exposed; all other generated routes are disabled.
    include_route_methods = {"delete"}
    # Permissions are shared with the Dataset API rather than a separate resource.
    class_permission_name = "Dataset"
    method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
    # Mounted under the dataset namespace: /api/v1/dataset/...
    resource_name = "dataset"
    allow_browser_login = True
    openapi_spec_tag = "Datasets"

    @expose("/<int:pk>/column/<int:column_id>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("delete")
    def delete(  # pylint: disable=arguments-differ
        self, pk: int, column_id: int
    ) -> Response:
        """Deletes a Dataset column
        ---
        delete:
          description: >-
            Delete a Dataset column
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The dataset pk for this column
          - in: path
            schema:
              type: integer
            name: column_id
            description: The column id for this dataset
          responses:
            200:
              description: Column deleted
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            # The command validates existence and ownership before deleting.
            DeleteDatasetColumnCommand(g.user, pk, column_id).run()
            return self.response(200, message="OK")
        except DatasetColumnNotFoundError:
            return self.response_404()
        except DatasetColumnForbiddenError:
            return self.response_403()
        except DatasetColumnDeleteFailedError as ex:
            logger.error(
                "Error deleting dataset column %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -0,0 +1,65 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Optional
from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User
from superset.commands.base import BaseCommand
from superset.connectors.sqla.models import TableColumn
from superset.dao.exceptions import DAODeleteFailedError
from superset.datasets.columns.commands.exceptions import (
DatasetColumnDeleteFailedError,
DatasetColumnForbiddenError,
DatasetColumnNotFoundError,
)
from superset.datasets.dao import DatasetDAO
from superset.exceptions import SupersetSecurityException
from superset.views.base import check_ownership
logger = logging.getLogger(__name__)
class DeleteDatasetColumnCommand(BaseCommand):
    """Command that deletes a single column from a dataset.

    ``validate()`` checks that the column exists (scoped to the given
    dataset) and that the actor owns the dataset; ``run()`` delegates the
    actual delete to the DAO.
    """

    def __init__(self, user: User, dataset_id: int, model_id: int):
        self._actor = user
        self._dataset_id = dataset_id
        self._model_id = model_id
        # Populated by validate(); None until then.
        self._model: Optional[TableColumn] = None

    def run(self) -> Model:
        """Delete the column and return the deleted model.

        :raises DatasetColumnNotFoundError: column not found for this dataset
        :raises DatasetColumnForbiddenError: actor lacks ownership
        :raises DatasetColumnDeleteFailedError: DAO-level delete failure
        """
        self.validate()
        if not self._model:
            # validate() already raises on a missing model; this guard also
            # narrows the Optional type for checkers.
            raise DatasetColumnNotFoundError()
        try:
            return DatasetDAO.delete_column(self._model)
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            # Chain the DAO error so the root cause survives in tracebacks.
            raise DatasetColumnDeleteFailedError() from ex

    def validate(self) -> None:
        # Validate/populate model exists
        self._model = DatasetDAO.find_dataset_column(self._dataset_id, self._model_id)
        if not self._model:
            raise DatasetColumnNotFoundError()
        # Check ownership of the parent dataset
        try:
            check_ownership(self._model)
        except SupersetSecurityException as ex:
            raise DatasetColumnForbiddenError() from ex

View File

@ -0,0 +1,31 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask_babel import lazy_gettext as _
from superset.commands.exceptions import CommandException
class DatasetColumnNotFoundError(CommandException):
    """Raised when the requested column does not exist on the dataset."""

    message = _("Dataset column not found.")
class DatasetColumnDeleteFailedError(CommandException):
    """Raised when the DAO fails to delete the column."""

    message = _("Dataset column delete failed.")
class DatasetColumnForbiddenError(CommandException):
    """Raised when the actor does not own the dataset being changed."""

    message = _("Changing this dataset is forbidden.")

View File

@ -31,7 +31,7 @@ from superset.views.base import DatasourceFilter
logger = logging.getLogger(__name__)
class DatasetDAO(BaseDAO):
class DatasetDAO(BaseDAO): # pylint: disable=too-many-public-methods
model_cls = SqlaTable
base_filter = DatasourceFilter
@ -154,30 +154,13 @@ class DatasetDAO(BaseDAO):
Updates a Dataset model on the metadata DB
"""
if "columns" in properties:
new_columns = list()
for column in properties.get("columns", []):
if column.get("id"):
column_obj = db.session.query(TableColumn).get(column.get("id"))
column_obj = DatasetDAO.update_column(
column_obj, column, commit=commit
)
else:
column_obj = DatasetDAO.create_column(column, commit=commit)
new_columns.append(column_obj)
properties["columns"] = new_columns
properties["columns"] = cls.update_columns(
model, properties.get("columns", []), commit=commit
)
if "metrics" in properties:
new_metrics = list()
for metric in properties.get("metrics", []):
if metric.get("id"):
metric_obj = db.session.query(SqlMetric).get(metric.get("id"))
metric_obj = DatasetDAO.update_metric(
metric_obj, metric, commit=commit
)
else:
metric_obj = DatasetDAO.create_metric(metric, commit=commit)
new_metrics.append(metric_obj)
properties["metrics"] = new_metrics
properties["metrics"] = cls.update_metrics(
model, properties.get("metrics", []), commit=commit
)
if override_columns:
# remove columns initially for full refresh
@ -186,9 +169,87 @@ class DatasetDAO(BaseDAO):
super().update(model, properties, commit=commit)
properties["columns"] = original_properties
super().update(model, properties, commit=False)
updated_model = super().update(model, properties, commit=False)
model.health_check(force=True, commit=False)
return super().update(model, properties, commit=commit)
return updated_model
@classmethod
def update_columns(
cls,
model: SqlaTable,
property_columns: List[Dict[str, Any]],
commit: bool = True,
) -> List[TableColumn]:
"""
Creates/updates and/or deletes a list of columns, based on a
list of Dict.
- If a column Dict has an `id` property then we update.
- If a column Dict does not have an `id` then we create a new column.
- If there are extra columns on the metadata db that are not defined on the List
then we delete.
"""
new_columns = []
for column in property_columns:
column_id = column.get("id")
if column_id:
column_obj = db.session.query(TableColumn).get(column_id)
column_obj = DatasetDAO.update_column(column_obj, column, commit=commit)
else:
column_obj = DatasetDAO.create_column(column, commit=commit)
new_columns.append(column_obj)
# Checks if an existing column is missing from properties and deletes it
for existing_column in model.columns:
if existing_column.id not in [column.id for column in new_columns]:
DatasetDAO.delete_column(existing_column)
return new_columns
@classmethod
def update_metrics(
cls,
model: SqlaTable,
property_metrics: List[Dict[str, Any]],
commit: bool = True,
) -> List[SqlMetric]:
"""
Creates/updates and/or deletes a list of metrics, based on a
list of Dict.
- If a metric Dict has an `id` property then we update.
- If a metric Dict does not have an `id` then we create a new metric.
- If there are extra metrics on the metadata db that are not defined on the List
then we delete.
"""
new_metrics = list()
for metric in property_metrics:
metric_id = metric.get("id")
if metric.get("id"):
metric_obj = db.session.query(SqlMetric).get(metric_id)
metric_obj = DatasetDAO.update_metric(metric_obj, metric, commit=commit)
else:
metric_obj = DatasetDAO.create_metric(metric, commit=commit)
new_metrics.append(metric_obj)
# Checks if an existing metric is missing from properties and deletes it
for existing_metric in model.metrics:
if existing_metric.id not in [metric.id for metric in new_metrics]:
DatasetDAO.delete_metric(existing_metric)
return new_metrics
@classmethod
def find_dataset_column(
cls, dataset_id: int, column_id: int
) -> Optional[TableColumn]:
# We want to apply base dataset filters
dataset = DatasetDAO.find_by_id(dataset_id)
if not dataset:
return None
return (
db.session.query(TableColumn)
.filter(TableColumn.table_id == dataset_id, TableColumn.id == column_id)
.one_or_none()
)
@classmethod
def update_column(
@ -205,6 +266,34 @@ class DatasetDAO(BaseDAO):
"""
return DatasetColumnDAO.create(properties, commit=commit)
@classmethod
def delete_column(
cls, model: TableColumn, commit: bool = True
) -> Optional[TableColumn]:
"""
Deletes a Dataset column
"""
return cls.delete(model, commit=commit)
@classmethod
def find_dataset_metric(
cls, dataset_id: int, metric_id: int
) -> Optional[SqlMetric]:
# We want to apply base dataset filters
dataset = DatasetDAO.find_by_id(dataset_id)
if not dataset:
return None
return db.session.query(SqlMetric).get(metric_id)
@classmethod
def delete_metric(
cls, model: SqlMetric, commit: bool = True
) -> Optional[TableColumn]:
"""
Deletes a Dataset metric
"""
return cls.delete(model, commit=commit)
@classmethod
def update_metric(
cls, model: SqlMetric, properties: Dict[str, Any], commit: bool = True

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -0,0 +1,104 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from flask import g, Response
from flask_appbuilder.api import expose, permission_name, protect, safe
from flask_appbuilder.models.sqla.interface import SQLAInterface
from superset.connectors.sqla.models import SqlMetric, TableColumn
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP
from superset.datasets.metrics.commands.delete import DeleteDatasetMetricCommand
from superset.datasets.metrics.commands.exceptions import (
DatasetMetricDeleteFailedError,
DatasetMetricForbiddenError,
DatasetMetricNotFoundError,
)
from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
logger = logging.getLogger(__name__)
class DatasetMetricRestApi(BaseSupersetModelRestApi):
    """REST API that exposes deletion of a single metric from a dataset."""

    # Fix: this API operates on metrics, so the datamodel must be SqlMetric
    # (the original copy-pasted TableColumn from the columns API).
    datamodel = SQLAInterface(SqlMetric)
    # Only the DELETE route is exposed; all other generated routes are disabled.
    include_route_methods = {"delete"}
    # Permissions are shared with the Dataset API rather than a separate resource.
    class_permission_name = "Dataset"
    method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
    # Mounted under the dataset namespace: /api/v1/dataset/...
    resource_name = "dataset"
    allow_browser_login = True
    openapi_spec_tag = "Datasets"

    @expose("/<int:pk>/metric/<int:metric_id>", methods=["DELETE"])
    @protect()
    @safe
    @statsd_metrics
    @permission_name("delete")
    def delete(  # pylint: disable=arguments-differ
        self, pk: int, metric_id: int
    ) -> Response:
        """Deletes a Dataset metric
        ---
        delete:
          description: >-
            Delete a Dataset metric
          parameters:
          - in: path
            schema:
              type: integer
            name: pk
            description: The dataset pk for this column
          - in: path
            schema:
              type: integer
            name: metric_id
            description: The metric id for this dataset
          responses:
            200:
              description: Metric deleted
              content:
                application/json:
                  schema:
                    type: object
                    properties:
                      message:
                        type: string
            401:
              $ref: '#/components/responses/401'
            403:
              $ref: '#/components/responses/403'
            404:
              $ref: '#/components/responses/404'
            422:
              $ref: '#/components/responses/422'
            500:
              $ref: '#/components/responses/500'
        """
        try:
            # The command validates existence and ownership before deleting.
            DeleteDatasetMetricCommand(g.user, pk, metric_id).run()
            return self.response(200, message="OK")
        except DatasetMetricNotFoundError:
            return self.response_404()
        except DatasetMetricForbiddenError:
            return self.response_403()
        except DatasetMetricDeleteFailedError as ex:
            # Fix: log message said "column" in the metric endpoint.
            logger.error(
                "Error deleting dataset metric %s: %s", self.__class__.__name__, str(ex)
            )
            return self.response_422(message=str(ex))

View File

@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@ -0,0 +1,65 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Optional
from flask_appbuilder.models.sqla import Model
from flask_appbuilder.security.sqla.models import User
from superset.commands.base import BaseCommand
from superset.connectors.sqla.models import SqlMetric
from superset.dao.exceptions import DAODeleteFailedError
from superset.datasets.dao import DatasetDAO
from superset.datasets.metrics.commands.exceptions import (
DatasetMetricDeleteFailedError,
DatasetMetricForbiddenError,
DatasetMetricNotFoundError,
)
from superset.exceptions import SupersetSecurityException
from superset.views.base import check_ownership
logger = logging.getLogger(__name__)
class DeleteDatasetMetricCommand(BaseCommand):
    """Command that deletes a single metric from a dataset.

    ``validate()`` checks that the metric exists and that the actor owns the
    dataset; ``run()`` delegates the actual delete to the DAO.
    """

    def __init__(self, user: User, dataset_id: int, model_id: int):
        self._actor = user
        self._dataset_id = dataset_id
        self._model_id = model_id
        # Populated by validate(); None until then.
        self._model: Optional[SqlMetric] = None

    def run(self) -> Model:
        """Delete the metric and return the deleted model.

        :raises DatasetMetricNotFoundError: metric not found
        :raises DatasetMetricForbiddenError: actor lacks ownership
        :raises DatasetMetricDeleteFailedError: DAO-level delete failure
        """
        self.validate()
        if not self._model:
            # validate() already raises on a missing model; this guard also
            # narrows the Optional type for checkers.
            raise DatasetMetricNotFoundError()
        try:
            # Renamed local from the copy-pasted `column` — this is a metric.
            metric = DatasetDAO.delete_metric(self._model)
            return metric
        except DAODeleteFailedError as ex:
            logger.exception(ex.exception)
            # Chain the DAO error so the root cause survives in tracebacks.
            raise DatasetMetricDeleteFailedError() from ex

    def validate(self) -> None:
        # Validate/populate model exists
        self._model = DatasetDAO.find_dataset_metric(self._dataset_id, self._model_id)
        if not self._model:
            raise DatasetMetricNotFoundError()
        # Check ownership of the parent dataset
        try:
            check_ownership(self._model)
        except SupersetSecurityException as ex:
            raise DatasetMetricForbiddenError() from ex

View File

@ -0,0 +1,31 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask_babel import lazy_gettext as _
from superset.commands.exceptions import CommandException
class DatasetMetricNotFoundError(CommandException):
    """Raised when the requested metric does not exist."""

    message = _("Dataset metric not found.")
class DatasetMetricDeleteFailedError(CommandException):
    """Raised when the DAO fails to delete the metric."""

    message = _("Dataset metric delete failed.")
class DatasetMetricForbiddenError(CommandException):
    """Raised when the actor does not own the dataset being changed."""

    message = _("Changing this dataset is forbidden.")

View File

@ -659,6 +659,53 @@ class TestDatasetApi(SupersetTestCase):
db.session.delete(dataset)
db.session.commit()
def test_update_dataset_delete_column(self):
    """
    Dataset API: Test update dataset delete column
    """
    # create example dataset by Command
    dataset = self.insert_default_dataset()
    new_column_data = {
        "column_name": "new_col",
        "description": "description",
        "expression": "expression",
        "type": "INTEGER",
        "verbose_name": "New Col",
    }
    uri = f"api/v1/dataset/{dataset.id}"
    # Get current cols and append the new column
    self.login(username="admin")
    rv = self.get_assert_metric(uri, "get")
    data = json.loads(rv.data.decode("utf-8"))
    # Strip server-generated timestamps so the payload round-trips cleanly.
    for column in data["result"]["columns"]:
        column.pop("changed_on", None)
        column.pop("created_on", None)
    data["result"]["columns"].append(new_column_data)
    rv = self.client.put(uri, json={"columns": data["result"]["columns"]})
    assert rv.status_code == 200
    # Remove this new column: PUTting a column list that omits it should
    # delete it from the metadata DB (the behavior under test).
    data["result"]["columns"].remove(new_column_data)
    rv = self.client.put(uri, json={"columns": data["result"]["columns"]})
    assert rv.status_code == 200
    columns = (
        db.session.query(TableColumn)
        .filter_by(table_id=dataset.id)
        .order_by("column_name")
        .all()
    )
    # Only the default dataset's original columns should remain.
    assert columns[0].column_name == "id"
    assert columns[1].column_name == "name"
    assert len(columns) == 2
    db.session.delete(dataset)
    db.session.commit()
def test_update_dataset_update_column(self):
"""
Dataset API: Test update dataset columns
@ -694,6 +741,49 @@ class TestDatasetApi(SupersetTestCase):
db.session.delete(dataset)
db.session.commit()
def test_update_dataset_delete_metric(self):
    """
    Dataset API: Test update dataset delete metric
    """
    dataset = self.insert_default_dataset()
    metrics_query = (
        db.session.query(SqlMetric)
        .filter_by(table_id=dataset.id)
        .order_by("metric_name")
    )
    self.login(username="admin")
    uri = f"api/v1/dataset/{dataset.id}"
    # PUT a metrics list with two entries (no ids -> both created).
    data = {
        "metrics": [
            {"metric_name": "metric1", "expression": "COUNT(*)"},
            {"metric_name": "metric2", "expression": "DIFF_COUNT(*)"},
        ]
    }
    rv = self.put_assert_metric(uri, data, "put")
    assert rv.status_code == 200
    metrics = metrics_query.all()
    assert len(metrics) == 2
    # PUT again with only the first metric; omitting the second one is
    # expected to delete it from the metadata DB (the behavior under test).
    data = {
        "metrics": [
            {
                "id": metrics[0].id,
                "metric_name": "metric1",
                "expression": "COUNT(*)",
            },
        ]
    }
    rv = self.put_assert_metric(uri, data, "put")
    assert rv.status_code == 200
    metrics = metrics_query.all()
    assert len(metrics) == 1
    db.session.delete(dataset)
    db.session.commit()
def test_update_dataset_update_column_uniqueness(self):
"""
Dataset API: Test update dataset columns uniqueness
@ -922,6 +1012,137 @@ class TestDatasetApi(SupersetTestCase):
db.session.delete(dataset)
db.session.commit()
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_column(self):
    """
    Dataset API: Test delete dataset column
    """
    dataset = self.get_fixture_datasets()[0]
    column_id = dataset.columns[0].id
    self.login(username="admin")
    uri = f"api/v1/dataset/{dataset.id}/column/{column_id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 200
    # Fix: use `is None` — identity comparison is the correct (PEP 8) way to
    # check for a missing row; `== None` is an equality-overload footgun.
    assert db.session.query(TableColumn).get(column_id) is None
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_column_not_found(self):
    """
    Dataset API: Test delete dataset column not found
    """
    # Existing dataset, nonexistent column id -> 404.
    dataset = self.get_fixture_datasets()[0]
    non_id = self.get_nonexistent_numeric_id(TableColumn)
    self.login(username="admin")
    uri = f"api/v1/dataset/{dataset.id}/column/{non_id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 404
    # Nonexistent dataset, existing column id -> also 404.
    non_id = self.get_nonexistent_numeric_id(SqlaTable)
    column_id = dataset.columns[0].id
    self.login(username="admin")
    uri = f"api/v1/dataset/{non_id}/column/{column_id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 404
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_column_not_owned(self):
    """
    Dataset API: Test delete dataset column not owned
    """
    dataset = self.get_fixture_datasets()[0]
    column_id = dataset.columns[0].id
    # "alpha" does not own the fixture dataset, so the delete must be 403.
    self.login(username="alpha")
    uri = f"api/v1/dataset/{dataset.id}/column/{column_id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 403
@pytest.mark.usefixtures("create_datasets")
@patch("superset.datasets.dao.DatasetDAO.delete")
def test_delete_dataset_column_fail(self, mock_dao_delete):
    """
    Dataset API: Test delete dataset column
    """
    # Force the DAO delete to fail so the endpoint's 422 path is exercised.
    mock_dao_delete.side_effect = DAODeleteFailedError()
    dataset = self.get_fixture_datasets()[0]
    column_id = dataset.columns[0].id
    self.login(username="admin")
    uri = f"api/v1/dataset/{dataset.id}/column/{column_id}"
    rv = self.client.delete(uri)
    data = json.loads(rv.data.decode("utf-8"))
    assert rv.status_code == 422
    assert data == {"message": "Dataset column delete failed."}
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_metric(self):
    """
    Dataset API: Test delete dataset metric
    """
    dataset = self.get_fixture_datasets()[0]
    # Add a metric to delete so the fixture dataset is left untouched.
    test_metric = SqlMetric(
        metric_name="metric1", expression="COUNT(*)", table=dataset
    )
    db.session.add(test_metric)
    db.session.commit()
    self.login(username="admin")
    uri = f"api/v1/dataset/{dataset.id}/metric/{test_metric.id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 200
    # Fix: use `is None` — identity comparison is the correct (PEP 8) way to
    # check for a missing row; `== None` is an equality-overload footgun.
    assert db.session.query(SqlMetric).get(test_metric.id) is None
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_metric_not_found(self):
    """
    Dataset API: Test delete dataset metric not found
    """
    # Existing dataset, nonexistent metric id -> 404.
    dataset = self.get_fixture_datasets()[0]
    non_id = self.get_nonexistent_numeric_id(SqlMetric)
    self.login(username="admin")
    uri = f"api/v1/dataset/{dataset.id}/metric/{non_id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 404
    # Nonexistent dataset, existing metric id -> also 404.
    non_id = self.get_nonexistent_numeric_id(SqlaTable)
    metric_id = dataset.metrics[0].id
    self.login(username="admin")
    uri = f"api/v1/dataset/{non_id}/metric/{metric_id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 404
@pytest.mark.usefixtures("create_datasets")
def test_delete_dataset_metric_not_owned(self):
    """
    Dataset API: Test delete dataset metric not owned
    """
    dataset = self.get_fixture_datasets()[0]
    metric_id = dataset.metrics[0].id
    # "alpha" does not own the fixture dataset, so the delete must be 403.
    self.login(username="alpha")
    uri = f"api/v1/dataset/{dataset.id}/metric/{metric_id}"
    rv = self.client.delete(uri)
    assert rv.status_code == 403
@pytest.mark.usefixtures("create_datasets")
@patch("superset.datasets.dao.DatasetDAO.delete")
def test_delete_dataset_metric_fail(self, mock_dao_delete):
    """
    Dataset API: Test delete dataset metric fail
    """
    # Force the DAO delete to fail so the endpoint's 422 path is exercised.
    mock_dao_delete.side_effect = DAODeleteFailedError()
    dataset = self.get_fixture_datasets()[0]
    # Fix: renamed from the copy-pasted `column_id` — this is a metric id.
    metric_id = dataset.metrics[0].id
    self.login(username="admin")
    uri = f"api/v1/dataset/{dataset.id}/metric/{metric_id}"
    rv = self.client.delete(uri)
    data = json.loads(rv.data.decode("utf-8"))
    assert rv.status_code == 422
    assert data == {"message": "Dataset metric delete failed."}
@pytest.mark.usefixtures("create_datasets")
def test_bulk_delete_dataset_items(self):
"""