chore: remove deprecated apis copy_dash, save_dash and add_slices (#24353)

This commit is contained in:
Daniel Vaz Gaspar 2023-06-13 15:08:47 +01:00 committed by GitHub
parent c09b8c6ec8
commit ca478bd5c4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 3 additions and 451 deletions

View File

@ -68,7 +68,6 @@
|can sqllab viz on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can schemas on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can sqllab history on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can copy dash on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can publish on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can csv on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can fave dashboards by username on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@ -85,9 +84,7 @@
|can csrf token on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can created slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can annotation json on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can add slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can fave dashboards on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can save dash on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can sqllab on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|
|can recent activity on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can select star on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|

View File

@ -34,6 +34,7 @@ assists people when migrating to a new version.
### Breaking Changes
- [24353](https://github.com/apache/superset/pull/24353): Removed deprecated APIs `/superset/copy_dash/<int:dashboard_id>/`, `/superset/save_dash/<int:dashboard_id>/`, `/superset/add_slices/<int:dashboard_id>/`.
- [24198](https://github.com/apache/superset/pull/24198) The FAB views `User Registrations` and `User's Statistics` have been changed to Admin only. To re-enable them for non-admin users, please add the following perms to your custom role: `menu access on User's Statistics` and `menu access on User Registrations`.
- [24354](https://github.com/apache/superset/pull/24354): Removed deprecated APIs `/superset/testconn`, `/superset/validate_sql_json/`, `/superset/schemas_access_for_file_upload`, `/superset/extra_table_metadata`
- [24381](https://github.com/apache/superset/pull/24381): Removed deprecated API `/superset/available_domains/`

View File

@ -336,7 +336,7 @@ export const hydrateDashboard =
metadata,
userId: user.userId ? String(user.userId) : null, // legacy, please use state.user instead
dash_edit_perm: canEdit,
dash_save_perm: findPermission('can_save_dash', 'Superset', roles),
dash_save_perm: findPermission('can_write', 'Dashboard', roles),
dash_share_perm: findPermission(
'can_share_dashboard',
'Superset',

View File

@ -61,7 +61,6 @@ from superset.connectors.sqla.models import (
from superset.constants import QUERY_EARLY_CANCEL_KEY
from superset.dashboards.commands.exceptions import DashboardAccessDeniedError
from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
from superset.dashboards.dao import DashboardDAO
from superset.dashboards.permalink.commands.get import GetDashboardPermalinkCommand
from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError
from superset.databases.dao import DatabaseDAO
@ -847,137 +846,6 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
return json_success(json.dumps(response))
@api
@has_access_api
@event_logger.log_this
@expose(
    "/copy_dash/<int:dashboard_id>/",
    methods=(
        "GET",
        "POST",
    ),
)
@deprecated(new_target="api/v1/dashboard/<dash_id>/copy/")
def copy_dash(  # pylint: disable=no-self-use
    self, dashboard_id: int
) -> FlaskResponse:
    """Copy dashboard.

    Deprecated legacy endpoint (superseded by
    ``api/v1/dashboard/<dash_id>/copy/``).  Reads a JSON payload from the
    ``data`` form field, creates a new ``Dashboard`` owned by the current
    user, and returns the new dashboard's data as JSON.  When
    ``duplicate_slices`` is truthy the charts themselves are cloned as
    well; otherwise the copy references the original slice rows.
    """
    session = db.session()
    data = json.loads(request.form["data"])
    # client-side send back last_modified_time which was set when
    # the dashboard was open. it was used to avoid mid-air collision.
    # remove it to avoid confusion.
    data.pop("last_modified_time", None)

    dash = Dashboard()
    original_dash = session.query(Dashboard).get(dashboard_id)

    # The copy is owned by the requesting user (if any), not the
    # original dashboard's owners.
    dash.owners = [g.user] if g.user else []
    dash.dashboard_title = data["dashboard_title"]
    dash.css = data.get("css")

    old_to_new_slice_ids: dict[int, int] = {}
    if data["duplicate_slices"]:
        # Duplicating slices as well, mapping old ids to new ones
        for slc in original_dash.slices:
            new_slice = slc.clone()
            new_slice.owners = [g.user] if g.user else []
            session.add(new_slice)
            # flush so the clone gets a database id before we record
            # the old-id -> new-id mapping
            session.flush()
            new_slice.dashboards.append(dash)
            old_to_new_slice_ids[slc.id] = new_slice.id

        # update chartId of layout entities
        for value in data["positions"].values():
            if isinstance(value, dict) and value.get("meta", {}).get("chartId"):
                old_id = value["meta"]["chartId"]
                new_id = old_to_new_slice_ids.get(old_id)
                value["meta"]["chartId"] = new_id
    else:
        # Shallow copy: both dashboards reference the same slice rows.
        dash.slices = original_dash.slices

    dash.params = original_dash.params

    DashboardDAO.set_dash_metadata(dash, data, old_to_new_slice_ids)
    session.add(dash)
    session.commit()
    dash_json = json.dumps(dash.data)
    session.close()
    return json_success(dash_json)
@api
@has_access_api
@event_logger.log_this
@expose(
    "/save_dash/<int:dashboard_id>/",
    methods=(
        "GET",
        "POST",
    ),
)
@deprecated()
def save_dash(  # pylint: disable=no-self-use
    self, dashboard_id: int
) -> FlaskResponse:
    """Save a dashboard's metadata.

    Deprecated legacy endpoint.  Reads a JSON payload from the ``data``
    form field, rejects the save with HTTP 412 when the dashboard was
    modified after the client last loaded it (mid-air collision), and
    otherwise persists css/title/metadata and returns the new
    ``last_modified_time``.  Only dashboard owners may save.
    """
    session = db.session()
    dash = session.query(Dashboard).get(dashboard_id)
    # raises for non-owners
    security_manager.raise_for_ownership(dash)
    data = json.loads(request.form["data"])

    # client-side send back last_modified_time which was set when
    # the dashboard was open. it was used to avoid mid-air collision.
    remote_last_modified_time = data.get("last_modified_time")
    current_last_modified_time = dash.changed_on.replace(microsecond=0).timestamp()
    if (
        remote_last_modified_time
        and remote_last_modified_time < current_last_modified_time
    ):
        # Someone else saved after this client opened the dashboard:
        # reject with 412 Precondition Failed so the client reloads.
        return json_error_response(
            __(
                "This dashboard was changed recently. "
                "Please reload dashboard to get latest version."
            ),
            412,
        )
    # remove to avoid confusion.
    data.pop("last_modified_time", None)

    if data.get("css") is not None:
        dash.css = data["css"]
    if data.get("dashboard_title") is not None:
        dash.dashboard_title = data["dashboard_title"]

    DashboardDAO.set_dash_metadata(dash, data)
    session.merge(dash)
    session.commit()

    # get updated changed_on
    dash = session.query(Dashboard).get(dashboard_id)
    last_modified_time = dash.changed_on.replace(microsecond=0).timestamp()
    session.close()
    return json_success(
        json.dumps({"status": "SUCCESS", "last_modified_time": last_modified_time})
    )
@api
@has_access_api
@event_logger.log_this
@expose("/add_slices/<int:dashboard_id>/", methods=("POST",))
@deprecated(new_target="api/v1/chart/<chart_id>")
def add_slices(  # pylint: disable=no-self-use
    self, dashboard_id: int
) -> FlaskResponse:
    """Attach the charts listed in the request payload to a dashboard.

    Deprecated legacy endpoint.  Expects a ``data`` form field whose JSON
    body contains ``slice_ids``; only dashboard owners may modify it.
    """
    payload = json.loads(request.form["data"])
    db_session = db.session()
    dashboard = db_session.query(Dashboard).get(dashboard_id)
    security_manager.raise_for_ownership(dashboard)
    slices_to_add = db_session.query(Slice).filter(
        Slice.id.in_(payload["slice_ids"])
    )
    dashboard.slices += slices_to_add
    db_session.merge(dashboard)
    db_session.commit()
    db_session.close()
    return "SLICES ADDED"
@staticmethod
def get_user_activity_access_error(user_id: int) -> FlaskResponse | None:
try:
@ -1372,13 +1240,12 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods
message=utils.error_msg_from_exception(ex),
category="danger",
)
add_extra_log_payload(
dashboard_id=dashboard.id,
dashboard_version="v2",
dash_edit_perm=(
security_manager.is_owner(dashboard)
and security_manager.can_access("can_save_dash", "Superset")
and security_manager.can_access("can_write", "Dashboard")
),
edit_mode=(
request.args.get(ReservedUrlParameters.EDIT_MODE.value) == "true"

View File

@ -16,8 +16,6 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
from datetime import datetime
import json
import re
import unittest
from random import random
@ -54,28 +52,6 @@ from .base_tests import SupersetTestCase
class TestDashboard(SupersetTestCase):
@pytest.fixture
def cleanup_copied_dash(self):
    """Remove the "Copy Of Births" dashboard created during a copy test.

    Captures the original dashboard's id before yielding so the teardown
    deletes only the copy, never the original.
    """
    with app.app_context():
        original_dashboard = (
            db.session.query(Dashboard).filter_by(slug="births").first()
        )
        original_dashboard_id = original_dashboard.id
        yield
        copied_dashboard = (
            db.session.query(Dashboard)
            .filter(
                Dashboard.dashboard_title == "Copy Of Births",
                Dashboard.id != original_dashboard_id,
            )
            .first()
        )

        # re-attach the original to the session before committing cleanup
        db.session.merge(original_dashboard)
        if copied_dashboard:
            db.session.delete(copied_dashboard)
        db.session.commit()
@pytest.fixture
def load_dashboard(self):
with app.app_context():
@ -154,235 +130,6 @@ class TestDashboard(SupersetTestCase):
db.session.delete(created_dashboard)
db.session.commit()
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_save_dash(self, username="admin"):
    """Saving a dashboard through the legacy view returns SUCCESS."""
    self.login(username=username)
    dashboard = db.session.query(Dashboard).filter_by(slug="births").first()
    payload = {
        "css": "",
        "expanded_slices": {},
        "positions": self.get_mock_positions(dashboard),
        "dashboard_title": dashboard.dashboard_title,
        # push the client timestamp into the future to bypass the
        # mid-air-collision check
        "last_modified_time": datetime.now().timestamp() + 1000,
    }
    response = self.get_resp(
        f"/superset/save_dash/{dashboard.id}/",
        data={"data": json.dumps(payload)},
    )
    self.assertIn("SUCCESS", response)
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_save_dash_with_filter(self, username="admin"):
    """Saving default_filters must not leak preselect_filters into the URL."""
    self.login(username=username)
    dash = db.session.query(Dashboard).filter_by(slug="world_health").first()
    positions = self.get_mock_positions(dash)
    filters = {str(dash.slices[0].id): {"region": ["North America"]}}
    default_filters = json.dumps(filters)
    data = {
        "css": "",
        "expanded_slices": {},
        "positions": positions,
        "dashboard_title": dash.dashboard_title,
        "default_filters": default_filters,
        # set a further modified_time for unit test
        "last_modified_time": datetime.now().timestamp() + 1000,
    }

    url = f"/superset/save_dash/{dash.id}/"
    resp = self.get_resp(url, data=dict(data=json.dumps(data)))
    self.assertIn("SUCCESS", resp)

    updatedDash = db.session.query(Dashboard).filter_by(slug="world_health").first()
    new_url = updatedDash.url
    self.assertIn("world_health", new_url)
    self.assertNotIn("preselect_filters", new_url)
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_save_dash_with_invalid_filters(self, username="admin"):
    """Filters referencing a nonexistent slice id are dropped from the URL."""
    self.login(username=username)
    dash = db.session.query(Dashboard).filter_by(slug="world_health").first()

    # add an invalid filter slice
    positions = self.get_mock_positions(dash)
    filters = {str(99999): {"region": ["North America"]}}
    default_filters = json.dumps(filters)
    data = {
        "css": "",
        "expanded_slices": {},
        "positions": positions,
        "dashboard_title": dash.dashboard_title,
        "default_filters": default_filters,
        # set a further modified_time for unit test
        "last_modified_time": datetime.now().timestamp() + 1000,
    }

    url = f"/superset/save_dash/{dash.id}/"
    resp = self.get_resp(url, data=dict(data=json.dumps(data)))
    self.assertIn("SUCCESS", resp)

    updatedDash = db.session.query(Dashboard).filter_by(slug="world_health").first()
    new_url = updatedDash.url
    self.assertNotIn("region", new_url)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_save_dash_with_dashboard_title(self, username="admin"):
    """Saving with a new title persists it; the original is restored after."""
    self.login(username=username)
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    origin_title = dash.dashboard_title
    positions = self.get_mock_positions(dash)
    data = {
        "css": "",
        "expanded_slices": {},
        "positions": positions,
        "dashboard_title": "new title",
        # set a further modified_time for unit test
        "last_modified_time": datetime.now().timestamp() + 1000,
    }

    url = f"/superset/save_dash/{dash.id}/"
    self.get_resp(url, data=dict(data=json.dumps(data)))
    updatedDash = db.session.query(Dashboard).filter_by(slug="births").first()
    self.assertEqual(updatedDash.dashboard_title, "new title")

    # bring back dashboard original title
    data["dashboard_title"] = origin_title
    self.get_resp(url, data=dict(data=json.dumps(data)))
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_save_dash_with_colors(self, username="admin"):
    """Color namespace/scheme/labels are persisted into json_metadata."""
    self.login(username=username)
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    positions = self.get_mock_positions(dash)
    new_label_colors = {"data value": "random color"}
    data = {
        "css": "",
        "expanded_slices": {},
        "positions": positions,
        "dashboard_title": dash.dashboard_title,
        "color_namespace": "Color Namespace Test",
        "color_scheme": "Color Scheme Test",
        "label_colors": new_label_colors,
        # set a further modified_time for unit test
        "last_modified_time": datetime.now().timestamp() + 1000,
    }

    url = f"/superset/save_dash/{dash.id}/"
    self.get_resp(url, data=dict(data=json.dumps(data)))
    updatedDash = db.session.query(Dashboard).filter_by(slug="births").first()
    self.assertIn("color_namespace", updatedDash.json_metadata)
    self.assertIn("color_scheme", updatedDash.json_metadata)
    self.assertIn("label_colors", updatedDash.json_metadata)

    # bring back original dashboard
    del data["color_namespace"]
    del data["color_scheme"]
    del data["label_colors"]
    self.get_resp(url, data=dict(data=json.dumps(data)))
@pytest.mark.usefixtures(
    "load_birth_names_dashboard_with_slices",
    "cleanup_copied_dash",
    "load_unicode_dashboard_with_position",
)
def test_copy_dash(self, username="admin"):
    """Copying a dashboard preserves title, positions, metadata and slices."""
    self.login(username=username)
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    positions = self.get_mock_positions(dash)
    new_label_colors = {"data value": "random color"}
    data = {
        "css": "",
        "duplicate_slices": False,
        "expanded_slices": {},
        "positions": positions,
        "dashboard_title": "Copy Of Births",
        "color_namespace": "Color Namespace Test",
        "color_scheme": "Color Scheme Test",
        "label_colors": new_label_colors,
        # set a further modified_time for unit test
        "last_modified_time": datetime.now().timestamp() + 1000,
    }

    # Save changes to Births dashboard and retrieve updated dash
    dash_id = dash.id
    url = f"/superset/save_dash/{dash_id}/"
    self.client.post(url, data=dict(data=json.dumps(data)))
    dash = db.session.query(Dashboard).filter_by(id=dash_id).first()
    orig_json_data = dash.data

    # Verify that copy matches original
    url = f"/superset/copy_dash/{dash_id}/"
    resp = self.get_json_resp(url, data=dict(data=json.dumps(data)))
    self.assertEqual(resp["dashboard_title"], "Copy Of Births")
    self.assertEqual(resp["position_json"], orig_json_data["position_json"])
    self.assertEqual(resp["metadata"], orig_json_data["metadata"])

    # check every attribute in each dashboard's slices list,
    # exclude modified and changed_on attribute
    for index, slc in enumerate(orig_json_data["slices"]):
        for key in slc:
            if key not in ["modified", "changed_on", "changed_on_humanized"]:
                self.assertEqual(slc[key], resp["slices"][index][key])
@pytest.mark.usefixtures(
    "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices"
)
def test_add_slices(self, username="admin"):
    """add_slices attaches a new chart without duplicating existing ones."""
    self.login(username=username)
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    new_slice = (
        db.session.query(Slice).filter_by(slice_name="Energy Force Layout").first()
    )
    existing_slice = (
        db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first()
    )
    data = {
        "slice_ids": [new_slice.data["slice_id"], existing_slice.data["slice_id"]]
    }
    url = f"/superset/add_slices/{dash.id}/"
    resp = self.client.post(url, data=dict(data=json.dumps(data)))
    assert "SLICES ADDED" in resp.data.decode("utf-8")

    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    new_slice = (
        db.session.query(Slice).filter_by(slice_name="Energy Force Layout").first()
    )
    assert new_slice in dash.slices
    # no duplicates were introduced by re-adding an existing slice
    assert len(set(dash.slices)) == len(dash.slices)

    # cleaning up
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    dash.slices = [o for o in dash.slices if o.slice_name != "Energy Force Layout"]
    db.session.commit()
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_remove_slices(self, username="admin"):
    """Dropping a chart from positions removes it from the saved dashboard."""
    self.login(username=username)
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    origin_slices_length = len(dash.slices)

    positions = self.get_mock_positions(dash)
    # remove one chart
    chart_keys = []
    for key in positions.keys():
        if key.startswith("DASHBOARD_CHART_TYPE"):
            chart_keys.append(key)
    positions.pop(chart_keys[0])

    data = {
        "css": "",
        "expanded_slices": {},
        "positions": positions,
        "dashboard_title": dash.dashboard_title,
        # set a further modified_time for unit test
        "last_modified_time": datetime.now().timestamp() + 1000,
    }

    # save dash
    dash_id = dash.id
    url = f"/superset/save_dash/{dash_id}/"
    self.client.post(url, data=dict(data=json.dumps(data)))
    dash = db.session.query(Dashboard).filter_by(id=dash_id).first()

    # verify slices data
    data = dash.data
    self.assertEqual(len(data["slices"]), origin_slices_length - 1)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@pytest.mark.usefixtures("public_role_like_gamma")
def test_public_user_dashboard_access(self):
@ -443,25 +190,6 @@ class TestDashboard(SupersetTestCase):
# Cleanup
self.revoke_public_access_to_table(table)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_only_owners_can_save(self):
    """save_dash succeeds for admins/owners and fails for non-owners."""
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    # clear owners: only admin (implicitly) can still save
    dash.owners = []
    db.session.merge(dash)
    db.session.commit()
    self.test_save_dash("admin")

    # a non-owner alpha user must be rejected
    self.logout()
    self.assertRaises(Exception, self.test_save_dash, "alpha")

    # once alpha owns the dashboard, saving works again
    alpha = security_manager.find_user("alpha")
    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    dash.owners = [alpha]
    db.session.merge(dash)
    db.session.commit()
    self.test_save_dash("alpha")
@pytest.mark.usefixtures("load_energy_table_with_slice", "load_dashboard")
def test_users_can_list_published_dashboard(self):
self.login("alpha")

View File

@ -14,8 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
from typing import Any, Union
import prison
from flask import Response
@ -48,17 +46,6 @@ class DashboardTestCase(SupersetTestCase):
def get_dashboards_api_response(self) -> Response:
return self.client.get(DASHBOARDS_API_URL)
def save_dashboard_via_view(
    self, dashboard_id: Union[str, int], dashboard_data: dict[str, Any]
) -> Response:
    """POST dashboard metadata to the legacy ``save_dash`` view."""
    url = SAVE_DASHBOARD_URL_FORMAT.format(dashboard_id)
    payload = {"data": json.dumps(dashboard_data)}
    return self.get_resp(url, data=payload)
def save_dashboard(
    self, dashboard_id: Union[str, int], dashboard_data: dict[str, Any]
) -> Response:
    """Save a dashboard; currently delegates to the legacy view helper."""
    return self.save_dashboard_via_view(dashboard_id, dashboard_data)
def delete_dashboard_via_view(self, dashboard_id: int) -> Response:
    """Delete a dashboard through the FAB model view endpoint."""
    return self.get_resp(
        DELETE_DASHBOARD_VIEW_URL_FORMAT.format(dashboard_id), {}
    )
@ -90,26 +77,6 @@ class DashboardTestCase(SupersetTestCase):
view_menu = security_manager.find_view_menu(deleted_dashboard.view_name)
self.assertIsNone(view_menu)
def save_dash_basic_case(self, username=ADMIN_USERNAME):
    """Save a dashboard via the legacy view, then restore its prior state."""
    # arrange
    self.login(username=username)
    (
        dashboard_to_save,
        data_before_change,
        data_after_change,
    ) = build_save_dash_parts()

    # act
    save_dash_response = self.save_dashboard_via_view(
        dashboard_to_save.id, data_after_change
    )

    # assert
    self.assertIn("SUCCESS", save_dash_response)

    # post test: revert the dashboard to its original data
    self.save_dashboard(dashboard_to_save.id, data_before_change)
def clean_created_objects(self):
with app.test_request_context():
self.logout()

View File

@ -24,8 +24,6 @@ EXPORT_DASHBOARDS_API_URL = DASHBOARDS_API_URL + "export/"
EXPORT_DASHBOARDS_API_URL_WITH_QUERY_FORMAT = EXPORT_DASHBOARDS_API_URL + QUERY_FORMAT
GET_DASHBOARD_VIEW_URL_FORMAT = "/superset/dashboard/{}/"
SAVE_DASHBOARD_URL_FORMAT = "/superset/save_dash/{}/"
ADD_SLICES_URL_FORMAT = "/superset/add_slices/{}/"
DELETE_DASHBOARD_VIEW_URL_FORMAT = "/dashboard/delete/{}"
GET_DASHBOARDS_LIST_VIEW = "/dashboard/list/"

View File

@ -1350,8 +1350,6 @@ class TestRolePermission(SupersetTestCase):
# make sure that user can create slices and dashboards
self.assert_can_all("Dashboard", perm_set)
self.assert_can_all("Chart", perm_set)
self.assertIn(("can_add_slices", "Superset"), perm_set)
self.assertIn(("can_copy_dash", "Superset"), perm_set)
self.assertIn(("can_created_dashboards", "Superset"), perm_set)
self.assertIn(("can_created_slices", "Superset"), perm_set)
self.assertIn(("can_csv", "Superset"), perm_set)
@ -1362,7 +1360,6 @@ class TestRolePermission(SupersetTestCase):
self.assertIn(("can_explore_json", "Superset"), perm_set)
self.assertIn(("can_fave_dashboards", "Superset"), perm_set)
self.assertIn(("can_fave_slices", "Superset"), perm_set)
self.assertIn(("can_save_dash", "Superset"), perm_set)
self.assertIn(("can_explore_json", "Superset"), perm_set)
self.assertIn(("can_userinfo", "UserDBModelView"), perm_set)
self.assert_can_menu("Databases", perm_set)
@ -1530,8 +1527,6 @@ class TestRolePermission(SupersetTestCase):
self.assert_cannot_write("UserDBModelView", gamma_perm_set)
self.assert_cannot_write("RoleModelView", gamma_perm_set)
self.assertIn(("can_add_slices", "Superset"), gamma_perm_set)
self.assertIn(("can_copy_dash", "Superset"), gamma_perm_set)
self.assertIn(("can_created_dashboards", "Superset"), gamma_perm_set)
self.assertIn(("can_created_slices", "Superset"), gamma_perm_set)
self.assertIn(("can_csv", "Superset"), gamma_perm_set)
@ -1542,7 +1537,6 @@ class TestRolePermission(SupersetTestCase):
self.assertIn(("can_explore_json", "Superset"), gamma_perm_set)
self.assertIn(("can_fave_dashboards", "Superset"), gamma_perm_set)
self.assertIn(("can_fave_slices", "Superset"), gamma_perm_set)
self.assertIn(("can_save_dash", "Superset"), gamma_perm_set)
self.assertIn(("can_userinfo", "UserDBModelView"), gamma_perm_set)
def test_views_are_secured(self):