refactor copy filter_scopes and add tests (#9224)

* refactor copy filter_scopes and add tests

* fix review comments
This commit is contained in:
Grace Guo 2020-03-02 12:55:30 -08:00 committed by GitHub
parent 7b2c2e8a33
commit ccd6e44edf
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 82 additions and 33 deletions

View File

@ -257,7 +257,7 @@ class Dashboard( # pylint: disable=too-many-instance-attributes
"children": ["DASHBOARD_CHART_TYPE-2"]
},
"DASHBOARD_CHART_TYPE-2": {
"type": "DASHBOARD_CHART_TYPE",
"type": "CHART",
"id": "DASHBOARD_CHART_TYPE-2",
"children": [],
"meta": {

View File

@ -75,7 +75,7 @@ def convert_filter_scopes(json_metadata: Dict, filters: List[Slice]):
def copy_filter_scopes(
old_to_new_slc_id_dict: Dict[int, int], old_filter_scopes: Dict[str, Dict]
) -> Dict:
new_filter_scopes = {}
new_filter_scopes: Dict[str, Dict] = {}
for (filter_id, scopes) in old_filter_scopes.items():
new_filter_key = old_to_new_slc_id_dict.get(int(filter_id))
if new_filter_key:

View File

@ -1176,16 +1176,16 @@ class Superset(BaseSupersetView):
dash.owners = [g.user] if g.user else []
dash.dashboard_title = data["dashboard_title"]
old_to_new_slice_ids: Dict[int, int] = {}
if data["duplicate_slices"]:
# Duplicating slices as well, mapping old ids to new ones
old_to_new_sliceids: Dict[int, int] = {}
for slc in original_dash.slices:
new_slice = slc.clone()
new_slice.owners = [g.user] if g.user else []
session.add(new_slice)
session.flush()
new_slice.dashboards.append(dash)
old_to_new_sliceids[slc.id] = new_slice.id
old_to_new_slice_ids[slc.id] = new_slice.id
# update chartId of layout entities
for value in data["positions"].values():
@ -1195,29 +1195,14 @@ class Superset(BaseSupersetView):
and value.get("meta").get("chartId")
):
old_id = value.get("meta").get("chartId")
new_id = old_to_new_sliceids[old_id]
new_id = old_to_new_slice_ids[old_id]
value["meta"]["chartId"] = new_id
# replace filter_id and immune ids from old slice id to new slice id:
if "filter_scopes" in data:
new_filter_scopes = copy_filter_scopes(
old_to_new_slc_id_dict=old_to_new_sliceids,
old_filter_scopes=json.loads(data["filter_scopes"] or "{}"),
)
data["filter_scopes"] = json.dumps(new_filter_scopes)
else:
dash.slices = original_dash.slices
# remove slice id from filter_scopes metadata if slice is removed from dashboard
if "filter_scopes" in data:
new_filter_scopes = copy_filter_scopes(
old_to_new_slc_id_dict={slc.id: slc.id for slc in dash.slices},
old_filter_scopes=json.loads(data["filter_scopes"] or "{}"),
)
data["filter_scopes"] = json.dumps(new_filter_scopes)
dash.params = original_dash.params
self._set_dash_metadata(dash, data)
self._set_dash_metadata(dash, data, old_to_new_slice_ids)
session.add(dash)
session.commit()
dash_json = json.dumps(dash.data)
@ -1233,12 +1218,6 @@ class Superset(BaseSupersetView):
dash = session.query(Dashboard).get(dashboard_id)
check_ownership(dash, raise_if_false=True)
data = json.loads(request.form.get("data"))
if "filter_scopes" in data:
new_filter_scopes = copy_filter_scopes(
old_to_new_slc_id_dict={slc.id: slc.id for slc in dash.slices},
old_filter_scopes=json.loads(data["filter_scopes"] or "{}"),
)
data["filter_scopes"] = json.dumps(new_filter_scopes)
self._set_dash_metadata(dash, data)
session.merge(dash)
session.commit()
@ -1246,7 +1225,10 @@ class Superset(BaseSupersetView):
return json_success(json.dumps({"status": "SUCCESS"}))
@staticmethod
def _set_dash_metadata(dashboard, data):
def _set_dash_metadata(
dashboard, data, old_to_new_slice_ids: Optional[Dict[int, int]] = None
):
old_to_new_slice_ids = old_to_new_slice_ids or {}
positions = data["positions"]
# find slices in the position data
slice_ids = []
@ -1287,9 +1269,21 @@ class Superset(BaseSupersetView):
if "timed_refresh_immune_slices" not in md:
md["timed_refresh_immune_slices"] = []
new_filter_scopes: Dict[str, Dict] = {}
if "filter_scopes" in data:
md["filter_scopes"] = json.loads(data["filter_scopes"] or "{}")
md["expanded_slices"] = data["expanded_slices"]
# replace filter_id and immune ids from old slice id to new slice id:
# and remove slice ids that are not in dash anymore
new_filter_scopes = copy_filter_scopes(
old_to_new_slc_id_dict={
sid: old_to_new_slice_ids.get(sid, sid) for sid in slice_ids
},
old_filter_scopes=json.loads(data["filter_scopes"] or "{}"),
)
if new_filter_scopes:
md["filter_scopes"] = new_filter_scopes
else:
md.pop("filter_scopes", None)
md["expanded_slices"] = data.get("expanded_slices", {})
md["refresh_frequency"] = data.get("refresh_frequency", 0)
default_filters_data = json.loads(data.get("default_filters", "{}"))
applicable_filters = {

View File

@ -16,12 +16,14 @@
# under the License.
# isort:skip_file
"""Unit tests for Superset"""
import copy
import json
import unittest
from random import random
from flask import escape
from sqlalchemy import func
from typing import Dict
import tests.test_app
from superset import db, security_manager
@ -29,6 +31,7 @@ from superset.connectors.sqla.models import SqlaTable
from superset.models import core as models
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.views import core as views
from .base_tests import SupersetTestCase
@ -52,7 +55,7 @@ class DashboardTests(SupersetTestCase):
for i, slc in enumerate(dash.slices):
id = "DASHBOARD_CHART_TYPE-{}".format(i)
d = {
"type": "DASHBOARD_CHART_TYPE",
"type": "CHART",
"id": id,
"children": [],
"meta": {"width": 4, "height": 50, "chartId": slc.id},
@ -235,6 +238,58 @@ class DashboardTests(SupersetTestCase):
if key not in ["modified", "changed_on"]:
self.assertEqual(slc[key], resp["slices"][index][key])
def test_set_dash_metadata(self, username="admin"):
    """End-to-end check of Superset._set_dash_metadata filter_scopes handling.

    Exercises two behaviors shown in this commit's view changes:
    1. with no id mapping, filter_scopes passed in ``data`` are stored
       verbatim in the dashboard's ``json_metadata``;
    2. with an ``old_to_new_slice_ids`` mapping, both the filter slice id
       (the key) and the immune slice ids are remapped, and slices removed
       from the positions payload are dropped from the scopes.
    """
    self.login(username=username)
    # NOTE(review): relies on the "world_health" example dashboard being
    # loaded in the test database — confirm fixture setup in base_tests.
    dash = db.session.query(Dashboard).filter_by(slug="world_health").first()
    data = dash.data
    positions = data["position_json"]
    data.update({"positions": positions})
    # keep a pristine copy so the dashboard can be restored at the end
    original_data = copy.deepcopy(data)
    # add filter scopes
    filter_slice = dash.slices[0]
    immune_slices = dash.slices[2:]
    filter_scopes = {
        str(filter_slice.id): {
            "region": {
                "scope": ["ROOT_ID"],
                "immune": [slc.id for slc in immune_slices],
            }
        }
    }
    # filter_scopes travels as a JSON string inside the request payload
    data.update({"filter_scopes": json.dumps(filter_scopes)})
    views.Superset._set_dash_metadata(dash, data)
    updated_metadata = json.loads(dash.json_metadata)
    # no id mapping given: scopes must round-trip unchanged
    self.assertEqual(updated_metadata["filter_scopes"], filter_scopes)
    # remove a slice and change slice ids (as copy slices)
    removed_slice = immune_slices.pop()
    # find the layout component holding the removed slice's chartId
    removed_component = [
        key
        for (key, value) in positions.items()
        if isinstance(value, dict)
        and value.get("type") == "CHART"
        and value["meta"]["chartId"] == removed_slice.id
    ]
    positions.pop(removed_component[0], None)
    data.update({"positions": positions})
    # simulate duplicated slices: every old id maps to old id + 10
    old_to_new_sliceids = {slc.id: slc.id + 10 for slc in dash.slices}
    views.Superset._set_dash_metadata(dash, data, old_to_new_sliceids)
    updated_metadata = json.loads(dash.json_metadata)
    # expected: key and immune ids shifted by 10; removed slice absent
    updated_filter_scopes = {
        str(filter_slice.id + 10): {
            "region": {
                "scope": ["ROOT_ID"],
                "immune": [slc.id + 10 for slc in immune_slices],
            }
        }
    }
    self.assertEqual(updated_metadata["filter_scopes"], updated_filter_scopes)
    # reset dash to original data
    views.Superset._set_dash_metadata(dash, original_data)
def test_add_slices(self, username="admin"):
self.login(username=username)
dash = db.session.query(Dashboard).filter_by(slug="births").first()

View File

@ -360,7 +360,7 @@ class ImportExportTests(SupersetTestCase):
dash_with_1_slice.position_json = """
{{"DASHBOARD_VERSION_KEY": "v2",
"DASHBOARD_CHART_TYPE-{0}": {{
"type": "DASHBOARD_CHART_TYPE",
"type": "CHART",
"id": {0},
"children": [],
"meta": {{
@ -540,7 +540,7 @@ class ImportExportTests(SupersetTestCase):
dash_with_1_slice.position_json = """
{{"DASHBOARD_VERSION_KEY": "v2",
"DASHBOARD_CHART_TYPE-{0}": {{
"type": "DASHBOARD_CHART_TYPE",
"type": "CHART",
"id": {0},
"children": [],
"meta": {{