From c81204aeefcecc97ee8478d41a9f511b0dfc7580 Mon Sep 17 00:00:00 2001
From: Beto Dealmeida
Date: Thu, 22 Oct 2020 20:02:40 -0700
Subject: [PATCH] feat: export dashboards as ZIP files (#11351)

* Export datasets as ZIP files

* Add logging when failing to parse extra

* Export datasets as ZIP files

* Export charts as Zip file

* Export dashboards as a Zip file

* Add logging
---
 superset/charts/commands/export.py     |   5 +-
 superset/dashboards/api.py             |  34 ++-
 superset/dashboards/commands/export.py |  84 +++++++++++
 superset/examples/helpers.py           |   1 +
 tests/dashboards/api_tests.py          |  59 ++++++++
 tests/dashboards/commands_tests.py     | 190 +++++++++++++++++++++++++
 6 files changed, 370 insertions(+), 3 deletions(-)
 create mode 100644 superset/dashboards/commands/export.py
 create mode 100644 tests/dashboards/commands_tests.py

diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py
index 00e0fd42c2..db90e742da 100644
--- a/superset/charts/commands/export.py
+++ b/superset/charts/commands/export.py
@@ -17,6 +17,7 @@
 # isort:skip_file
 
 import json
+import logging
 from typing import Iterator, List, Tuple
 
 import yaml
@@ -28,6 +29,8 @@ from superset.datasets.commands.export import ExportDatasetsCommand
 from superset.utils.dict_import_export import IMPORT_EXPORT_VERSION, sanitize
 from superset.models.slice import Slice
 
+logger = logging.getLogger(__name__)
+
 # keys present in the standard export that are not needed
 REMOVE_KEYS = ["datasource_type", "datasource_name"]
 
@@ -59,7 +62,7 @@ class ExportChartsCommand(BaseCommand):
         try:
             payload["params"] = json.loads(payload["params"])
         except json.decoder.JSONDecodeError:
-            pass
+            logger.info("Unable to decode `params` field: %s", payload["params"])
 
         payload["version"] = IMPORT_EXPORT_VERSION
         if chart.table:
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 43fabd09f8..8f65f35fdb 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -15,9 +15,12 @@
 # specific language governing permissions and limitations
 # under the License.
 import logging
+from datetime import datetime
+from io import BytesIO
 from typing import Any, Dict
+from zipfile import ZipFile
 
-from flask import g, make_response, redirect, request, Response, url_for
+from flask import g, make_response, redirect, request, Response, send_file, url_for
 from flask_appbuilder.api import expose, protect, rison, safe
 from flask_appbuilder.models.sqla.interface import SQLAInterface
 from flask_babel import ngettext
@@ -39,6 +42,7 @@ from superset.dashboards.commands.exceptions import (
     DashboardNotFoundError,
     DashboardUpdateFailedError,
 )
+from superset.dashboards.commands.export import ExportDashboardsCommand
 from superset.dashboards.commands.update import UpdateDashboardCommand
 from superset.dashboards.filters import (
     DashboardFavoriteFilter,
@@ -459,8 +463,34 @@
             500:
               $ref: '#/components/responses/500'
         """
+        requested_ids = kwargs["rison"]
+
+        if is_feature_enabled("VERSIONED_EXPORT"):
+            timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
+            root = f"dashboard_export_{timestamp}"
+            filename = f"{root}.zip"
+
+            buf = BytesIO()
+            with ZipFile(buf, "w") as bundle:
+                try:
+                    for file_name, file_content in ExportDashboardsCommand(
+                        requested_ids
+                    ).run():
+                        with bundle.open(f"{root}/{file_name}", "w") as fp:
+                            fp.write(file_content.encode())
+                except DashboardNotFoundError:
+                    return self.response_404()
+            buf.seek(0)
+
+            return send_file(
+                buf,
+                mimetype="application/zip",
+                as_attachment=True,
+                attachment_filename=filename,
+            )
+
         query = self.datamodel.session.query(Dashboard).filter(
-            Dashboard.id.in_(kwargs["rison"])
+            Dashboard.id.in_(requested_ids)
         )
         query = self._base_filters.apply_all(query)
         ids = [item.id for item in query.all()]
diff --git a/superset/dashboards/commands/export.py b/superset/dashboards/commands/export.py
new file mode 100644
index 0000000000..f769a67480
--- /dev/null
+++ b/superset/dashboards/commands/export.py
@@ -0,0 +1,84 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# isort:skip_file
+
+import json
+import logging
+from typing import Iterator, List, Tuple
+
+import yaml
+
+from superset.commands.base import BaseCommand
+from superset.charts.commands.export import ExportChartsCommand
+from superset.dashboards.commands.exceptions import DashboardNotFoundError
+from superset.dashboards.dao import DashboardDAO
+from superset.models.dashboard import Dashboard
+from superset.utils.dict_import_export import IMPORT_EXPORT_VERSION, sanitize
+
+logger = logging.getLogger(__name__)
+
+
+# keys stored as JSON are loaded and the prefix/suffix removed
+JSON_KEYS = {"position_json": "position", "json_metadata": "metadata"}
+
+
+class ExportDashboardsCommand(BaseCommand):
+    def __init__(self, dashboard_ids: List[int]):
+        self.dashboard_ids = dashboard_ids
+
+        # this will be set when calling validate()
+        self._models: List[Dashboard] = []
+
+    @staticmethod
+    def export_dashboard(dashboard: Dashboard) -> Iterator[Tuple[str, str]]:
+        dashboard_slug = sanitize(dashboard.dashboard_title)
+        file_name = f"dashboards/{dashboard_slug}.yaml"
+
+        payload = dashboard.export_to_dict(
+            recursive=False,
+            include_parent_ref=False,
+            include_defaults=True,
+            export_uuids=True,
+        )
+        # TODO (betodealmeida): move this logic to export_to_dict once this
+        # becomes the default export endpoint
+        for key, new_name in JSON_KEYS.items():
+            if payload.get(key):
+                value = payload.pop(key)
+                try:
+                    payload[new_name] = json.loads(value)
+                except json.decoder.JSONDecodeError:
+                    logger.info("Unable to decode `%s` field: %s", key, value)
+
+        payload["version"] = IMPORT_EXPORT_VERSION
+
+        file_content = yaml.safe_dump(payload, sort_keys=False)
+        yield file_name, file_content
+
+        chart_ids = [chart.id for chart in dashboard.slices]
+        yield from ExportChartsCommand(chart_ids).run()
+
+    def run(self) -> Iterator[Tuple[str, str]]:
+        self.validate()
+
+        for dashboard in self._models:
+            yield from self.export_dashboard(dashboard)
+
+    def validate(self) -> None:
+        self._models = DashboardDAO.find_by_ids(self.dashboard_ids)
+        if len(self._models) != len(self.dashboard_ids):
+            raise DashboardNotFoundError()
diff --git a/superset/examples/helpers.py b/superset/examples/helpers.py
index 58f8de293d..2b872a1512 100644
--- a/superset/examples/helpers.py
+++ b/superset/examples/helpers.py
@@ -51,6 +51,7 @@ def update_slice_ids(layout_dict: Dict[Any, Any], slices: List[Slice]) -> None:
     for i, chart_component in enumerate(sorted_charts):
         if i < len(slices):
             chart_component["meta"]["chartId"] = int(slices[i].id)
+            chart_component["meta"]["uuid"] = str(slices[i].uuid)
 
 
 def merge_slice(slc: Slice) -> None:
diff --git a/tests/dashboards/api_tests.py b/tests/dashboards/api_tests.py
index 6fc0385cc3..7df1176d50 100644
--- a/tests/dashboards/api_tests.py
+++ b/tests/dashboards/api_tests.py
@@ -17,7 +17,10 @@
 # isort:skip_file
 """Unit tests for Superset"""
 import json
+from io import BytesIO
 from typing import List, Optional
+from unittest.mock import patch
+from zipfile import is_zipfile
 
 import pytest
 import prison
@@ -989,3 +992,59 @@ class TestDashboardApi(SupersetTestCase, ApiOwnersTestCaseMixin):
         self.assertEqual(rv.status_code, 404)
         db.session.delete(dashboard)
         db.session.commit()
+
+    @patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        {"VERSIONED_EXPORT": True},
+        clear=True,
+    )
+    def test_export_bundle(self):
+        """
+        Dashboard API: Test dashboard export
+        """
+        argument = [1, 2]
+        uri = f"api/v1/dashboard/export/?q={prison.dumps(argument)}"
+
+        self.login(username="admin")
+        rv = self.client.get(uri)
+
+        assert rv.status_code == 200
+
+        buf = BytesIO(rv.data)
+        assert is_zipfile(buf)
+
+    @patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        {"VERSIONED_EXPORT": True},
+        clear=True,
+    )
+    def test_export_bundle_not_found(self):
+        """
+        Dashboard API: Test dashboard export not found
+        """
+        self.login(username="admin")
+        argument = [1000]
+        uri = f"api/v1/dashboard/export/?q={prison.dumps(argument)}"
+        rv = self.client.get(uri)
+        assert rv.status_code == 404
+
+    @patch.dict(
+        "superset.extensions.feature_flag_manager._feature_flags",
+        {"VERSIONED_EXPORT": True},
+        clear=True,
+    )
+    def test_export_bundle_not_allowed(self):
+        """
+        Dashboard API: Test dashboard export not allowed
+        """
+        admin_id = self.get_user("admin").id
+        dashboard = self.insert_dashboard("title", "slug1", [admin_id], published=False)
+
+        self.login(username="gamma")
+        argument = [dashboard.id]
+        uri = f"api/v1/dashboard/export/?q={prison.dumps(argument)}"
+        rv = self.client.get(uri)
+        assert rv.status_code == 404
+
+        db.session.delete(dashboard)
+        db.session.commit()
diff --git a/tests/dashboards/commands_tests.py b/tests/dashboards/commands_tests.py
new file mode 100644
index 0000000000..10acf16e81
--- /dev/null
+++ b/tests/dashboards/commands_tests.py
@@ -0,0 +1,190 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from unittest.mock import patch
+
+import yaml
+
+from superset import db, security_manager
+from superset.dashboards.commands.exceptions import DashboardNotFoundError
+from superset.dashboards.commands.export import ExportDashboardsCommand
+from superset.models.dashboard import Dashboard
+from tests.base_tests import SupersetTestCase
+
+
+class TestExportDashboardsCommand(SupersetTestCase):
+    @patch("superset.security.manager.g")
+    @patch("superset.views.base.g")
+    def test_export_dashboard_command(self, mock_g1, mock_g2):
+        mock_g1.user = security_manager.find_user("admin")
+        mock_g2.user = security_manager.find_user("admin")
+
+        example_dashboard = db.session.query(Dashboard).filter_by(id=1).one()
+        command = ExportDashboardsCommand(dashboard_ids=[example_dashboard.id])
+        contents = dict(command.run())
+
+        expected_paths = {
+            "dashboards/world_banks_data.yaml",
+            "charts/box_plot.yaml",
+            "datasets/examples/wb_health_population.yaml",
+            "databases/examples.yaml",
+            "charts/treemap.yaml",
+            "charts/region_filter.yaml",
+            "charts/_rural.yaml",
+            "charts/worlds_population.yaml",
+            "charts/most_populated_countries.yaml",
+            "charts/growth_rate.yaml",
+            "charts/life_expectancy_vs_rural_.yaml",
+            "charts/rural_breakdown.yaml",
+            "charts/worlds_pop_growth.yaml",
+        }
+
+        assert expected_paths == set(contents.keys())
+
+        metadata = yaml.safe_load(contents["dashboards/world_banks_data.yaml"])
+
+        # remove chart IDs and UUIDs from the metadata so we can compare
+        for chart_info in metadata["position"].values():
+            if isinstance(chart_info, dict) and "uuid" in chart_info.get("meta", {}):
+                del chart_info["meta"]["chartId"]
+                del chart_info["meta"]["uuid"]
+
+        assert metadata == {
+            "dashboard_title": "World Bank's Data",
+            "description": None,
+            "css": "",
+            "slug": "world_health",
+            "uuid": str(example_dashboard.uuid),
+            "position": {
+                "DASHBOARD_CHART_TYPE-0": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-0",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-1": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-1",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-2": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-2",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-3": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-3",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-4": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-4",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-5": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-5",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-6": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-6",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-7": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-7",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-8": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-8",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_CHART_TYPE-9": {
+                    "children": [],
+                    "id": "DASHBOARD_CHART_TYPE-9",
+                    "meta": {"height": 50, "width": 4},
+                    "type": "CHART",
+                },
+                "DASHBOARD_VERSION_KEY": "v2",
+            },
+            "metadata": {
+                "timed_refresh_immune_slices": [],
+                "expanded_slices": {},
+                "refresh_frequency": 0,
+                "default_filters": "{}",
+                "color_scheme": None,
+            },
+            "version": "1.0.0",
+        }
+
+    @patch("superset.security.manager.g")
+    @patch("superset.views.base.g")
+    def test_export_dashboard_command_no_access(self, mock_g1, mock_g2):
+        """Test that users can't export dashboards they don't have access to"""
+        mock_g1.user = security_manager.find_user("gamma")
+        mock_g2.user = security_manager.find_user("gamma")
+
+        example_dashboard = db.session.query(Dashboard).filter_by(id=1).one()
+        command = ExportDashboardsCommand(dashboard_ids=[example_dashboard.id])
+        contents = command.run()
+        with self.assertRaises(DashboardNotFoundError):
+            next(contents)
+
+    @patch("superset.security.manager.g")
+    @patch("superset.views.base.g")
+    def test_export_dashboard_command_invalid_dataset(self, mock_g1, mock_g2):
+        """Test that an error is raised when exporting an invalid dashboard"""
+        mock_g1.user = security_manager.find_user("admin")
+        mock_g2.user = security_manager.find_user("admin")
+        command = ExportDashboardsCommand(dashboard_ids=[-1])
+        contents = command.run()
+        with self.assertRaises(DashboardNotFoundError):
+            next(contents)
+
+    @patch("superset.security.manager.g")
+    @patch("superset.views.base.g")
+    def test_export_dashboard_command_key_order(self, mock_g1, mock_g2):
+        """Test that the keys in the YAML have the same order as export_fields"""
+        mock_g1.user = security_manager.find_user("admin")
+        mock_g2.user = security_manager.find_user("admin")
+
+        example_dashboard = db.session.query(Dashboard).filter_by(id=1).one()
+        command = ExportDashboardsCommand(dashboard_ids=[example_dashboard.id])
+        contents = dict(command.run())
+
+        metadata = yaml.safe_load(contents["dashboards/world_banks_data.yaml"])
+        assert list(metadata.keys()) == [
+            "dashboard_title",
+            "description",
+            "css",
+            "slug",
+            "uuid",
+            "position",
+            "metadata",
+            "version",
+        ]
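
For reference, here is a minimal client-side sketch (not part of the patch) of how the new endpoint could be exercised once the VERSIONED_EXPORT feature flag is enabled. The base URL, the already-authenticated requests.Session, and the dashboard ids are assumptions for illustration, not values taken from the change itself:

    # Sketch: download a dashboard export bundle and list its contents.
    # Assumes a Superset instance at localhost:8088 and a session that already
    # carries valid authentication (login flow omitted); ids 1 and 2 are placeholders.
    from io import BytesIO
    from zipfile import ZipFile

    import prison
    import requests

    session = requests.Session()  # assumed to be authenticated already
    dashboard_ids = [1, 2]
    url = f"http://localhost:8088/api/v1/dashboard/export/?q={prison.dumps(dashboard_ids)}"

    response = session.get(url)
    response.raise_for_status()

    # Everything is nested under a dashboard_export_<timestamp>/ root, with
    # dashboards/, charts/, datasets/ and databases/ YAML files inside.
    with ZipFile(BytesIO(response.content)) as bundle:
        for name in bundle.namelist():
            print(name)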