feat: add a command to import dashboards (#11749)

* feat: add a command to import dashboards

* Fix lint

* Remove print()
This commit is contained in:
Beto Dealmeida 2020-11-23 16:28:26 -08:00 committed by GitHub
parent 2566319554
commit 25345bea64
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 509 additions and 3 deletions

View File

@@ -56,7 +56,7 @@ class ExportModelsCommand(BaseCommand):
}
yield METADATA_FILE_NAME, yaml.safe_dump(metadata, sort_keys=False)
seen = set()
seen = {METADATA_FILE_NAME}
for model in self._models:
for file_name, file_content in self.export(model):
if file_name not in seen:

View File

@@ -56,12 +56,13 @@ class ExportDashboardsCommand(ExportModelsCommand):
# TODO (betodealmeida): move this logic to export_to_dict once this
# becomes the default export endpoint
for key, new_name in JSON_KEYS.items():
if payload.get(key):
if key in payload:
value = payload.pop(key)
try:
payload[new_name] = json.loads(value)
except json.decoder.JSONDecodeError:
except (TypeError, json.decoder.JSONDecodeError):
logger.info("Unable to decode `%s` field: %s", key, value)
payload[new_name] = ""
payload["version"] = EXPORT_VERSION

View File

@ -0,0 +1,195 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any, Dict, Iterator, List, Optional, Set, Tuple
from marshmallow import Schema, validate
from marshmallow.exceptions import ValidationError
from sqlalchemy.orm import Session
from sqlalchemy.sql import select
from superset import db
from superset.charts.commands.importers.v1.utils import import_chart
from superset.charts.schemas import ImportV1ChartSchema
from superset.commands.base import BaseCommand
from superset.commands.exceptions import CommandInvalidError
from superset.commands.importers.v1.utils import (
load_metadata,
load_yaml,
METADATA_FILE_NAME,
)
from superset.dashboards.commands.importers.v1.utils import import_dashboard
from superset.dashboards.schemas import ImportV1DashboardSchema
from superset.databases.commands.importers.v1.utils import import_database
from superset.databases.schemas import ImportV1DatabaseSchema
from superset.datasets.commands.importers.v1.utils import import_dataset
from superset.datasets.schemas import ImportV1DatasetSchema
from superset.models.dashboard import Dashboard, dashboard_slices
# Maps a bundle directory prefix to the marshmallow schema used to validate
# the YAML files found under that directory.
schemas: Dict[str, Schema] = {
    "charts/": ImportV1ChartSchema(),
    "dashboards/": ImportV1DashboardSchema(),
    "datasets/": ImportV1DatasetSchema(),
    "databases/": ImportV1DatabaseSchema(),
}
def find_chart_uuids(position: Dict[str, Any]) -> Iterator[str]:
    """Yield the UUID of every chart referenced in a dashboard position map.

    Only dict-valued nodes of type ``CHART`` whose ``meta`` contains a
    ``uuid`` key are considered.
    """
    for node in position.values():
        if not isinstance(node, dict):
            continue
        if node["type"] == "CHART" and "uuid" in node["meta"]:
            yield node["meta"]["uuid"]
class ImportDashboardsCommand(BaseCommand):
    """Import dashboards from a v1 export bundle.

    ``contents`` maps file names (as found in the exported ZIP) to their
    raw YAML content. ``run()`` validates the bundle and then imports the
    dashboards together with the charts, datasets, and databases they
    reference.
    """

    # pylint: disable=unused-argument
    def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
        self.contents = contents
        # file name -> parsed YAML config; populated by validate()
        self._configs: Dict[str, Any] = {}

    # TODO (betodealmeida): refactor to use code from other commands
    # pylint: disable=too-many-branches, too-many-locals
    def _import_bundle(self, session: Session) -> None:
        """Import all configs in dependency order:
        databases -> datasets -> charts -> dashboards.

        Only objects actually referenced (directly or transitively) by a
        dashboard in the bundle are imported.
        """
        # discover charts associated with dashboards
        chart_uuids: Set[str] = set()
        for file_name, config in self._configs.items():
            if file_name.startswith("dashboards/"):
                chart_uuids.update(find_chart_uuids(config["position"]))

        # discover datasets associated with charts
        dataset_uuids: Set[str] = set()
        for file_name, config in self._configs.items():
            if file_name.startswith("charts/") and config["uuid"] in chart_uuids:
                dataset_uuids.add(config["dataset_uuid"])

        # discover databases associated with datasets
        database_uuids: Set[str] = set()
        for file_name, config in self._configs.items():
            if file_name.startswith("datasets/") and config["uuid"] in dataset_uuids:
                database_uuids.add(config["database_uuid"])

        # import related databases; existing ones (overwrite=False) are reused
        database_ids: Dict[str, int] = {}
        for file_name, config in self._configs.items():
            if file_name.startswith("databases/") and config["uuid"] in database_uuids:
                database = import_database(session, config, overwrite=False)
                database_ids[str(database.uuid)] = database.id

        # import datasets with the correct parent ref
        dataset_info: Dict[str, Dict[str, Any]] = {}
        for file_name, config in self._configs.items():
            if (
                file_name.startswith("datasets/")
                and config["database_uuid"] in database_ids
            ):
                config["database_id"] = database_ids[config["database_uuid"]]
                dataset = import_dataset(session, config, overwrite=False)
                dataset_info[str(dataset.uuid)] = {
                    "datasource_id": dataset.id,
                    "datasource_type": "view" if dataset.is_sqllab_view else "table",
                    "datasource_name": dataset.table_name,
                }

        # import charts with the correct parent ref
        chart_ids: Dict[str, int] = {}
        for file_name, config in self._configs.items():
            if (
                file_name.startswith("charts/")
                and config["dataset_uuid"] in dataset_info
            ):
                # update datasource id, type, and name
                config.update(dataset_info[config["dataset_uuid"]])
                chart = import_chart(session, config, overwrite=False)
                chart_ids[str(chart.uuid)] = chart.id

        # store the existing relationship between dashboards and charts, so
        # that re-importing does not insert duplicate rows below
        existing_relationships = session.execute(
            select([dashboard_slices.c.dashboard_id, dashboard_slices.c.slice_id])
        ).fetchall()

        # import dashboards; overwrite=True so re-imports update in place
        dashboard_chart_ids: List[Tuple[int, int]] = []
        for file_name, config in self._configs.items():
            if file_name.startswith("dashboards/"):
                dashboard = import_dashboard(session, config, overwrite=True)
                for uuid in find_chart_uuids(config["position"]):
                    chart_id = chart_ids[uuid]
                    if (dashboard.id, chart_id) not in existing_relationships:
                        dashboard_chart_ids.append((dashboard.id, chart_id))

        # set ref in the dashboard_slices table
        values = [
            {"dashboard_id": dashboard_id, "slice_id": chart_id}
            for (dashboard_id, chart_id) in dashboard_chart_ids
        ]
        # pylint: disable=no-value-for-parameter (sqlalchemy/issues/4656)
        session.execute(dashboard_slices.insert(), values)

    def run(self) -> None:
        """Validate the bundle and import it inside a single transaction."""
        self.validate()

        # rollback to prevent partial imports
        try:
            self._import_bundle(db.session)
            db.session.commit()
        except Exception as exc:
            db.session.rollback()
            raise exc

    def validate(self) -> None:
        """Validate the bundle contents.

        :raises CommandInvalidError: aggregating every schema/type error found
        :raises IncorrectVersionError: via load_metadata/load_yaml when the
            metadata file is missing or has the wrong version
        """
        exceptions: List[ValidationError] = []

        # verify that the metadata file is present and valid
        try:
            metadata: Optional[Dict[str, str]] = load_metadata(self.contents)
        except ValidationError as exc:
            exceptions.append(exc)
            metadata = None

        # validate every recognized file against its prefix's schema
        for file_name, content in self.contents.items():
            prefix = file_name.split("/")[0]
            schema = schemas.get(f"{prefix}/")
            if schema:
                try:
                    config = load_yaml(file_name, content)
                    schema.load(config)
                    self._configs[file_name] = config
                except ValidationError as exc:
                    # key errors by file name so callers can pinpoint them
                    exc.messages = {file_name: exc.messages}
                    exceptions.append(exc)

        # validate that the type declared in METADATA_FILE_NAME is correct
        if metadata:
            type_validator = validate.Equal(Dashboard.__name__)
            try:
                type_validator(metadata["type"])
            except ValidationError as exc:
                exc.messages = {METADATA_FILE_NAME: {"type": exc.messages}}
                exceptions.append(exc)

        if exceptions:
            exception = CommandInvalidError("Error importing dashboard")
            exception.add_list(exceptions)
            raise exception

View File

@ -0,0 +1,55 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from typing import Any, Dict
from sqlalchemy.orm import Session
from superset.models.dashboard import Dashboard
logger = logging.getLogger(__name__)

# Import-config keys whose (already parsed) values are stored in the model's
# serialized JSON columns, mapped to the corresponding column names.
JSON_KEYS = {"position": "position_json", "metadata": "json_metadata"}
def import_dashboard(
    session: Session, config: Dict[str, Any], overwrite: bool = False
) -> Dashboard:
    """Create or update a dashboard from an import bundle config.

    If a dashboard with the same UUID already exists it is returned
    unchanged unless ``overwrite`` is true, in which case it is updated
    in place.

    :param session: active SQLAlchemy session
    :param config: parsed dashboard YAML config
    :param overwrite: whether to overwrite an existing dashboard
    :return: the imported (or pre-existing) dashboard
    """
    existing = session.query(Dashboard).filter_by(uuid=config["uuid"]).first()
    if existing:
        if not overwrite:
            return existing

    # copy before any mutation so the caller's dict is left untouched
    config = config.copy()
    if existing and overwrite:
        config["id"] = existing.id

    # TODO (betodealmeida): move this logic to import_from_dict
    # the model stores position/metadata as serialized JSON columns, so
    # re-encode the parsed values under the column names
    for key, new_name in JSON_KEYS.items():
        if config.get(key):
            value = config.pop(key)
            try:
                config[new_name] = json.dumps(value)
            except TypeError:
                # json.dumps raises TypeError on non-serializable values;
                # JSONDecodeError is a json.loads error and can never occur here
                logger.info("Unable to decode `%s` field: %s", key, value)

    dashboard = Dashboard.import_from_dict(session, config, recursive=False)
    if dashboard.id is None:
        # flush so the newly created row is assigned a primary key
        session.flush()

    return dashboard

View File

@ -176,3 +176,14 @@ class GetFavStarIdsSchema(Schema):
fields.Nested(ChartFavStarResponseResult),
description="A list of results for each corresponding chart in the request",
)
class ImportV1DashboardSchema(Schema):
    """Schema validating a dashboard YAML file inside a v1 import bundle."""

    dashboard_title = fields.String(required=True)
    description = fields.String(allow_none=True)
    css = fields.String()
    slug = fields.String(allow_none=True)
    # UUID identifies the dashboard across environments on re-import
    uuid = fields.UUID(required=True)
    # layout tree keyed by node id (charts, rows, grid, header, ...)
    position = fields.Dict()
    metadata = fields.Dict()
    version = fields.String(required=True)

View File

@ -116,6 +116,8 @@ class TestExportChartsCommand(SupersetTestCase):
"dataset_uuid",
]
class TestImportChartsCommand(SupersetTestCase):
def test_import_v1_chart(self):
"""Test that we can import a chart"""
contents = {

View File

@ -14,16 +14,30 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=no-self-use, invalid-name
import json
from unittest.mock import patch
import pytest
import yaml
from superset import db, security_manager
from superset.commands.exceptions import CommandInvalidError
from superset.commands.importers.exceptions import IncorrectVersionError
from superset.dashboards.commands.exceptions import DashboardNotFoundError
from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.dashboards.commands.importers.v1 import ImportDashboardsCommand
from superset.models.dashboard import Dashboard
from tests.base_tests import SupersetTestCase
from tests.fixtures.importexport import (
chart_config,
dashboard_config,
dashboard_metadata_config,
database_config,
dataset_config,
dataset_metadata_config,
)
class TestExportDashboardsCommand(SupersetTestCase):
@ -188,3 +202,168 @@ class TestExportDashboardsCommand(SupersetTestCase):
"metadata",
"version",
]
class TestImportDashboardsCommand(SupersetTestCase):
    """Tests for ImportDashboardsCommand (v1 import bundles)."""

    def test_import_v1_dashboard(self):
        """Test that we can import a dashboard"""
        contents = {
            "metadata.yaml": yaml.safe_dump(dashboard_metadata_config),
            "databases/imported_database.yaml": yaml.safe_dump(database_config),
            "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
            "charts/imported_chart.yaml": yaml.safe_dump(chart_config),
            "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
        }
        command = ImportDashboardsCommand(contents)
        command.run()

        dashboard = (
            db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one()
        )

        assert dashboard.dashboard_title == "Test dash"
        assert dashboard.description is None
        assert dashboard.css == ""
        assert dashboard.slug is None

        # position/metadata fixture dicts are persisted as serialized JSON
        assert json.loads(dashboard.position_json) == {
            "CHART-SVAlICPOSJ": {
                "children": [],
                "id": "CHART-SVAlICPOSJ",
                "meta": {
                    "chartId": 83,
                    "height": 50,
                    "sliceName": "Number of California Births",
                    "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
                    "width": 4,
                },
                "parents": ["ROOT_ID", "GRID_ID", "ROW-dP_CHaK2q"],
                "type": "CHART",
            },
            "DASHBOARD_VERSION_KEY": "v2",
            "GRID_ID": {
                "children": ["ROW-dP_CHaK2q"],
                "id": "GRID_ID",
                "parents": ["ROOT_ID"],
                "type": "GRID",
            },
            "HEADER_ID": {
                "id": "HEADER_ID",
                "meta": {"text": "Test dash"},
                "type": "HEADER",
            },
            "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"},
            "ROW-dP_CHaK2q": {
                "children": ["CHART-SVAlICPOSJ"],
                "id": "ROW-dP_CHaK2q",
                "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"},
                "parents": ["ROOT_ID", "GRID_ID"],
                "type": "ROW",
            },
        }
        assert json.loads(dashboard.json_metadata) == {
            "color_scheme": None,
            "default_filters": "{}",
            "expanded_slices": {},
            "import_time": 1604342885,
            "refresh_frequency": 0,
            "remote_id": 7,
            "timed_refresh_immune_slices": [],
        }

        # the full chain of related objects was imported as well
        assert len(dashboard.slices) == 1
        chart = dashboard.slices[0]
        assert str(chart.uuid) == chart_config["uuid"]

        dataset = chart.table
        assert str(dataset.uuid) == dataset_config["uuid"]

        database = dataset.database
        assert str(database.uuid) == database_config["uuid"]

        # clean up in dependency order
        db.session.delete(dashboard)
        db.session.delete(chart)
        db.session.delete(dataset)
        db.session.delete(database)
        db.session.commit()

    def test_import_v1_dashboard_multiple(self):
        """Test that a dashboard can be imported multiple times"""
        num_dashboards = db.session.query(Dashboard).count()

        contents = {
            "metadata.yaml": yaml.safe_dump(dashboard_metadata_config),
            "databases/imported_database.yaml": yaml.safe_dump(database_config),
            "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
            "charts/imported_chart.yaml": yaml.safe_dump(chart_config),
            "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
        }
        command = ImportDashboardsCommand(contents)
        # running twice must not create a duplicate dashboard
        command.run()
        command.run()

        new_num_dashboards = db.session.query(Dashboard).count()
        assert new_num_dashboards == num_dashboards + 1

        dashboard = (
            db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one()
        )
        chart = dashboard.slices[0]
        dataset = chart.table
        database = dataset.database

        # clean up in dependency order
        db.session.delete(dashboard)
        db.session.delete(chart)
        db.session.delete(dataset)
        db.session.delete(database)
        db.session.commit()

    def test_import_v1_dashboard_validation(self):
        """Test different validations applied when importing a dashboard"""
        # metadata.yaml must be present
        contents = {
            "databases/imported_database.yaml": yaml.safe_dump(database_config),
            "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
            "charts/imported_chart.yaml": yaml.safe_dump(chart_config),
            "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
        }
        command = ImportDashboardsCommand(contents)
        with pytest.raises(IncorrectVersionError) as excinfo:
            command.run()
        assert str(excinfo.value) == "Missing metadata.yaml"

        # version should be 1.0.0
        contents["metadata.yaml"] = yaml.safe_dump(
            {
                "version": "2.0.0",
                "type": "Database",
                "timestamp": "2020-11-04T21:27:44.423819+00:00",
            }
        )
        command = ImportDashboardsCommand(contents)
        with pytest.raises(IncorrectVersionError) as excinfo:
            command.run()
        assert str(excinfo.value) == "Must be equal to 1.0.0."

        # type should be Dashboard (dataset_metadata_config declares a
        # different type, so validation must reject it)
        contents["metadata.yaml"] = yaml.safe_dump(dataset_metadata_config)
        command = ImportDashboardsCommand(contents)
        with pytest.raises(CommandInvalidError) as excinfo:
            command.run()
        assert str(excinfo.value) == "Error importing dashboard"
        assert excinfo.value.normalized_messages() == {
            "metadata.yaml": {"type": ["Must be equal to Dashboard."]}
        }

        # must also validate datasets
        broken_config = dataset_config.copy()
        del broken_config["table_name"]
        contents["metadata.yaml"] = yaml.safe_dump(dashboard_metadata_config)
        contents["datasets/imported_dataset.yaml"] = yaml.safe_dump(broken_config)
        command = ImportDashboardsCommand(contents)
        with pytest.raises(CommandInvalidError) as excinfo:
            command.run()
        assert str(excinfo.value) == "Error importing dashboard"
        assert excinfo.value.normalized_messages() == {
            "datasets/imported_dataset.yaml": {
                "table_name": ["Missing data for required field."],
            }
        }

View File

@ -279,6 +279,8 @@ class TestExportDatabasesCommand(SupersetTestCase):
"version",
]
class TestImportDatabasesCommand(SupersetTestCase):
def test_import_v1_database(self):
"""Test that a database can be imported"""
contents = {

View File

@ -200,6 +200,8 @@ class TestExportDatasetsCommand(SupersetTestCase):
"database_uuid",
]
class TestImportDatasetsCommand(SupersetTestCase):
def test_import_v1_dataset(self):
"""Test that we can import a dataset"""
contents = {

View File

@ -36,6 +36,12 @@ chart_metadata_config: Dict[str, Any] = {
"timestamp": "2020-11-04T21:27:44.423819+00:00",
}
# metadata.yaml contents for a dashboard export bundle
dashboard_metadata_config: Dict[str, Any] = {
    "version": "1.0.0",
    "type": "Dashboard",
    "timestamp": "2020-11-04T21:27:44.423819+00:00",
}
database_config: Dict[str, Any] = {
"allow_csv_upload": True,
"allow_ctas": True,
@ -135,3 +141,56 @@ chart_config: Dict[str, Any] = {
"version": "1.0.0",
"dataset_uuid": "10808100-158b-42c4-842e-f32b99d88dfb",
}
# dashboards/imported_dashboard.yaml contents for a dashboard export bundle;
# the chart uuid in "position" matches chart_config
dashboard_config: Dict[str, Any] = {
    "dashboard_title": "Test dash",
    "description": None,
    "css": "",
    "slug": None,
    "uuid": "c4b28c4e-a1fe-4cf8-a5ac-d6f11d6fdd51",
    "position": {
        "CHART-SVAlICPOSJ": {
            "children": [],
            "id": "CHART-SVAlICPOSJ",
            "meta": {
                "chartId": 83,
                "height": 50,
                "sliceName": "Number of California Births",
                "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
                "width": 4,
            },
            "parents": ["ROOT_ID", "GRID_ID", "ROW-dP_CHaK2q"],
            "type": "CHART",
        },
        "DASHBOARD_VERSION_KEY": "v2",
        "GRID_ID": {
            "children": ["ROW-dP_CHaK2q"],
            "id": "GRID_ID",
            "parents": ["ROOT_ID"],
            "type": "GRID",
        },
        "HEADER_ID": {
            "id": "HEADER_ID",
            "meta": {"text": "Test dash"},
            "type": "HEADER",
        },
        "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"},
        "ROW-dP_CHaK2q": {
            "children": ["CHART-SVAlICPOSJ"],
            "id": "ROW-dP_CHaK2q",
            "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"},
            "parents": ["ROOT_ID", "GRID_ID"],
            "type": "ROW",
        },
    },
    "metadata": {
        "timed_refresh_immune_slices": [],
        "expanded_slices": {},
        "refresh_frequency": 0,
        "default_filters": "{}",
        "color_scheme": None,
        "remote_id": 7,
        "import_time": 1604342885,
    },
    "version": "1.0.0",
}