feat: add export_related flag (#19215)

* feat: add export_related flag

* Fix lint
Beto Dealmeida 2022-03-16 16:03:06 -07:00 committed by GitHub
parent fc8721800b
commit d01fdad1d8
12 changed files with 196 additions and 67 deletions

View File

@@ -26,7 +26,7 @@ from werkzeug.utils import secure_filename
from superset.charts.commands.exceptions import ChartNotFoundError
from superset.charts.dao import ChartDAO
from superset.datasets.commands.export import ExportDatasetsCommand
from superset.commands.export import ExportModelsCommand
from superset.commands.export.models import ExportModelsCommand
from superset.models.slice import Slice
from superset.utils.dict_import_export import EXPORT_VERSION
@@ -43,7 +43,7 @@ class ExportChartsCommand(ExportModelsCommand):
not_found = ChartNotFoundError
@staticmethod
def _export(model: Slice) -> Iterator[Tuple[str, str]]:
def _export(model: Slice, export_related: bool = True) -> Iterator[Tuple[str, str]]:
chart_slug = secure_filename(model.slice_name)
file_name = f"charts/{chart_slug}_{model.id}.yaml"
@@ -72,5 +72,5 @@ class ExportChartsCommand(ExportModelsCommand):
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
if model.table:
if model.table and export_related:
yield from ExportDatasetsCommand([model.table.id]).run()
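
For context, a minimal usage sketch of the new flag on the charts command: it bundles a single chart export into a ZIP while leaving the related dataset out. It assumes an initialized Superset app context and relies only on the run() generator shown above yielding (file_name, file_content) pairs; the helper name and ZIP layout are illustrative, not part of this commit.

from io import BytesIO
from zipfile import ZipFile

from superset.charts.commands.export import ExportChartsCommand


def export_chart_without_dataset(chart_id: int) -> bytes:
    """Illustrative helper: ZIP a single chart export, skipping its dataset."""
    buf = BytesIO()
    with ZipFile(buf, "w") as bundle:
        # with export_related=False only metadata.yaml and charts/<slug>_<id>.yaml
        # are yielded; the dataset (and its database) are left out
        for file_name, file_content in ExportChartsCommand(
            [chart_id], export_related=False
        ).run():
            bundle.writestr(file_name, file_content)
    return buf.getvalue()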

View File

@@ -0,0 +1,16 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

View File

@@ -14,10 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
from datetime import datetime
from datetime import timezone
from datetime import datetime, timezone
from typing import Iterator, List, Tuple, Type
import yaml
@@ -36,14 +34,15 @@ class ExportModelsCommand(BaseCommand):
dao: Type[BaseDAO] = BaseDAO
not_found: Type[CommandException] = CommandException
def __init__(self, model_ids: List[int]):
def __init__(self, model_ids: List[int], export_related: bool = True):
self.model_ids = model_ids
self.export_related = export_related
# this will be set when calling validate()
self._models: List[Model] = []
@staticmethod
def _export(model: Model) -> Iterator[Tuple[str, str]]:
def _export(model: Model, export_related: bool = True) -> Iterator[Tuple[str, str]]:
raise NotImplementedError("Subclasses MUST implement _export")
def run(self) -> Iterator[Tuple[str, str]]:
@@ -58,7 +57,7 @@ class ExportModelsCommand(BaseCommand):
seen = {METADATA_FILE_NAME}
for model in self._models:
for file_name, file_content in self._export(model):
for file_name, file_content in self._export(model, self.export_related):
if file_name not in seen:
yield file_name, file_content
seen.add(file_name)
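
To make the dispatch above easier to follow, here is a self-contained sketch in plain Python (no Superset imports) of the pattern: run() threads self.export_related into every _export call, and the seen set keeps related exports from emitting the same file twice. It is a simplified model of the pattern, not the actual implementation.

from typing import Iterator, List, Tuple

METADATA_FILE_NAME = "metadata.yaml"


class FakeExportCommand:
    """Simplified stand-in for an ExportModelsCommand subclass."""

    def __init__(self, model_ids: List[int], export_related: bool = True):
        self.model_ids = model_ids
        self.export_related = export_related

    @staticmethod
    def _export(model_id: int, export_related: bool = True) -> Iterator[Tuple[str, str]]:
        yield f"models/{model_id}.yaml", f"id: {model_id}\n"
        if export_related:
            # related assets can be shared between models, hence the dedup in run()
            yield "databases/shared.yaml", "name: shared\n"

    def run(self) -> Iterator[Tuple[str, str]]:
        yield METADATA_FILE_NAME, "version: 1.0.0\n"
        seen = {METADATA_FILE_NAME}
        for model_id in self.model_ids:
            for file_name, file_content in self._export(model_id, self.export_related):
                if file_name not in seen:
                    yield file_name, file_content
                    seen.add(file_name)


# databases/shared.yaml appears once with the flag on, and not at all with it off
print([name for name, _ in FakeExportCommand([1, 2]).run()])
print([name for name, _ in FakeExportCommand([1, 2], export_related=False).run()])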

View File

@@ -29,7 +29,7 @@ from superset.charts.commands.export import ExportChartsCommand
from superset.dashboards.commands.exceptions import DashboardNotFoundError
from superset.dashboards.commands.importers.v1.utils import find_chart_uuids
from superset.dashboards.dao import DashboardDAO
from superset.commands.export import ExportModelsCommand
from superset.commands.export.models import ExportModelsCommand
from superset.datasets.commands.export import ExportDatasetsCommand
from superset.datasets.dao import DatasetDAO
from superset.models.dashboard import Dashboard
@@ -106,8 +106,11 @@ class ExportDashboardsCommand(ExportModelsCommand):
dao = DashboardDAO
not_found = DashboardNotFoundError
# pylint: disable=too-many-locals
@staticmethod
def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
def _export(
model: Dashboard, export_related: bool = True
) -> Iterator[Tuple[str, str]]:
dashboard_slug = secure_filename(model.dashboard_title)
file_name = f"dashboards/{dashboard_slug}.yaml"
@@ -138,7 +141,8 @@ class ExportDashboardsCommand(ExportModelsCommand):
if dataset_id is not None:
dataset = DatasetDAO.find_by_id(dataset_id)
target["datasetUuid"] = str(dataset.uuid)
yield from ExportDatasetsCommand([dataset_id]).run()
if export_related:
yield from ExportDatasetsCommand([dataset_id]).run()
# the mapping between dashboard -> charts is inferred from the position
# attribute, so if it's not present we need to add a default config
@@ -160,5 +164,6 @@ class ExportDashboardsCommand(ExportModelsCommand):
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
chart_ids = [chart.id for chart in model.slices]
yield from ExportChartsCommand(chart_ids).run()
if export_related:
chart_ids = [chart.id for chart in model.slices]
yield from ExportChartsCommand(chart_ids).run()
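
As a rough illustration of what the flag changes for dashboards, the sketch below groups the exported file names by top-level directory. An initialized Superset app context is assumed and the helper name is illustrative; with the flag on, the counter typically also shows charts/, datasets/ and databases/ entries pulled in transitively, while with it off only metadata.yaml and the dashboards/ entry remain.

from collections import Counter

from superset.dashboards.commands.export import ExportDashboardsCommand


def dashboard_export_summary(dashboard_id: int, export_related: bool = True) -> Counter:
    """Count exported files per top-level directory (dashboards/, charts/, datasets/, ...)."""
    contents = dict(
        ExportDashboardsCommand([dashboard_id], export_related=export_related).run()
    )
    return Counter(path.split("/", 1)[0] for path in contents)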

View File

@@ -25,7 +25,7 @@ from werkzeug.utils import secure_filename
from superset.databases.commands.exceptions import DatabaseNotFoundError
from superset.databases.dao import DatabaseDAO
from superset.commands.export import ExportModelsCommand
from superset.commands.export.models import ExportModelsCommand
from superset.models.core import Database
from superset.utils.dict_import_export import EXPORT_VERSION
@@ -55,7 +55,9 @@ class ExportDatabasesCommand(ExportModelsCommand):
not_found = DatabaseNotFoundError
@staticmethod
def _export(model: Database) -> Iterator[Tuple[str, str]]:
def _export(
model: Database, export_related: bool = True
) -> Iterator[Tuple[str, str]]:
database_slug = secure_filename(model.database_name)
file_name = f"databases/{database_slug}.yaml"
@@ -90,18 +92,19 @@
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
for dataset in model.tables:
dataset_slug = secure_filename(dataset.table_name)
file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
if export_related:
for dataset in model.tables:
dataset_slug = secure_filename(dataset.table_name)
file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
payload = dataset.export_to_dict(
recursive=True,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
payload["version"] = EXPORT_VERSION
payload["database_uuid"] = str(model.uuid)
payload = dataset.export_to_dict(
recursive=True,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
payload["version"] = EXPORT_VERSION
payload["database_uuid"] = str(model.uuid)
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
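
The same idea for databases: a small sketch that computes which files exist only because related datasets were included, by diffing the two modes. It assumes an initialized Superset app context; the helper name is illustrative.

from typing import Set

from superset.databases.commands.export import ExportDatabasesCommand


def files_added_by_related(database_id: int) -> Set[str]:
    """File names that only appear when related datasets are exported."""
    with_related = set(dict(ExportDatabasesCommand([database_id]).run()))
    without_related = set(
        dict(ExportDatabasesCommand([database_id], export_related=False).run())
    )
    # expected to be the datasets/<database_slug>/<dataset_slug>.yaml entries
    return with_related - without_related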

View File

@@ -23,7 +23,7 @@ from typing import Iterator, Tuple
import yaml
from werkzeug.utils import secure_filename
from superset.commands.export import ExportModelsCommand
from superset.commands.export.models import ExportModelsCommand
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.datasets.dao import DatasetDAO
@@ -40,7 +40,9 @@ class ExportDatasetsCommand(ExportModelsCommand):
not_found = DatasetNotFoundError
@staticmethod
def _export(model: SqlaTable) -> Iterator[Tuple[str, str]]:
def _export(
model: SqlaTable, export_related: bool = True
) -> Iterator[Tuple[str, str]]:
database_slug = secure_filename(model.database.database_name)
dataset_slug = secure_filename(model.table_name)
file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
@@ -76,23 +78,24 @@
yield file_name, file_content
# include database as well
file_name = f"databases/{database_slug}.yaml"
if export_related:
file_name = f"databases/{database_slug}.yaml"
payload = model.database.export_to_dict(
recursive=False,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
# TODO (betodealmeida): move this logic to export_to_dict once this
# becomes the default export endpoint
if payload.get("extra"):
try:
payload["extra"] = json.loads(payload["extra"])
except json.decoder.JSONDecodeError:
logger.info("Unable to decode `extra` field: %s", payload["extra"])
payload = model.database.export_to_dict(
recursive=False,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
# TODO (betodealmeida): move this logic to export_to_dict once this
# becomes the default export endpoint
if payload.get("extra"):
try:
payload["extra"] = json.loads(payload["extra"])
except json.decoder.JSONDecodeError:
logger.info("Unable to decode `extra` field: %s", payload["extra"])
payload["version"] = EXPORT_VERSION
payload["version"] = EXPORT_VERSION
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
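
One way a caller might combine the commands now that related exports are optional: emit the dataset YAMLs without their parent database, then emit the database payload once, on its own terms. A sketch under the same assumptions as above (initialized app context, illustrative helper name).

from typing import Dict, List

from superset.databases.commands.export import ExportDatabasesCommand
from superset.datasets.commands.export import ExportDatasetsCommand


def export_datasets_with_single_database(
    dataset_ids: List[int], database_id: int
) -> Dict[str, str]:
    """Dataset YAMLs without their parent database, plus the database YAML once."""
    files = dict(ExportDatasetsCommand(dataset_ids, export_related=False).run())
    # with export_related=True each dataset export also pulls in databases/<slug>.yaml;
    # turning it off lets the caller decide when (and whether) to emit the database payload
    files.update(
        dict(ExportDatabasesCommand([database_id], export_related=False).run())
    )
    return files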

View File

@@ -23,7 +23,7 @@ from typing import Iterator, Tuple
import yaml
from werkzeug.utils import secure_filename
from superset.commands.export import ExportModelsCommand
from superset.commands.export.models import ExportModelsCommand
from superset.models.sql_lab import SavedQuery
from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError
from superset.queries.saved_queries.dao import SavedQueryDAO
@@ -38,7 +38,9 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
not_found = SavedQueryNotFoundError
@staticmethod
def _export(model: SavedQuery) -> Iterator[Tuple[str, str]]:
def _export(
model: SavedQuery, export_related: bool = True
) -> Iterator[Tuple[str, str]]:
# build filename based on database, optional schema, and label
database_slug = secure_filename(model.database.database_name)
schema_slug = secure_filename(model.schema)
@@ -58,23 +60,24 @@
yield file_name, file_content
# include database as well
file_name = f"databases/{database_slug}.yaml"
if export_related:
file_name = f"databases/{database_slug}.yaml"
payload = model.database.export_to_dict(
recursive=False,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
# TODO (betodealmeida): move this logic to export_to_dict once this
# becomes the default export endpoint
if "extra" in payload:
try:
payload["extra"] = json.loads(payload["extra"])
except json.decoder.JSONDecodeError:
logger.info("Unable to decode `extra` field: %s", payload["extra"])
payload = model.database.export_to_dict(
recursive=False,
include_parent_ref=False,
include_defaults=True,
export_uuids=True,
)
# TODO (betodealmeida): move this logic to export_to_dict once this
# becomes the default export endpoint
if "extra" in payload:
try:
payload["extra"] = json.loads(payload["extra"])
except json.decoder.JSONDecodeError:
logger.info("Unable to decode `extra` field: %s", payload["extra"])
payload["version"] = EXPORT_VERSION
payload["version"] = EXPORT_VERSION
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
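
And for saved queries, a sketch that writes the export straight to a directory tree, leaving the shared databases/ entry out by default. Path handling and the helper name are illustrative, and an initialized Superset app context is assumed.

from pathlib import Path
from typing import List

from superset.queries.saved_queries.commands.export import ExportSavedQueriesCommand


def dump_saved_queries(
    query_ids: List[int], target_dir: str, include_database: bool = False
) -> None:
    """Write queries/<db>/<schema>/<label>.yaml files under target_dir."""
    for file_name, file_content in ExportSavedQueriesCommand(
        query_ids, export_related=include_database
    ).run():
        path = Path(target_dir) / file_name
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(file_content)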

View File

@@ -176,6 +176,26 @@ class TestExportChartsCommand(SupersetTestCase):
"dataset_uuid",
]
@patch("superset.security.manager.g")
@pytest.mark.usefixtures("load_energy_table_with_slice")
def test_export_chart_command_no_related(self, mock_g):
"""
Test that only the chart is exported when export_related=False.
"""
mock_g.user = security_manager.find_user("admin")
example_chart = (
db.session.query(Slice).filter_by(slice_name="Energy Sankey").one()
)
command = ExportChartsCommand([example_chart.id], export_related=False)
contents = dict(command.run())
expected = [
"metadata.yaml",
f"charts/Energy_Sankey_{example_chart.id}.yaml",
]
assert expected == list(contents.keys())
class TestImportChartsCommand(SupersetTestCase):
@patch("superset.charts.commands.importers.v1.utils.g")

View File

@@ -423,6 +423,28 @@ class TestExportDashboardsCommand(SupersetTestCase):
"DASHBOARD_VERSION_KEY": "v2",
}
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
@patch("superset.security.manager.g")
@patch("superset.views.base.g")
def test_export_dashboard_command_no_related(self, mock_g1, mock_g2):
"""
Test that only the dashboard is exported when export_related=False.
"""
mock_g1.user = security_manager.find_user("admin")
mock_g2.user = security_manager.find_user("admin")
example_dashboard = (
db.session.query(Dashboard).filter_by(slug="world_health").one()
)
command = ExportDashboardsCommand([example_dashboard.id], export_related=False)
contents = dict(command.run())
expected_paths = {
"metadata.yaml",
"dashboards/World_Banks_Data.yaml",
}
assert expected_paths == set(contents.keys())
class TestImportDashboardsCommand(SupersetTestCase):
def test_import_v0_dashboard_cli_export(self):

View File

@@ -358,6 +358,26 @@ class TestExportDatabasesCommand(SupersetTestCase):
"version",
]
@patch("superset.security.manager.g")
@pytest.mark.usefixtures(
"load_birth_names_dashboard_with_slices", "load_energy_table_with_slice"
)
def test_export_database_command_no_related(self, mock_g):
"""
Test that only databases are exported when export_related=False.
"""
mock_g.user = security_manager.find_user("admin")
example_db = get_example_database()
db_uuid = example_db.uuid
command = ExportDatabasesCommand([example_db.id], export_related=False)
contents = dict(command.run())
prefixes = {path.split("/")[0] for path in contents}
assert "metadata.yaml" in prefixes
assert "databases" in prefixes
assert "datasets" not in prefixes
class TestImportDatabasesCommand(SupersetTestCase):
def test_import_v1_database(self):

View File

@@ -219,6 +219,26 @@ class TestExportDatasetsCommand(SupersetTestCase):
"database_uuid",
]
@patch("superset.security.manager.g")
@pytest.mark.usefixtures("load_energy_table_with_slice")
def test_export_dataset_command_no_related(self, mock_g):
"""
Test that only datasets are exported when export_related=False.
"""
mock_g.user = security_manager.find_user("admin")
example_db = get_example_database()
example_dataset = _get_table_from_list_by_name(
"energy_usage", example_db.tables
)
command = ExportDatasetsCommand([example_dataset.id], export_related=False)
contents = dict(command.run())
assert list(contents.keys()) == [
"metadata.yaml",
"datasets/examples/energy_usage.yaml",
]
class TestImportDatasetsCommand(SupersetTestCase):
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")

View File

@@ -83,6 +83,24 @@ class TestExportSavedQueriesCommand(SupersetTestCase):
"database_uuid": str(self.example_database.uuid),
}
@patch("superset.queries.saved_queries.filters.g")
def test_export_query_command_no_related(self, mock_g):
"""
Test that only the query is exported when export_related=False.
"""
mock_g.user = security_manager.find_user("admin")
command = ExportSavedQueriesCommand(
[self.example_query.id], export_related=False
)
contents = dict(command.run())
expected = [
"metadata.yaml",
"queries/examples/schema1/The_answer.yaml",
]
assert expected == list(contents.keys())
@patch("superset.queries.saved_queries.filters.g")
def test_export_query_command_no_access(self, mock_g):
"""Test that users can't export datasets they don't have access to"""