perf(export): export generates unnecessary files content (#26765)

Author: Stepan
Date: 2024-02-22 02:28:38 +03:00 (committed by GitHub)
Parent: 744f68d637
Commit: 2e4f6d3f38
23 changed files with 279 additions and 176 deletions
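At its core, the change converts every export command's generator contract from `Iterator[tuple[str, str]]` to `Iterator[tuple[str, Callable[[], str]]]`: instead of rendering each file's YAML up front, a command yields the file name together with a zero-argument callable, and the content is rendered only if and when a consumer invokes it. A minimal, self-contained sketch of the pattern (the function and file names here are illustrative, not Superset's actual API):

from collections.abc import Iterator
from typing import Callable

def expensive_render(name: str) -> str:
    # stand-in for the real export_to_dict() + yaml.safe_dump() work
    return f"contents of {name}\n"

# Before: content is rendered for every yielded file, even files the
# consumer later discards (e.g. duplicates).
def export_eager() -> Iterator[tuple[str, str]]:
    for name in ("a.yaml", "b.yaml", "a.yaml"):
        yield name, expensive_render(name)

# After: content is deferred behind a callable, paid for only on demand.
def export_lazy() -> Iterator[tuple[str, Callable[[], str]]]:
    for name in ("a.yaml", "b.yaml", "a.yaml"):
        # bind `name` as a default argument so each lambda keeps its own value
        yield name, lambda name=name: expensive_render(name)

seen: set[str] = set()
for file_name, file_content in export_lazy():
    if file_name in seen:
        continue  # duplicate skipped before any rendering cost is paid
    seen.add(file_name)
    print(file_name, file_content())  # rendered exactly once, here

The default-argument binding in the loop matters; the commit itself sidesteps the same late-binding pitfall with `functools.partial` in `ExportDatabasesCommand`, discussed below.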

View File

@@ -793,7 +793,7 @@ class ChartRestApi(BaseSupersetModelRestApi):
try:
for file_name, file_content in ExportChartsCommand(requested_ids).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
except ChartNotFoundError:
return self.response_404()
buf.seek(0)

View File

@@ -83,7 +83,7 @@ def export_dashboards(dashboard_file: Optional[str] = None) -> None:
with ZipFile(dashboard_file, "w") as bundle:
for file_name, file_content in ExportDashboardsCommand(dashboard_ids).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when exporting the dashboards, please check "
@@ -116,7 +116,7 @@ def export_datasources(datasource_file: Optional[str] = None) -> None:
with ZipFile(datasource_file, "w") as bundle:
for file_name, file_content in ExportDatasetsCommand(dataset_ids).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
except Exception: # pylint: disable=broad-except
logger.exception(
"There was an error when exporting the datasets, please check "

View File

@@ -19,6 +19,7 @@
import json
import logging
from collections.abc import Iterator
+from typing import Callable
import yaml
@@ -42,10 +43,12 @@ class ExportChartsCommand(ExportModelsCommand):
not_found = ChartNotFoundError
@staticmethod
-def _export(model: Slice, export_related: bool = True) -> Iterator[tuple[str, str]]:
+def _file_name(model: Slice) -> str:
file_name = get_filename(model.slice_name, model.id)
-file_path = f"charts/{file_name}.yaml"
+return f"charts/{file_name}.yaml"
+@staticmethod
+def _file_content(model: Slice) -> str:
payload = model.export_to_dict(
recursive=False,
include_parent_ref=False,
@@ -69,7 +72,15 @@ class ExportChartsCommand(ExportModelsCommand):
payload["dataset_uuid"] = str(model.table.uuid)
file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_path, file_content
+return file_content
+@staticmethod
+def _export(
+model: Slice, export_related: bool = True
+) -> Iterator[tuple[str, Callable[[], str]]]:
+yield ExportChartsCommand._file_name(
+model
+), lambda: ExportChartsCommand._file_content(model)
if model.table and export_related:
yield from ExportDatasetsCommand([model.table.id]).run()

View File

@@ -20,7 +20,7 @@ import json
import logging
import random
import string
-from typing import Any, Optional
+from typing import Any, Optional, Callable
from collections.abc import Iterator
import yaml
@@ -106,13 +106,61 @@ class ExportDashboardsCommand(ExportModelsCommand):
dao = DashboardDAO
not_found = DashboardNotFoundError
# pylint: disable=too-many-locals
+@staticmethod
+def _file_name(model: Dashboard) -> str:
+file_name = get_filename(model.dashboard_title, model.id)
+return f"dashboards/{file_name}.yaml"
+@staticmethod
+def _file_content(model: Dashboard) -> str:
+payload = model.export_to_dict(
+recursive=False,
+include_parent_ref=False,
+include_defaults=True,
+export_uuids=True,
+)
+# TODO (betodealmeida): move this logic to export_to_dict once this
+# becomes the default export endpoint
+for key, new_name in JSON_KEYS.items():
+value: Optional[str] = payload.pop(key, None)
+if value:
+try:
+payload[new_name] = json.loads(value)
+except (TypeError, json.decoder.JSONDecodeError):
+logger.info("Unable to decode `%s` field: %s", key, value)
+payload[new_name] = {}
+# the mapping between dashboard -> charts is inferred from the position
+# attribute, so if it's not present we need to add a default config
+if not payload.get("position"):
+payload["position"] = get_default_position(model.dashboard_title)
+# if any charts are not referenced in position, we need to add them
+# in a new row
+referenced_charts = find_chart_uuids(payload["position"])
+orphan_charts = {
+chart for chart in model.slices if str(chart.uuid) not in referenced_charts
+}
+if orphan_charts:
+payload["position"] = append_charts(payload["position"], orphan_charts)
+payload["version"] = EXPORT_VERSION
+file_content = yaml.safe_dump(payload, sort_keys=False)
+return file_content
@staticmethod
def _export(
model: Dashboard, export_related: bool = True
-) -> Iterator[tuple[str, str]]:
-file_name = get_filename(model.dashboard_title, model.id)
-file_path = f"dashboards/{file_name}.yaml"
+) -> Iterator[tuple[str, Callable[[], str]]]:
+yield ExportDashboardsCommand._file_name(
+model
+), lambda: ExportDashboardsCommand._file_content(model)
+if export_related:
+chart_ids = [chart.id for chart in model.slices]
+yield from ExportChartsCommand(chart_ids).run()
payload = model.export_to_dict(
recursive=False,
@@ -144,27 +192,3 @@ class ExportDashboardsCommand(ExportModelsCommand):
target["datasetUuid"] = str(dataset.uuid)
if export_related:
yield from ExportDatasetsCommand([dataset_id]).run()
-# the mapping between dashboard -> charts is inferred from the position
-# attribute, so if it's not present we need to add a default config
-if not payload.get("position"):
-payload["position"] = get_default_position(model.dashboard_title)
-# if any charts or not referenced in position, we need to add them
-# in a new row
-referenced_charts = find_chart_uuids(payload["position"])
-orphan_charts = {
-chart for chart in model.slices if str(chart.uuid) not in referenced_charts
-}
-if orphan_charts:
-payload["position"] = append_charts(payload["position"], orphan_charts)
-payload["version"] = EXPORT_VERSION
-file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_path, file_content
-if export_related:
-chart_ids = [chart.id for chart in model.slices]
-yield from ExportChartsCommand(chart_ids).run()

View File

@@ -15,10 +15,10 @@
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
+import functools
import json
import logging
-from typing import Any
+from typing import Any, Callable
from collections.abc import Iterator
import yaml
@@ -56,12 +56,12 @@ class ExportDatabasesCommand(ExportModelsCommand):
not_found = DatabaseNotFoundError
@staticmethod
-def _export(
-model: Database, export_related: bool = True
-) -> Iterator[tuple[str, str]]:
+def _file_name(model: Database) -> str:
db_file_name = get_filename(model.database_name, model.id, skip_id=True)
-file_path = f"databases/{db_file_name}.yaml"
+return f"databases/{db_file_name}.yaml"
+@staticmethod
+def _file_content(model: Database) -> str:
payload = model.export_to_dict(
recursive=False,
include_parent_ref=False,
@@ -100,9 +100,18 @@ class ExportDatabasesCommand(ExportModelsCommand):
payload["version"] = EXPORT_VERSION
file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_path, file_content
+return file_content
+@staticmethod
+def _export(
+model: Database, export_related: bool = True
+) -> Iterator[tuple[str, Callable[[], str]]]:
+yield ExportDatabasesCommand._file_name(
+model
+), lambda: ExportDatabasesCommand._file_content(model)
if export_related:
+db_file_name = get_filename(model.database_name, model.id, skip_id=True)
for dataset in model.tables:
ds_file_name = get_filename(
dataset.table_name, dataset.id, skip_id=True
@@ -118,5 +127,6 @@ class ExportDatabasesCommand(ExportModelsCommand):
payload["version"] = EXPORT_VERSION
payload["database_uuid"] = str(model.uuid)
-file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_path, file_content
+yield file_path, functools.partial( # type: ignore
+yaml.safe_dump, payload, sort_keys=False
+)
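Note the use of `functools.partial` here rather than a bare `lambda: yaml.safe_dump(payload, sort_keys=False)`: this yield sits inside the loop over `model.tables`, and a lambda would close over the loop variable `payload` late, so every deferred call would dump the last dataset's payload. `partial` binds the current value at creation time. A small self-contained illustration of the difference (the data is made up for the example):

import functools

payloads = [{"id": 1}, {"id": 2}, {"id": 3}]

late_bound = [lambda: p["id"] for p in payloads]  # closes over the variable p
bound_now = [functools.partial(lambda p: p["id"], p) for p in payloads]

print([f() for f in late_bound])  # [3, 3, 3] -- every call sees the final p
print([f() for f in bound_now])   # [1, 2, 3] -- partial froze each value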

View File

@@ -19,6 +19,7 @@
import json
import logging
from collections.abc import Iterator
+from typing import Callable
import yaml
@@ -41,15 +42,15 @@ class ExportDatasetsCommand(ExportModelsCommand):
not_found = DatasetNotFoundError
@staticmethod
-def _export(
-model: SqlaTable, export_related: bool = True
-) -> Iterator[tuple[str, str]]:
+def _file_name(model: SqlaTable) -> str:
db_file_name = get_filename(
model.database.database_name, model.database.id, skip_id=True
)
ds_file_name = get_filename(model.table_name, model.id, skip_id=True)
-file_path = f"datasets/{db_file_name}/{ds_file_name}.yaml"
+return f"datasets/{db_file_name}/{ds_file_name}.yaml"
+@staticmethod
+def _file_content(model: SqlaTable) -> str:
payload = model.export_to_dict(
recursive=True,
include_parent_ref=False,
@@ -78,10 +79,21 @@ class ExportDatasetsCommand(ExportModelsCommand):
payload["database_uuid"] = str(model.database.uuid)
file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_path, file_content
+return file_content
+@staticmethod
+def _export(
+model: SqlaTable, export_related: bool = True
+) -> Iterator[tuple[str, Callable[[], str]]]:
+yield ExportDatasetsCommand._file_name(
+model
+), lambda: ExportDatasetsCommand._file_content(model)
# include database as well
if export_related:
+db_file_name = get_filename(
+model.database.database_name, model.database.id, skip_id=True
+)
file_path = f"databases/{db_file_name}.yaml"
payload = model.database.export_to_dict(
@@ -109,5 +121,4 @@ class ExportDatasetsCommand(ExportModelsCommand):
payload["version"] = EXPORT_VERSION
-file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_path, file_content
+yield file_path, lambda: yaml.safe_dump(payload, sort_keys=False)

View File

@@ -1,63 +1,64 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from collections.abc import Iterator
-from datetime import datetime, timezone
-import yaml
-from superset.commands.base import BaseCommand
-from superset.commands.chart.export import ExportChartsCommand
-from superset.commands.dashboard.export import ExportDashboardsCommand
-from superset.commands.database.export import ExportDatabasesCommand
-from superset.commands.dataset.export import ExportDatasetsCommand
-from superset.commands.query.export import ExportSavedQueriesCommand
-from superset.utils.dict_import_export import EXPORT_VERSION
-METADATA_FILE_NAME = "metadata.yaml"
-class ExportAssetsCommand(BaseCommand):
-"""
-Command that exports all databases, datasets, charts, dashboards and saved queries.
-"""
-def run(self) -> Iterator[tuple[str, str]]:
-metadata = {
-"version": EXPORT_VERSION,
-"type": "assets",
-"timestamp": datetime.now(tz=timezone.utc).isoformat(),
-}
-yield METADATA_FILE_NAME, yaml.safe_dump(metadata, sort_keys=False)
-seen = {METADATA_FILE_NAME}
-commands = [
-ExportDatabasesCommand,
-ExportDatasetsCommand,
-ExportChartsCommand,
-ExportDashboardsCommand,
-ExportSavedQueriesCommand,
-]
-for command in commands:
-ids = [model.id for model in command.dao.find_all()]
-for file_name, file_content in command(ids, export_related=False).run():
-if file_name not in seen:
-yield file_name, file_content
-seen.add(file_name)
-def validate(self) -> None:
-pass
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from collections.abc import Iterator
+from datetime import datetime, timezone
+from typing import Callable
+import yaml
+from superset.commands.base import BaseCommand
+from superset.commands.chart.export import ExportChartsCommand
+from superset.commands.dashboard.export import ExportDashboardsCommand
+from superset.commands.database.export import ExportDatabasesCommand
+from superset.commands.dataset.export import ExportDatasetsCommand
+from superset.commands.query.export import ExportSavedQueriesCommand
+from superset.utils.dict_import_export import EXPORT_VERSION
+METADATA_FILE_NAME = "metadata.yaml"
+class ExportAssetsCommand(BaseCommand):
+"""
+Command that exports all databases, datasets, charts, dashboards and saved queries.
+"""
+def run(self) -> Iterator[tuple[str, Callable[[], str]]]:
+metadata = {
+"version": EXPORT_VERSION,
+"type": "assets",
+"timestamp": datetime.now(tz=timezone.utc).isoformat(),
+}
+yield METADATA_FILE_NAME, lambda: yaml.safe_dump(metadata, sort_keys=False)
+seen = {METADATA_FILE_NAME}
+commands = [
+ExportDatabasesCommand,
+ExportDatasetsCommand,
+ExportChartsCommand,
+ExportDashboardsCommand,
+ExportSavedQueriesCommand,
+]
+for command in commands:
+ids = [model.id for model in command.dao.find_all()]
+for file_name, file_content in command(ids, export_related=False).run():
+if file_name not in seen:
+yield file_name, file_content
+seen.add(file_name)
+def validate(self) -> None:
+pass
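This command is where the deferral pays off most visibly: related exports re-yield the same file names (a chart's dataset, a dataset's database), and any entry filtered out by `seen` is now discarded before its YAML is ever rendered. Consumers then materialize each surviving callable exactly once while writing the ZIP bundle, along the lines of this sketch (mirroring the endpoint code further below; the `root` value is illustrative):

from io import BytesIO
from zipfile import ZipFile

from superset.commands.export.assets import ExportAssetsCommand

buf = BytesIO()
root = "assets_export"  # illustrative bundle root
with ZipFile(buf, "w") as bundle:
    for file_name, file_content in ExportAssetsCommand().run():
        with bundle.open(f"{root}/{file_name}", "w") as fp:
            # the second element is now Callable[[], str]; calling it
            # renders the YAML for exactly this one file
            fp.write(file_content().encode())
buf.seek(0)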

View File

@@ -17,6 +17,7 @@
from collections.abc import Iterator
from datetime import datetime, timezone
+from typing import Callable
import yaml
from flask_appbuilder import Model
@@ -41,10 +42,20 @@ class ExportModelsCommand(BaseCommand):
self._models: list[Model] = []
@staticmethod
-def _export(model: Model, export_related: bool = True) -> Iterator[tuple[str, str]]:
+def _file_name(model: Model) -> str:
+raise NotImplementedError("Subclasses MUST implement _file_name")
+@staticmethod
+def _file_content(model: Model) -> str:
raise NotImplementedError("Subclasses MUST implement _export")
-def run(self) -> Iterator[tuple[str, str]]:
+@staticmethod
+def _export(
+model: Model, export_related: bool = True
+) -> Iterator[tuple[str, Callable[[], str]]]:
+raise NotImplementedError("Subclasses MUST implement _export")
+def run(self) -> Iterator[tuple[str, Callable[[], str]]]:
self.validate()
metadata = {
@@ -52,7 +63,7 @@ class ExportModelsCommand(BaseCommand):
"type": self.dao.model_cls.__name__, # type: ignore
"timestamp": datetime.now(tz=timezone.utc).isoformat(),
}
-yield METADATA_FILE_NAME, yaml.safe_dump(metadata, sort_keys=False)
+yield METADATA_FILE_NAME, lambda: yaml.safe_dump(metadata, sort_keys=False)
seen = {METADATA_FILE_NAME}
for model in self._models:
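Taken together, the base class now splits the old `_export` into three hooks: `_file_name` (cheap), `_file_content` (expensive), and an `_export` that pairs the eager name with a deferred content callable. A schematic of what a subclass looks like under this contract, using a hypothetical `Note` model rather than a real Superset one (the base class is omitted so the snippet stands alone):

from collections.abc import Iterator
from dataclasses import dataclass
from typing import Callable

import yaml

@dataclass
class Note:  # hypothetical model standing in for a SQLAlchemy model
    id: int
    title: str

class ExportNotesCommand:  # schematic: the ExportModelsCommand base is omitted
    @staticmethod
    def _file_name(model: Note) -> str:
        return f"notes/{model.id}.yaml"

    @staticmethod
    def _file_content(model: Note) -> str:
        return yaml.safe_dump({"title": model.title}, sort_keys=False)

    @staticmethod
    def _export(
        model: Note, export_related: bool = True
    ) -> Iterator[tuple[str, Callable[[], str]]]:
        # only the cheap file name is computed eagerly; the YAML dump
        # happens when (and if) the consumer calls the second element
        yield (
            ExportNotesCommand._file_name(model),
            lambda: ExportNotesCommand._file_content(model),
        )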

View File

@@ -19,6 +19,7 @@
import json
import logging
from collections.abc import Iterator
+from typing import Callable
import yaml
from werkzeug.utils import secure_filename
@@ -37,10 +38,8 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
not_found = SavedQueryNotFoundError
@staticmethod
-def _export(
-model: SavedQuery, export_related: bool = True
-) -> Iterator[tuple[str, str]]:
-# build filename based on database, optional schema, and label.
+def _file_name(model: SavedQuery) -> str:
+# build filename based on database, optional schema, and label
# we call secure_filename() multiple times and join the directories afterwards,
# as secure_filename() replaces "/" with "_".
database_slug = secure_filename(model.database.database_name)
@@ -50,7 +49,10 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
else:
schema_slug = secure_filename(model.schema)
file_name = f"queries/{database_slug}/{schema_slug}/{query_slug}.yaml"
+return file_name
+@staticmethod
+def _file_content(model: SavedQuery) -> str:
payload = model.export_to_dict(
recursive=False,
include_parent_ref=False,
@@ -61,10 +63,19 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
payload["database_uuid"] = str(model.database.uuid)
file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_name, file_content
+return file_content
-# include database as well
-if export_related:
+@staticmethod
+def _export(
+model: SavedQuery, export_related: bool = True
+) -> Iterator[tuple[str, Callable[[], str]]]:
+yield ExportSavedQueriesCommand._file_name(
+model
+), lambda: ExportSavedQueriesCommand._file_content(model)
+if export_related: # TODO: Maybe we can use database export command here?
+# include database as well
database_slug = secure_filename(model.database.database_name)
file_name = f"databases/{database_slug}.yaml"
payload = model.database.export_to_dict(
@@ -84,4 +95,4 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
payload["version"] = EXPORT_VERSION
file_content = yaml.safe_dump(payload, sort_keys=False)
-yield file_name, file_content
+yield file_name, lambda: file_content
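One caveat visible in this last hunk: `file_content` is still computed eagerly on the line above, so `lambda: file_content` wraps an already-rendered string and defers nothing. The truly lazy forms used by the database and dataset commands (`functools.partial(yaml.safe_dump, payload, ...)` and `lambda: yaml.safe_dump(payload, ...)`) push the dump itself behind the call.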

View File

@@ -756,7 +756,7 @@ class DashboardRestApi(BaseSupersetModelRestApi):
requested_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
except DashboardNotFoundError:
return self.response_404()
buf.seek(0)

View File

@@ -1096,7 +1096,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
requested_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
except DatabaseNotFoundError:
return self.response_404()
buf.seek(0)

View File

@@ -529,7 +529,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
requested_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
except DatasetNotFoundError:
return self.response_404()
buf.seek(0)

View File

@@ -80,7 +80,7 @@ class ImportExportRestApi(BaseSupersetApi):
with ZipFile(buf, "w") as bundle:
for file_name, file_content in ExportAssetsCommand().run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
buf.seek(0)
response = send_file(

View File

@@ -276,7 +276,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
requested_ids
).run():
with bundle.open(f"{root}/{file_name}", "w") as fp:
-fp.write(file_content.encode())
+fp.write(file_content().encode())
except SavedQueryNotFoundError:
return self.response_404()
buf.seek(0)

View File

@@ -75,7 +75,7 @@ class TestExportChartsCommand(SupersetTestCase):
assert expected == list(contents.keys())
metadata = yaml.safe_load(
-contents[f"charts/Energy_Sankey_{example_chart.id}.yaml"]
+contents[f"charts/Energy_Sankey_{example_chart.id}.yaml"]()
)
assert metadata == {
@@ -133,7 +133,7 @@ class TestExportChartsCommand(SupersetTestCase):
contents = dict(command.run())
metadata = yaml.safe_load(
-contents[f"charts/Energy_Sankey_{example_chart.id}.yaml"]
+contents[f"charts/Energy_Sankey_{example_chart.id}.yaml"]()
)
assert list(metadata.keys()) == [
"slice_name",

View File

@@ -78,7 +78,7 @@ class TestExportDashboardsCommand(SupersetTestCase):
assert expected_paths == set(contents.keys())
metadata = yaml.safe_load(
-contents[f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml"]
+contents[f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml"]()
)
# remove chart UUIDs from metadata so we can compare
@@ -269,7 +269,7 @@ class TestExportDashboardsCommand(SupersetTestCase):
contents = dict(command.run())
metadata = yaml.safe_load(
-contents[f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml"]
+contents[f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml"]()
)
assert list(metadata.keys()) == [
"dashboard_title",

View File

@@ -158,7 +158,7 @@ class TestExportDatabasesCommand(SupersetTestCase):
big_int_type = "BIGINT(20)"
else:
big_int_type = "BIGINT"
-metadata = yaml.safe_load(contents["databases/examples.yaml"])
+metadata = yaml.safe_load(contents["databases/examples.yaml"]())
assert metadata == (
{
"allow_csv_upload": True,
@@ -176,7 +176,7 @@ class TestExportDatabasesCommand(SupersetTestCase):
}
)
-metadata = yaml.safe_load(contents["datasets/examples/birth_names.yaml"])
+metadata = yaml.safe_load(contents["datasets/examples/birth_names.yaml"]())
metadata.pop("uuid")
metadata["columns"].sort(key=lambda x: x["column_name"])
@@ -359,7 +359,7 @@ class TestExportDatabasesCommand(SupersetTestCase):
command = ExportDatabasesCommand([example_db.id])
contents = dict(command.run())
-metadata = yaml.safe_load(contents["databases/examples.yaml"])
+metadata = yaml.safe_load(contents["databases/examples.yaml"]())
assert list(metadata.keys()) == [
"database_name",
"sqlalchemy_uri",

View File

@@ -82,7 +82,7 @@ class TestExportDatasetsCommand(SupersetTestCase):
"databases/examples.yaml",
]
-metadata = yaml.safe_load(contents["datasets/examples/energy_usage.yaml"])
+metadata = yaml.safe_load(contents["datasets/examples/energy_usage.yaml"]())
# sort columns for deterministic comparison
metadata["columns"] = sorted(metadata["columns"], key=itemgetter("column_name"))
@@ -216,7 +216,7 @@ class TestExportDatasetsCommand(SupersetTestCase):
command = ExportDatasetsCommand([example_dataset.id])
contents = dict(command.run())
-metadata = yaml.safe_load(contents["datasets/examples/energy_usage.yaml"])
+metadata = yaml.safe_load(contents["datasets/examples/energy_usage.yaml"]())
assert list(metadata.keys()) == [
"table_name",
"main_dttm_col",

View File

@@ -38,7 +38,7 @@ class TestExportModelsCommand(SupersetTestCase):
command = ExportDatabasesCommand([example_db.id])
contents = dict(command.run())
-metadata = yaml.safe_load(contents["metadata.yaml"])
+metadata = yaml.safe_load(contents["metadata.yaml"]())
assert metadata == (
{
"version": "1.0.0",

View File

@@ -70,7 +70,9 @@ class TestExportSavedQueriesCommand(SupersetTestCase):
]
assert expected == list(contents.keys())
-metadata = yaml.safe_load(contents["queries/examples/schema1/The_answer.yaml"])
+metadata = yaml.safe_load(
+contents["queries/examples/schema1/The_answer.yaml"]()
+)
assert metadata == {
"schema": "schema1",
"label": "The answer",
@@ -127,7 +129,9 @@ class TestExportSavedQueriesCommand(SupersetTestCase):
command = ExportSavedQueriesCommand([self.example_query.id])
contents = dict(command.run())
-metadata = yaml.safe_load(contents["queries/examples/schema1/The_answer.yaml"])
+metadata = yaml.safe_load(
+contents["queries/examples/schema1/The_answer.yaml"]()
+)
assert list(metadata.keys()) == [
"schema",
"label",

View File

@@ -32,9 +32,9 @@ def test_export_assets_command(mocker: MockFixture) -> None:
ExportDatabasesCommand.return_value.run.return_value = [
(
"metadata.yaml",
-"version: 1.0.0\ntype: Database\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
+lambda: "version: 1.0.0\ntype: Database\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
),
-("databases/example.yaml", "<DATABASE CONTENTS>"),
+("databases/example.yaml", lambda: "<DATABASE CONTENTS>"),
]
ExportDatasetsCommand = mocker.patch(
"superset.commands.export.assets.ExportDatasetsCommand"
@@ -42,9 +42,9 @@ def test_export_assets_command(mocker: MockFixture) -> None:
ExportDatasetsCommand.return_value.run.return_value = [
(
"metadata.yaml",
-"version: 1.0.0\ntype: Dataset\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
+lambda: "version: 1.0.0\ntype: Dataset\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
),
-("datasets/example/dataset.yaml", "<DATASET CONTENTS>"),
+("datasets/example/dataset.yaml", lambda: "<DATASET CONTENTS>"),
]
ExportChartsCommand = mocker.patch(
"superset.commands.export.assets.ExportChartsCommand"
@@ -52,9 +52,9 @@ def test_export_assets_command(mocker: MockFixture) -> None:
ExportChartsCommand.return_value.run.return_value = [
(
"metadata.yaml",
-"version: 1.0.0\ntype: Slice\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
+lambda: "version: 1.0.0\ntype: Slice\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
),
-("charts/pie.yaml", "<CHART CONTENTS>"),
+("charts/pie.yaml", lambda: "<CHART CONTENTS>"),
]
ExportDashboardsCommand = mocker.patch(
"superset.commands.export.assets.ExportDashboardsCommand"
@@ -62,9 +62,9 @@ def test_export_assets_command(mocker: MockFixture) -> None:
ExportDashboardsCommand.return_value.run.return_value = [
(
"metadata.yaml",
-"version: 1.0.0\ntype: Dashboard\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
+lambda: "version: 1.0.0\ntype: Dashboard\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
),
-("dashboards/sales.yaml", "<DASHBOARD CONTENTS>"),
+("dashboards/sales.yaml", lambda: "<DASHBOARD CONTENTS>"),
]
ExportSavedQueriesCommand = mocker.patch(
"superset.commands.export.assets.ExportSavedQueriesCommand"
@@ -72,14 +72,14 @@ def test_export_assets_command(mocker: MockFixture) -> None:
ExportSavedQueriesCommand.return_value.run.return_value = [
(
"metadata.yaml",
-"version: 1.0.0\ntype: SavedQuery\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
+lambda: "version: 1.0.0\ntype: SavedQuery\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
),
-("queries/example/metric.yaml", "<SAVED QUERY CONTENTS>"),
+("queries/example/metric.yaml", lambda: "<SAVED QUERY CONTENTS>"),
]
with freeze_time("2022-01-01T00:00:00Z"):
command = ExportAssetsCommand()
-output = list(command.run())
+output = [(file[0], file[1]()) for file in list(command.run())]
assert output == [
(

View File

@@ -88,9 +88,22 @@ def test_export(session: Session) -> None:
extra=json.dumps({"warning_markdown": "*WARNING*"}),
)
-export = list(
-ExportDatasetsCommand._export(sqla_table) # pylint: disable=protected-access
+export = [
+(file[0], file[1]())
+for file in list(
+ExportDatasetsCommand._export(
+sqla_table
+) # pylint: disable=protected-access
+)
+]
+payload = sqla_table.export_to_dict(
+recursive=True,
+include_parent_ref=False,
+include_defaults=True,
+export_uuids=True,
+)
assert export == [
(
"datasets/my_database/my_table.yaml",
@@ -114,7 +127,7 @@ extra:
warning_markdown: '*WARNING*'
normalize_columns: false
always_filter_main_dttm: false
-uuid: null
+uuid: {payload['uuid']}
metrics:
- metric_name: cnt
verbose_name: null
@@ -129,12 +142,12 @@ metrics:
columns:
- column_name: profit
verbose_name: null
-is_dttm: null
-is_active: null
+is_dttm: false
+is_active: true
type: INTEGER
advanced_data_type: null
-groupby: null
-filterable: null
+groupby: true
+filterable: true
expression: revenue-expenses
description: null
python_date_format: null
@@ -143,47 +156,47 @@ columns:
- column_name: ds
verbose_name: null
is_dttm: 1
-is_active: null
+is_active: true
type: TIMESTAMP
advanced_data_type: null
-groupby: null
-filterable: null
+groupby: true
+filterable: true
expression: null
description: null
python_date_format: null
extra: null
- column_name: user_id
verbose_name: null
-is_dttm: null
-is_active: null
+is_dttm: false
+is_active: true
type: INTEGER
advanced_data_type: null
-groupby: null
-filterable: null
+groupby: true
+filterable: true
expression: null
description: null
python_date_format: null
extra: null
- column_name: expenses
verbose_name: null
-is_dttm: null
-is_active: null
+is_dttm: false
+is_active: true
type: INTEGER
advanced_data_type: null
-groupby: null
-filterable: null
+groupby: true
+filterable: true
expression: null
description: null
python_date_format: null
extra: null
- column_name: revenue
verbose_name: null
-is_dttm: null
-is_active: null
+is_dttm: false
+is_active: true
type: INTEGER
advanced_data_type: null
-groupby: null
-filterable: null
+groupby: true
+filterable: true
expression: null
description: null
python_date_format: null

View File

@@ -45,9 +45,16 @@ def test_export_assets(
),
("databases/example.yaml", "<DATABASE CONTENTS>"),
]
+mocked_export_result = [
+(
+"metadata.yaml",
+lambda: "version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
+),
+("databases/example.yaml", lambda: "<DATABASE CONTENTS>"),
+]
ExportAssetsCommand = mocker.patch("superset.importexport.api.ExportAssetsCommand")
-ExportAssetsCommand().run.return_value = mocked_contents[:]
+ExportAssetsCommand().run.return_value = mocked_export_result[:]
response = client.get("/api/v1/assets/export/")
assert response.status_code == 200