mirror of
https://github.com/apache/superset.git
synced 2024-09-19 20:19:37 -04:00
f0c0ef7048
* Test that failing export or import is done properly For each CLI entry-point we will modify, we make sure that: - a failing process exits with a non-0 exit code, - an error is logged. Signed-off-by: Étienne Boisseau-Sierra <etienne.boisseau-sierra@unipart.io> * Exit process with error if export/import failed Bubble exception up when failing import or export During a CLI import or export of dashboards, if the process fails, the exception it caught and a simple message is sent to the logger. This makes that from a shell point of view, the script was successfull — cf. #16956. To prevent this, we want to ensure that the process exits with an error (i.e., a non-0 exit-code) should the export or import fail mid-flight. Signed-off-by: Étienne Boisseau-Sierra <etienne.boisseau-sierra@unipart.io>
361 lines
11 KiB
Python
361 lines
11 KiB
Python
# Licensed to the Apache Software Foundation (ASF) under one
|
|
# or more contributor license agreements. See the NOTICE file
|
|
# distributed with this work for additional information
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
# to you under the Apache License, Version 2.0 (the
|
|
# "License"); you may not use this file except in compliance
|
|
# with the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing,
|
|
# software distributed under the License is distributed on an
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
# KIND, either express or implied. See the License for the
|
|
# specific language governing permissions and limitations
|
|
# under the License.
|
|
|
|
import importlib
|
|
import json
|
|
import logging
|
|
from pathlib import Path
|
|
from unittest import mock
|
|
from zipfile import is_zipfile, ZipFile
|
|
|
|
import pytest
|
|
import yaml
|
|
from freezegun import freeze_time
|
|
|
|
import superset.cli
|
|
from superset import app
|
|
from tests.integration_tests.fixtures.birth_names_dashboard import (
|
|
load_birth_names_dashboard_with_slices,
|
|
)
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
def assert_cli_fails_properly(response, caplog):
    """
    Check that a CLI invocation failed the way we expect it to.

    A properly failing command must exit with a non-zero status code
    and must have emitted an ERROR record as its final log entry.
    """
    # a non-zero exit code signals failure to the calling shell
    assert response.exit_code != 0

    # the most recent captured log record must report the error
    assert caplog.records[-1].levelname == "ERROR"
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_export_dashboards_original(app_context, fs):
    """
    Test that dashboards are exported to a valid JSON file.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload so export_dashboards is rebuilt against the current
    # feature flags
    importlib.reload(superset.cli)

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.export_dashboards, ("-f", "dashboards.json")
    )

    assert response.exit_code == 0
    export_file = Path("dashboards.json")
    assert export_file.exists()

    # the exported file must parse as JSON
    json.loads(export_file.read_text())
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_export_datasources_original(app_context, fs):
    """
    Test that a YAML file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload to define export_datasources correctly based on the
    # feature flags
    importlib.reload(superset.cli)

    runner = app.test_cli_runner()
    response = runner.invoke(
        superset.cli.export_datasources, ("-f", "datasources.yaml")
    )

    assert response.exit_code == 0
    assert Path("datasources.yaml").exists()

    # check that file is valid YAML
    with open("datasources.yaml") as fp:
        contents = fp.read()
    yaml.safe_load(contents)
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
def test_export_dashboards_versioned_export(app_context, fs):
    """
    Test that dashboards are exported as a ZIP bundle when
    VERSIONED_EXPORT is enabled.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload so the CLI picks up the patched feature flags
    importlib.reload(superset.cli)

    cli_runner = app.test_cli_runner()
    # freeze the clock so the export file name is deterministic
    with freeze_time("2021-01-01T00:00:00Z"):
        response = cli_runner.invoke(superset.cli.export_dashboards, ())

    assert response.exit_code == 0
    export_path = Path("dashboard_export_20210101T000000.zip")
    assert export_path.exists()

    assert is_zipfile(export_path)
|
|
|
|
|
|
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
    "superset.dashboards.commands.export.ExportDashboardsCommand.run",
    side_effect=Exception(),
)
def test_failing_export_dashboards_versioned_export(
    export_dashboards_command, app_context, fs, caplog
):
    """
    Test that a failing dashboard ZIP export exits with an error.
    """
    # capture all records so the final-record assertion is reliable
    caplog.set_level(logging.DEBUG)

    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload so the CLI picks up the patched feature flags
    importlib.reload(superset.cli)

    cli_runner = app.test_cli_runner()
    with freeze_time("2021-01-01T00:00:00Z"):
        response = cli_runner.invoke(superset.cli.export_dashboards, ())

    assert_cli_fails_properly(response, caplog)
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
def test_export_datasources_versioned_export(app_context, fs):
    """
    Test that a ZIP file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload to define export_datasources correctly based on the
    # feature flags
    importlib.reload(superset.cli)

    runner = app.test_cli_runner()
    # freeze the clock so the export file name is deterministic
    with freeze_time("2021-01-01T00:00:00Z"):
        response = runner.invoke(superset.cli.export_datasources, ())

    assert response.exit_code == 0
    assert Path("dataset_export_20210101T000000.zip").exists()

    assert is_zipfile("dataset_export_20210101T000000.zip")
|
|
|
|
|
|
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
    "superset.dashboards.commands.export.ExportDatasetsCommand.run",
    side_effect=Exception(),
)
def test_failing_export_datasources_versioned_export(
    export_datasets_command, app_context, fs, caplog
):
    """
    Test that failing to export ZIP file is done elegantly.
    """
    # capture all records so the error-log assertion below is reliable,
    # matching the dashboards variant of this test
    caplog.set_level(logging.DEBUG)

    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload to define export_datasources correctly based on the
    # feature flags
    importlib.reload(superset.cli)

    runner = app.test_cli_runner()
    with freeze_time("2021-01-01T00:00:00Z"):
        response = runner.invoke(superset.cli.export_datasources, ())

    assert_cli_fails_properly(response, caplog)
|
|
|
|
|
|
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch("superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand")
def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
    """
    Test that dashboards can be imported from both a JSON file and a
    ZIP bundle.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload so the CLI picks up the patched feature flags
    importlib.reload(superset.cli)

    # first: import from a plain JSON file
    Path("dashboards.json").write_text('{"hello": "world"}')

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_dashboards, ("-p", "dashboards.json")
    )

    assert response.exit_code == 0
    import_dashboards_command.assert_called_with(
        {"dashboards.json": '{"hello": "world"}'}, overwrite=True
    )

    # second: import from a ZIP bundle
    with ZipFile("dashboards.zip", "w") as bundle:
        with bundle.open("dashboards/dashboard.yaml", "w") as fp:
            fp.write(b"hello: world")

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_dashboards, ("-p", "dashboards.zip")
    )

    assert response.exit_code == 0
    import_dashboards_command.assert_called_with(
        {"dashboard.yaml": "hello: world"}, overwrite=True
    )
|
|
|
|
|
|
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
    "superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand.run",
    side_effect=Exception(),
)
def test_failing_import_dashboards_versioned_export(
    import_dashboards_command, app_context, fs, caplog
):
    """
    Test that a failing dashboard import exits with an error, for both
    the JSON and the ZIP input formats.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload so the CLI picks up the patched feature flags
    importlib.reload(superset.cli)

    # first: a JSON file whose import blows up
    Path("dashboards.json").write_text('{"hello": "world"}')

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_dashboards, ("-p", "dashboards.json")
    )

    assert_cli_fails_properly(response, caplog)

    # second: a ZIP bundle whose import blows up
    with ZipFile("dashboards.zip", "w") as bundle:
        with bundle.open("dashboards/dashboard.yaml", "w") as fp:
            fp.write(b"hello: world")

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_dashboards, ("-p", "dashboards.zip")
    )

    assert_cli_fails_properly(response, caplog)
|
|
|
|
|
|
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch("superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand")
def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
    """
    Test that datasets can be imported from both a YAML file and a
    ZIP bundle.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload so the CLI picks up the patched feature flags
    importlib.reload(superset.cli)

    # first: import from a plain YAML file
    Path("datasets.yaml").write_text("hello: world")

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_datasources, ("-p", "datasets.yaml")
    )

    assert response.exit_code == 0
    import_datasets_command.assert_called_with(
        {"datasets.yaml": "hello: world"}, overwrite=True
    )

    # second: import from a ZIP bundle
    with ZipFile("datasets.zip", "w") as bundle:
        with bundle.open("datasets/dataset.yaml", "w") as fp:
            fp.write(b"hello: world")

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_datasources, ("-p", "datasets.zip")
    )

    assert response.exit_code == 0
    import_datasets_command.assert_called_with(
        {"dataset.yaml": "hello: world"}, overwrite=True
    )
|
|
|
|
|
|
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch(
    "superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand.run",
    side_effect=Exception(),
)
def test_failing_import_datasets_versioned_export(
    import_datasets_command, app_context, fs, caplog
):
    """
    Test that a failing dataset import exits with an error, for both
    the YAML and the ZIP input formats.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload so the CLI picks up the patched feature flags
    importlib.reload(superset.cli)

    # first: a YAML file whose import blows up
    Path("datasets.yaml").write_text("hello: world")

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_datasources, ("-p", "datasets.yaml")
    )

    assert_cli_fails_properly(response, caplog)

    # second: a ZIP bundle whose import blows up
    with ZipFile("datasets.zip", "w") as bundle:
        with bundle.open("datasets/dataset.yaml", "w") as fp:
            fp.write(b"hello: world")

    cli_runner = app.test_cli_runner()
    response = cli_runner.invoke(
        superset.cli.import_datasources, ("-p", "datasets.zip")
    )

    assert_cli_fails_properly(response, caplog)
|