feat: import configuration from directory (#15686)

* feat: command to import configuration from a directory

This allows us to keep a Superset instance updated from configs stored in a repo:

```bash
$ superset import-directory /path/to/configs/
```

For example, I created a simple dashboard with a single chart:

PLACEHOLDER

I then exported it to a file `dashboard_export_20210714T104600.zip` and
unzipped it. After deleting the dashboard, chart, dataset, and database,
I imported everything back with:

```bash
$ superset import-directory ~/Downloads/dashboard_export_20210714T104600/
```

I then changed the chart title in `~/Downloads/dashboard_export_20210714T104600/charts/Cnt_per_country_1.yaml` and ran the command again. The chart was successfully updated:

PLACEHOLDER

* Small fixes
Beto Dealmeida 2021-08-11 18:42:50 -07:00 committed by GitHub
parent 5a8484185b
commit 7de54d016e
6 changed files with 86 additions and 9 deletions


@@ -168,7 +168,7 @@ def load_examples_run(
examples.load_big_data()
# load examples that are stored as YAML config files
examples.load_from_configs(force, load_test_data)
examples.load_examples_from_configs(force, load_test_data)
@with_appcontext
@@ -187,10 +187,28 @@ def load_examples(
only_metadata: bool = False,
force: bool = False,
) -> None:
"""Loads a set of Slices and Dashboards and a supporting dataset """
"""Loads a set of Slices and Dashboards and a supporting dataset"""
load_examples_run(load_test_data, load_big_data, only_metadata, force)
@with_appcontext
@superset.command()
@click.argument("directory")
@click.option(
"--overwrite", "-o", is_flag=True, help="Overwriting existing metadata definitions"
)
@click.option(
"--force", "-f", is_flag=True, help="Force load data even if table already exists"
)
def import_directory(directory: str, overwrite: bool, force: bool) -> None:
"""Imports configs from a given directory"""
from superset.examples.utils import load_configs_from_directory
load_configs_from_directory(
root=Path(directory), overwrite=overwrite, force_data=force,
)
@with_appcontext
@superset.command()
@click.option("--database_name", "-d", help="Database name to change")


@@ -14,8 +14,9 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=protected-access
from typing import Any, Dict, List, Tuple
from typing import Any, Dict, List, Set, Tuple
from marshmallow import Schema
from sqlalchemy.orm import Session
@@ -23,19 +24,23 @@ from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.sql import select
from superset import db
from superset.charts.commands.importers.v1 import ImportChartsCommand
from superset.charts.commands.importers.v1.utils import import_chart
from superset.charts.schemas import ImportV1ChartSchema
from superset.commands.exceptions import CommandException
from superset.commands.importers.v1 import ImportModelsCommand
from superset.dao.base import BaseDAO
from superset.dashboards.commands.importers.v1 import ImportDashboardsCommand
from superset.dashboards.commands.importers.v1.utils import (
find_chart_uuids,
import_dashboard,
update_id_refs,
)
from superset.dashboards.schemas import ImportV1DashboardSchema
from superset.databases.commands.importers.v1 import ImportDatabasesCommand
from superset.databases.commands.importers.v1.utils import import_database
from superset.databases.schemas import ImportV1DatabaseSchema
from superset.datasets.commands.importers.v1 import ImportDatasetsCommand
from superset.datasets.commands.importers.v1.utils import import_dataset
from superset.datasets.schemas import ImportV1DatasetSchema
from superset.models.core import Database
@@ -71,6 +76,15 @@ class ImportExamplesCommand(ImportModelsCommand):
db.session.rollback()
raise self.import_error()
@classmethod
def _get_uuids(cls) -> Set[str]:
return (
ImportDatabasesCommand._get_uuids()
| ImportDatasetsCommand._get_uuids()
| ImportChartsCommand._get_uuids()
| ImportDashboardsCommand._get_uuids()
)
# pylint: disable=too-many-locals, arguments-differ, too-many-branches
@staticmethod
def _import(
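
The `_get_uuids` override above pools the UUIDs tracked by each asset importer, so "does this object already exist?" can be answered across databases, datasets, charts, and dashboards at once. A toy, standalone illustration of that set-union idea (the UUID values are made up, this is not Superset code):

```python
# Toy illustration of the union in _get_uuids: the per-type UUID sets are
# pooled into a single lookup set of everything already known to Superset.
existing_databases = {"db-uuid-1"}
existing_datasets = {"ds-uuid-1", "ds-uuid-2"}
existing_charts = {"chart-uuid-1"}
existing_dashboards = set()

existing = existing_databases | existing_datasets | existing_charts | existing_dashboards

# A config from an import bundle is treated as pre-existing if its UUID is
# already known, regardless of which asset type it belongs to.
print("chart-uuid-1" in existing)  # True
print("dash-uuid-9" in existing)   # False
```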


@@ -119,7 +119,7 @@ def update_id_refs(
child["meta"]["chartId"] = chart_ids[child["meta"]["uuid"]]
# fix native filter references
native_filter_configuration = fixed["metadata"].get(
native_filter_configuration = fixed.get("metadata", {}).get(
"native_filter_configuration", []
)
for native_filter in native_filter_configuration:
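
The one-line change above makes `update_id_refs` tolerant of dashboard configs that have no `metadata` key. A standalone illustration of the difference (the config dict here is made up and trimmed down):

```python
# A dashboard config without a "metadata" key (hypothetical, trimmed down).
fixed = {"uuid": "abc123", "position": {}}

# Old form: fixed["metadata"].get("native_filter_configuration", []) -> KeyError
# New form: falls back to an empty dict, then to an empty filter list.
native_filter_configuration = fixed.get("metadata", {}).get(
    "native_filter_configuration", []
)
assert native_filter_configuration == []
```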


@@ -119,10 +119,12 @@ def import_dataset(
example_database = get_example_database()
try:
table_exists = example_database.has_table_by_name(dataset.table_name)
except Exception as ex:
except Exception: # pylint: disable=broad-except
# MySQL doesn't play nice with GSheets table names
logger.warning("Couldn't check if table %s exists, stopping import")
raise ex
logger.warning(
"Couldn't check if table %s exists, assuming it does", dataset.table_name
)
table_exists = True
if data_uri and (not table_exists or force_data):
load_data(data_uri, dataset, example_database, session)
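
With the change above, a failing table-existence check no longer aborts the import: the importer logs a warning, assumes the table exists, and only (re)loads data when the table is missing or `--force` is passed. A hedged sketch of that guard with a stand-in checker function (the names here are illustrative, not Superset's API):

```python
import logging
from typing import Callable, Optional

logger = logging.getLogger(__name__)

def should_load_data(
    data_uri: Optional[str],
    table_name: str,
    check_exists: Callable[[str], bool],
    force_data: bool = False,
) -> bool:
    try:
        table_exists = check_exists(table_name)
    except Exception:  # pylint: disable=broad-except
        # Mirrors the diff: warn and assume the table exists rather than abort.
        logger.warning(
            "Couldn't check if table %s exists, assuming it does", table_name
        )
        table_exists = True
    return bool(data_uri) and (not table_exists or force_data)
```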


@@ -30,5 +30,5 @@ from .paris import load_paris_iris_geojson
from .random_time_series import load_random_time_series_data
from .sf_population_polygons import load_sf_population_polygons
from .tabbed_dashboard import load_tabbed_dashboard
from .utils import load_from_configs
from .utils import load_examples_from_configs
from .world_bank import load_world_bank_health_n_pop


@@ -14,18 +14,29 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import re
from pathlib import Path
from typing import Any, Dict
import yaml
from pkg_resources import resource_isdir, resource_listdir, resource_stream
from superset.commands.exceptions import CommandInvalidError
from superset.commands.importers.v1.examples import ImportExamplesCommand
from superset.commands.importers.v1.utils import METADATA_FILE_NAME
_logger = logging.getLogger(__name__)
YAML_EXTENSIONS = {".yaml", ".yml"}
def load_from_configs(force_data: bool = False, load_test_data: bool = False) -> None:
def load_examples_from_configs(
force_data: bool = False, load_test_data: bool = False
) -> None:
"""
Load all the examples inside superset/examples/configs/.
"""
contents = load_contents(load_test_data)
command = ImportExamplesCommand(contents, overwrite=True, force_data=force_data)
command.run()
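
For completeness, the renamed helper can also be called directly; a minimal sketch, assuming an active Flask app context for the Superset app:

```python
# Minimal sketch: loading the bundled examples programmatically via the
# renamed helper (requires an active Flask app context).
from superset.examples.utils import load_examples_from_configs

load_examples_from_configs(force_data=False, load_test_data=False)
```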
@@ -55,3 +66,35 @@ def load_contents(load_test_data: bool = False) -> Dict[str, Any]:
)
return {str(path.relative_to(root)): content for path, content in contents.items()}
def load_configs_from_directory(
root: Path, overwrite: bool = True, force_data: bool = False
) -> None:
"""
Load all the examples from a given directory.
"""
contents: Dict[str, str] = {}
queue = [root]
while queue:
path_name = queue.pop()
if path_name.is_dir():
queue.extend(path_name.glob("*"))
elif path_name.suffix.lower() in YAML_EXTENSIONS:
with open(path_name) as fp:
contents[str(path_name.relative_to(root))] = fp.read()
# removing "type" from the metadata allows us to import any exported model
# from the unzipped directory directly
metadata = yaml.load(contents.get(METADATA_FILE_NAME, "{}"))
if "type" in metadata:
del metadata["type"]
contents[METADATA_FILE_NAME] = yaml.dump(metadata)
command = ImportExamplesCommand(
contents, overwrite=overwrite, force_data=force_data
)
try:
command.run()
except CommandInvalidError as ex:
_logger.error("An error occurred: %s", ex.normalized_messages())
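
Dropping `type` from `metadata.yaml` is what lets an unzipped export of any asset type be fed through the examples importer unchanged. A toy illustration of that normalization (the YAML content is made up and trimmed; real exports carry more keys):

```python
import yaml

# Hypothetical metadata.yaml content from an unzipped export bundle.
metadata_yaml = """
version: 1.0.0
type: Dashboard
timestamp: '2021-07-14T10:46:00+00:00'
"""

metadata = yaml.safe_load(metadata_yaml)
metadata.pop("type", None)  # same effect as the `del metadata["type"]` above
print(yaml.dump(metadata))  # version and timestamp survive, type is gone
```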