diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py
index 405a679fde..88a87d9015 100644
--- a/superset/charts/commands/export.py
+++ b/superset/charts/commands/export.py
@@ -57,7 +57,7 @@ class ExportChartsCommand(ExportModelsCommand):
         # becomes the default export endpoint
         for key in REMOVE_KEYS:
             del payload[key]
-        if "params" in payload:
+        if payload.get("params"):
             try:
                 payload["params"] = json.loads(payload["params"])
             except json.decoder.JSONDecodeError:
diff --git a/superset/dashboards/commands/export.py b/superset/dashboards/commands/export.py
index 8f103d73c3..9a1af4721f 100644
--- a/superset/dashboards/commands/export.py
+++ b/superset/dashboards/commands/export.py
@@ -108,7 +108,7 @@ class ExportDashboardsCommand(ExportModelsCommand):
         # TODO (betodealmeida): move this logic to export_to_dict once this
         # becomes the default export endpoint
         for key, new_name in JSON_KEYS.items():
-            if key in payload:
+            if payload.get(key):
                 value = payload.pop(key)
                 try:
                     payload[new_name] = json.loads(value)
diff --git a/superset/dashboards/commands/importers/v1/utils.py b/superset/dashboards/commands/importers/v1/utils.py
index de3e4e84a6..5052080957 100644
--- a/superset/dashboards/commands/importers/v1/utils.py
+++ b/superset/dashboards/commands/importers/v1/utils.py
@@ -109,8 +109,8 @@ def import_dashboard(
             value = config.pop(key)
             try:
                 config[new_name] = json.dumps(value)
-            except json.decoder.JSONDecodeError:
-                logger.info("Unable to decode `%s` field: %s", key, value)
+            except TypeError:
+                logger.info("Unable to encode `%s` field: %s", key, value)

     dashboard = Dashboard.import_from_dict(session, config, recursive=False)
     if dashboard.id is None:
diff --git a/superset/databases/commands/export.py b/superset/databases/commands/export.py
index e8937867d1..f373ce101a 100644
--- a/superset/databases/commands/export.py
+++ b/superset/databases/commands/export.py
@@ -50,7 +50,7 @@ class ExportDatabasesCommand(ExportModelsCommand):
         )
         # TODO (betodealmeida): move this logic to export_to_dict once this
         # becomes the default export endpoint
-        if "extra" in payload:
+        if payload.get("extra"):
             try:
                 payload["extra"] = json.loads(payload["extra"])
             except json.decoder.JSONDecodeError:
diff --git a/superset/datasets/commands/export.py b/superset/datasets/commands/export.py
index e86b932295..64946f48f2 100644
--- a/superset/datasets/commands/export.py
+++ b/superset/datasets/commands/export.py
@@ -31,6 +31,8 @@ from superset.utils.dict_import_export import EXPORT_VERSION

 logger = logging.getLogger(__name__)

+JSON_KEYS = {"params", "template_params", "extra"}
+

 class ExportDatasetsCommand(ExportModelsCommand):
@@ -49,6 +51,20 @@ class ExportDatasetsCommand(ExportModelsCommand):
             include_defaults=True,
             export_uuids=True,
         )
+        # TODO (betodealmeida): move this logic to export_to_dict once this
+        # becomes the default export endpoint
+        for key in JSON_KEYS:
+            if payload.get(key):
+                try:
+                    payload[key] = json.loads(payload[key])
+                except json.decoder.JSONDecodeError:
+                    logger.info("Unable to decode `%s` field: %s", key, payload[key])
+        for metric in payload.get("metrics", []):
+            if metric.get("extra"):
+                try:
+                    metric["extra"] = json.loads(metric["extra"])
+                except json.decoder.JSONDecodeError:
+                    logger.info("Unable to decode `extra` field: %s", metric["extra"])

         payload["version"] = EXPORT_VERSION
         payload["database_uuid"] = str(model.database.uuid)
@@ -67,7 +83,7 @@ class ExportDatasetsCommand(ExportModelsCommand):
         )
         # TODO (betodealmeida): move this logic to export_to_dict once this
         # becomes the default export endpoint
-        if "extra" in payload:
+        if payload.get("extra"):
             try:
                 payload["extra"] = json.loads(payload["extra"])
             except json.decoder.JSONDecodeError:
diff --git a/superset/datasets/commands/importers/v1/utils.py b/superset/datasets/commands/importers/v1/utils.py
index 99326f3c3d..1857e05245 100644
--- a/superset/datasets/commands/importers/v1/utils.py
+++ b/superset/datasets/commands/importers/v1/utils.py
@@ -15,12 +15,18 @@
 # specific language governing permissions and limitations
 # under the License.

+import json
+import logging
 from typing import Any, Dict

 from sqlalchemy.orm import Session

 from superset.connectors.sqla.models import SqlaTable

+logger = logging.getLogger(__name__)
+
+JSON_KEYS = {"params", "template_params", "extra"}
+

 def import_dataset(
     session: Session, config: Dict[str, Any], overwrite: bool = False
@@ -31,6 +37,21 @@ def import_dataset(
             return existing
         config["id"] = existing.id

+    # TODO (betodealmeida): move this logic to import_from_dict
+    config = config.copy()
+    for key in JSON_KEYS:
+        if config.get(key):
+            try:
+                config[key] = json.dumps(config[key])
+            except TypeError:
+                logger.info("Unable to encode `%s` field: %s", key, config[key])
+    for metric in config.get("metrics", []):
+        if metric.get("extra"):
+            try:
+                metric["extra"] = json.dumps(metric["extra"])
+            except TypeError:
+                logger.info("Unable to encode `extra` field: %s", metric["extra"])
+
     # should we delete columns and metrics not present in the current import?
     sync = ["columns", "metrics"] if overwrite else []
diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py
index 2703228739..4cbb2e05f4 100644
--- a/superset/datasets/schemas.py
+++ b/superset/datasets/schemas.py
@@ -145,7 +145,7 @@ class ImportV1MetricSchema(Schema):
     expression = fields.String(required=True)
     description = fields.String(allow_none=True)
     d3format = fields.String(allow_none=True)
-    extra = fields.String(allow_none=True)
+    extra = fields.Dict(allow_none=True)
     warning_text = fields.String(allow_none=True)


@@ -158,11 +158,11 @@ class ImportV1DatasetSchema(Schema):
     cache_timeout = fields.Integer(allow_none=True)
     schema = fields.String(allow_none=True)
     sql = fields.String(allow_none=True)
-    params = fields.String(allow_none=True)
-    template_params = fields.String(allow_none=True)
+    params = fields.Dict(allow_none=True)
+    template_params = fields.Dict(allow_none=True)
     filter_select_enabled = fields.Boolean()
     fetch_values_predicate = fields.String(allow_none=True)
-    extra = fields.String(allow_none=True)
+    extra = fields.Dict(allow_none=True)
     uuid = fields.UUID(required=True)
     columns = fields.List(fields.Nested(ImportV1ColumnSchema))
     metrics = fields.List(fields.Nested(ImportV1MetricSchema))
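
For context, a minimal, self-contained sketch of the round trip this diff implements, under the assumption that `params`, `template_params`, and `extra` are stored on the models as JSON strings: exports decode them into dicts so they serialize as readable YAML mappings, and imports re-encode them with `json.dumps`, which fails with `TypeError` rather than `JSONDecodeError`. The helper names `export_json_fields` and `import_json_fields` are illustrative only, not Superset APIs; the real logic lives in the export commands and importer utils changed above.

```python
# Illustrative sketch; helper names are hypothetical, not Superset APIs.
import json
import logging

logger = logging.getLogger(__name__)

JSON_KEYS = {"params", "template_params", "extra"}


def export_json_fields(payload: dict) -> dict:
    """Decode JSON-string fields into dicts so they export as YAML mappings."""
    for key in JSON_KEYS:
        # payload.get(key) skips missing keys *and* empty/None values, which the
        # old `key in payload` check would have passed straight to json.loads().
        if payload.get(key):
            try:
                payload[key] = json.loads(payload[key])
            except json.decoder.JSONDecodeError:
                logger.info("Unable to decode `%s` field: %s", key, payload[key])
    return payload


def import_json_fields(config: dict) -> dict:
    """Re-encode dict fields as JSON strings before writing them to the model."""
    for key in JSON_KEYS:
        if config.get(key):
            try:
                config[key] = json.dumps(config[key])
            except TypeError:  # json.dumps raises TypeError, not JSONDecodeError
                logger.info("Unable to encode `%s` field: %s", key, config[key])
    return config


# Round trip: a stored JSON string becomes a dict on export and a string again on import.
exported = export_json_fields({"params": '{"remote_id": 3}', "extra": None})
assert exported["params"] == {"remote_id": 3}
restored = import_json_fields(dict(exported))
assert restored["params"] == '{"remote_id": 3}'
```

Switching the marshmallow fields from `fields.String` to `fields.Dict` is what allows the import schemas to accept the decoded mappings that the new export code writes.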