2019-01-15 18:53:27 -05:00
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
# or more contributor license agreements. See the NOTICE file
|
|
|
|
# distributed with this work for additional information
|
|
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
|
|
# to you under the Apache License, Version 2.0 (the
|
|
|
|
# "License"); you may not use this file except in compliance
|
|
|
|
# with the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing,
|
|
|
|
# software distributed under the License is distributed on an
|
|
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
# KIND, either express or implied. See the License for the
|
|
|
|
# specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2019-11-20 10:47:06 -05:00
|
|
|
# isort:skip_file
|
2017-12-05 14:14:52 -05:00
|
|
|
"""Unit tests for Superset's dict import/export functionality"""
|
|
|
|
import json
|
|
|
|
import unittest
|
2020-10-07 12:00:55 -04:00
|
|
|
from uuid import uuid4
|
2017-12-05 14:14:52 -05:00
|
|
|
|
|
|
|
import yaml
|
|
|
|
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.test_app import app
|
2017-12-05 14:14:52 -05:00
|
|
|
from superset import db
|
2022-05-04 15:48:48 -04:00
|
|
|
|
2017-12-05 14:14:52 -05:00
|
|
|
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
|
2022-01-16 01:32:50 -05:00
|
|
|
from superset.utils.database import get_example_database
|
2019-11-10 11:29:51 -05:00
|
|
|
from superset.utils.dict_import_export import export_to_dict
|
2019-10-18 17:44:27 -04:00
|
|
|
|
2017-12-05 14:14:52 -05:00
|
|
|
from .base_tests import SupersetTestCase
|
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
# Marker key stored in each imported table's ``params`` so that rows created
# by these tests can be recognized (and cleaned up) afterwards.
DBREF = "dict_import__export_test"
# Prefix applied to every table name generated by these tests to avoid
# clashing with real fixture tables.
NAME_PREFIX = "dict_"
# Base offset for generated ids so they do not collide with existing rows.
ID_PREFIX = 20000
|
|
|
|
|
|
|
|
|
2020-06-29 18:36:06 -04:00
|
|
|
class TestDictImportExport(SupersetTestCase):
|
2017-12-05 14:14:52 -05:00
|
|
|
"""Testing export import functionality for tables and datasources"""
|
2019-06-25 16:34:48 -04:00
|
|
|
|
2017-12-05 14:14:52 -05:00
|
|
|
@classmethod
|
|
|
|
def delete_imports(cls):
|
2019-11-20 10:47:06 -05:00
|
|
|
with app.app_context():
|
|
|
|
# Imported data clean up
|
2020-08-06 18:33:48 -04:00
|
|
|
session = db.session
|
|
|
|
for table in session.query(SqlaTable):
|
2019-11-20 10:47:06 -05:00
|
|
|
if DBREF in table.params_dict:
|
2020-08-06 18:33:48 -04:00
|
|
|
session.delete(table)
|
|
|
|
session.commit()
|
2017-12-05 14:14:52 -05:00
|
|
|
|
|
|
|
    @classmethod
    def setUpClass(cls):
        # Start from a clean slate: drop leftovers from earlier/aborted runs.
        cls.delete_imports()
|
|
|
|
|
|
|
|
    @classmethod
    def tearDownClass(cls):
        # Clean up everything this test class imported into the database.
        cls.delete_imports()
|
|
|
|
|
2020-10-07 12:00:55 -04:00
|
|
|
def create_table(
|
2021-08-02 15:45:55 -04:00
|
|
|
self, name, schema=None, id=0, cols_names=[], cols_uuids=None, metric_names=[]
|
2020-10-07 12:00:55 -04:00
|
|
|
):
|
2019-06-25 16:34:48 -04:00
|
|
|
database_name = "main"
|
|
|
|
name = "{0}{1}".format(NAME_PREFIX, name)
|
|
|
|
params = {DBREF: id, "database_name": database_name}
|
2017-12-05 14:14:52 -05:00
|
|
|
|
2020-10-07 12:00:55 -04:00
|
|
|
if cols_uuids is None:
|
|
|
|
cols_uuids = [None] * len(cols_names)
|
|
|
|
|
2017-12-05 14:14:52 -05:00
|
|
|
dict_rep = {
|
2019-09-08 13:18:09 -04:00
|
|
|
"database_id": get_example_database().id,
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_name": name,
|
|
|
|
"schema": schema,
|
|
|
|
"id": id,
|
|
|
|
"params": json.dumps(params),
|
2020-10-07 12:00:55 -04:00
|
|
|
"columns": [
|
|
|
|
{"column_name": c, "uuid": u} for c, u in zip(cols_names, cols_uuids)
|
|
|
|
],
|
2019-06-25 16:34:48 -04:00
|
|
|
"metrics": [{"metric_name": c, "expression": ""} for c in metric_names],
|
2017-12-05 14:14:52 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
table = SqlaTable(
|
2019-06-25 16:34:48 -04:00
|
|
|
id=id, schema=schema, table_name=name, params=json.dumps(params)
|
2017-12-05 14:14:52 -05:00
|
|
|
)
|
2020-10-07 12:00:55 -04:00
|
|
|
for col_name, uuid in zip(cols_names, cols_uuids):
|
|
|
|
table.columns.append(TableColumn(column_name=col_name, uuid=uuid))
|
2017-12-05 14:14:52 -05:00
|
|
|
for metric_name in metric_names:
|
2019-06-25 16:34:48 -04:00
|
|
|
table.metrics.append(SqlMetric(metric_name=metric_name, expression=""))
|
2017-12-05 14:14:52 -05:00
|
|
|
return table, dict_rep
|
|
|
|
|
|
|
|
def yaml_compare(self, obj_1, obj_2):
|
|
|
|
obj_1_str = yaml.safe_dump(obj_1, default_flow_style=False)
|
|
|
|
obj_2_str = yaml.safe_dump(obj_2, default_flow_style=False)
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(obj_1_str, obj_2_str)
|
2017-12-05 14:14:52 -05:00
|
|
|
|
|
|
|
def assert_table_equals(self, expected_ds, actual_ds):
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(expected_ds.table_name, actual_ds.table_name)
|
|
|
|
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
|
|
|
|
self.assertEqual(expected_ds.schema, actual_ds.schema)
|
|
|
|
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
|
|
|
|
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
|
|
|
|
self.assertEqual(
|
2017-12-05 14:14:52 -05:00
|
|
|
set([c.column_name for c in expected_ds.columns]),
|
2019-06-25 16:34:48 -04:00
|
|
|
set([c.column_name for c in actual_ds.columns]),
|
|
|
|
)
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(
|
2017-12-05 14:14:52 -05:00
|
|
|
set([m.metric_name for m in expected_ds.metrics]),
|
2019-06-25 16:34:48 -04:00
|
|
|
set([m.metric_name for m in actual_ds.metrics]),
|
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
|
|
|
|
def assert_datasource_equals(self, expected_ds, actual_ds):
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(expected_ds.datasource_name, actual_ds.datasource_name)
|
|
|
|
self.assertEqual(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
|
|
|
|
self.assertEqual(len(expected_ds.metrics), len(actual_ds.metrics))
|
|
|
|
self.assertEqual(len(expected_ds.columns), len(actual_ds.columns))
|
|
|
|
self.assertEqual(
|
2017-12-05 14:14:52 -05:00
|
|
|
set([c.column_name for c in expected_ds.columns]),
|
2019-06-25 16:34:48 -04:00
|
|
|
set([c.column_name for c in actual_ds.columns]),
|
|
|
|
)
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(
|
2017-12-05 14:14:52 -05:00
|
|
|
set([m.metric_name for m in expected_ds.metrics]),
|
2019-06-25 16:34:48 -04:00
|
|
|
set([m.metric_name for m in actual_ds.metrics]),
|
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
|
|
|
|
def test_import_table_no_metadata(self):
|
2019-06-25 16:34:48 -04:00
|
|
|
table, dict_table = self.create_table("pure_table", id=ID_PREFIX + 1)
|
2020-08-06 18:33:48 -04:00
|
|
|
new_table = SqlaTable.import_from_dict(db.session, dict_table)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
|
|
|
imported_id = new_table.id
|
2020-04-23 07:30:48 -04:00
|
|
|
imported = self.get_table_by_id(imported_id)
|
2017-12-05 14:14:52 -05:00
|
|
|
self.assert_table_equals(table, imported)
|
|
|
|
self.yaml_compare(table.export_to_dict(), imported.export_to_dict())
|
|
|
|
|
|
|
|
def test_import_table_1_col_1_met(self):
|
|
|
|
table, dict_table = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_1_col_1_met",
|
|
|
|
id=ID_PREFIX + 2,
|
|
|
|
cols_names=["col1"],
|
2020-10-07 12:00:55 -04:00
|
|
|
cols_uuids=[uuid4()],
|
2019-06-25 16:34:48 -04:00
|
|
|
metric_names=["metric1"],
|
|
|
|
)
|
2020-08-06 18:33:48 -04:00
|
|
|
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
2020-04-23 07:30:48 -04:00
|
|
|
imported = self.get_table_by_id(imported_table.id)
|
2017-12-05 14:14:52 -05:00
|
|
|
self.assert_table_equals(table, imported)
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(
|
2019-06-25 16:34:48 -04:00
|
|
|
{DBREF: ID_PREFIX + 2, "database_name": "main"}, json.loads(imported.params)
|
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
self.yaml_compare(table.export_to_dict(), imported.export_to_dict())
|
|
|
|
|
|
|
|
def test_import_table_2_col_2_met(self):
|
|
|
|
table, dict_table = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_2_col_2_met",
|
|
|
|
id=ID_PREFIX + 3,
|
|
|
|
cols_names=["c1", "c2"],
|
2020-10-07 12:00:55 -04:00
|
|
|
cols_uuids=[uuid4(), uuid4()],
|
2019-06-25 16:34:48 -04:00
|
|
|
metric_names=["m1", "m2"],
|
|
|
|
)
|
2020-08-06 18:33:48 -04:00
|
|
|
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
2020-04-23 07:30:48 -04:00
|
|
|
imported = self.get_table_by_id(imported_table.id)
|
2017-12-05 14:14:52 -05:00
|
|
|
self.assert_table_equals(table, imported)
|
|
|
|
self.yaml_compare(table.export_to_dict(), imported.export_to_dict())
|
|
|
|
|
|
|
|
def test_import_table_override_append(self):
|
|
|
|
table, dict_table = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_override", id=ID_PREFIX + 3, cols_names=["col1"], metric_names=["m1"]
|
|
|
|
)
|
2020-08-06 18:33:48 -04:00
|
|
|
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
|
|
|
table_over, dict_table_over = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_override",
|
|
|
|
id=ID_PREFIX + 3,
|
|
|
|
cols_names=["new_col1", "col2", "col3"],
|
|
|
|
metric_names=["new_metric1"],
|
|
|
|
)
|
2020-08-06 18:33:48 -04:00
|
|
|
imported_over_table = SqlaTable.import_from_dict(db.session, dict_table_over)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
|
|
|
|
2020-04-23 07:30:48 -04:00
|
|
|
imported_over = self.get_table_by_id(imported_over_table.id)
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(imported_table.id, imported_over.id)
|
2017-12-05 14:14:52 -05:00
|
|
|
expected_table, _ = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_override",
|
|
|
|
id=ID_PREFIX + 3,
|
|
|
|
metric_names=["new_metric1", "m1"],
|
|
|
|
cols_names=["col1", "new_col1", "col2", "col3"],
|
2020-10-07 12:00:55 -04:00
|
|
|
cols_uuids=[col.uuid for col in imported_over.columns],
|
2019-06-25 16:34:48 -04:00
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
self.assert_table_equals(expected_table, imported_over)
|
2019-06-25 16:34:48 -04:00
|
|
|
self.yaml_compare(
|
|
|
|
expected_table.export_to_dict(), imported_over.export_to_dict()
|
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
|
|
|
|
def test_import_table_override_sync(self):
|
|
|
|
table, dict_table = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_override", id=ID_PREFIX + 3, cols_names=["col1"], metric_names=["m1"]
|
|
|
|
)
|
2020-08-06 18:33:48 -04:00
|
|
|
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
|
|
|
table_over, dict_table_over = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_override",
|
|
|
|
id=ID_PREFIX + 3,
|
|
|
|
cols_names=["new_col1", "col2", "col3"],
|
|
|
|
metric_names=["new_metric1"],
|
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
imported_over_table = SqlaTable.import_from_dict(
|
2020-08-06 18:33:48 -04:00
|
|
|
session=db.session, dict_rep=dict_table_over, sync=["metrics", "columns"]
|
2019-06-25 16:34:48 -04:00
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
|
|
|
|
2020-04-23 07:30:48 -04:00
|
|
|
imported_over = self.get_table_by_id(imported_over_table.id)
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(imported_table.id, imported_over.id)
|
2017-12-05 14:14:52 -05:00
|
|
|
expected_table, _ = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"table_override",
|
|
|
|
id=ID_PREFIX + 3,
|
|
|
|
metric_names=["new_metric1"],
|
|
|
|
cols_names=["new_col1", "col2", "col3"],
|
2020-10-07 12:00:55 -04:00
|
|
|
cols_uuids=[col.uuid for col in imported_over.columns],
|
2019-06-25 16:34:48 -04:00
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
self.assert_table_equals(expected_table, imported_over)
|
|
|
|
self.yaml_compare(
|
2019-06-25 16:34:48 -04:00
|
|
|
expected_table.export_to_dict(), imported_over.export_to_dict()
|
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
|
|
|
|
def test_import_table_override_identical(self):
|
|
|
|
table, dict_table = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"copy_cat",
|
|
|
|
id=ID_PREFIX + 4,
|
|
|
|
cols_names=["new_col1", "col2", "col3"],
|
|
|
|
metric_names=["new_metric1"],
|
|
|
|
)
|
2020-08-06 18:33:48 -04:00
|
|
|
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
|
|
|
copy_table, dict_copy_table = self.create_table(
|
2019-06-25 16:34:48 -04:00
|
|
|
"copy_cat",
|
|
|
|
id=ID_PREFIX + 4,
|
|
|
|
cols_names=["new_col1", "col2", "col3"],
|
|
|
|
metric_names=["new_metric1"],
|
|
|
|
)
|
2020-08-06 18:33:48 -04:00
|
|
|
imported_copy_table = SqlaTable.import_from_dict(db.session, dict_copy_table)
|
2017-12-05 14:14:52 -05:00
|
|
|
db.session.commit()
|
2019-10-21 10:49:12 -04:00
|
|
|
self.assertEqual(imported_table.id, imported_copy_table.id)
|
2020-04-23 07:30:48 -04:00
|
|
|
self.assert_table_equals(copy_table, self.get_table_by_id(imported_table.id))
|
2019-06-25 16:34:48 -04:00
|
|
|
self.yaml_compare(
|
|
|
|
imported_copy_table.export_to_dict(), imported_table.export_to_dict()
|
|
|
|
)
|
2017-12-05 14:14:52 -05:00
|
|
|
|
2019-11-10 11:29:51 -05:00
|
|
|
def test_export_datasource_ui_cli(self):
|
2020-06-17 16:46:45 -04:00
|
|
|
# TODO(bkyryliuk): find fake db is leaking from
|
|
|
|
self.delete_fake_db()
|
|
|
|
|
2019-11-10 11:29:51 -05:00
|
|
|
cli_export = export_to_dict(
|
2020-08-06 18:33:48 -04:00
|
|
|
session=db.session,
|
|
|
|
recursive=True,
|
|
|
|
back_references=False,
|
|
|
|
include_defaults=False,
|
2019-11-10 11:29:51 -05:00
|
|
|
)
|
|
|
|
self.get_resp("/login/", data=dict(username="admin", password="general"))
|
|
|
|
resp = self.get_resp(
|
|
|
|
"/databaseview/action_post", {"action": "yaml_export", "rowid": 1}
|
|
|
|
)
|
|
|
|
ui_export = yaml.safe_load(resp)
|
|
|
|
self.assertEqual(
|
|
|
|
ui_export["databases"][0]["database_name"],
|
|
|
|
cli_export["databases"][0]["database_name"],
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
ui_export["databases"][0]["tables"], cli_export["databases"][0]["tables"]
|
|
|
|
)
|
|
|
|
|
2017-12-05 14:14:52 -05:00
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
# Allow running this module directly with the stdlib unittest runner.
if __name__ == "__main__":
    unittest.main()
|