# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
|
|
|
|
import json
|
2020-12-10 17:50:10 -05:00
|
|
|
import unittest
|
2020-10-22 13:32:08 -04:00
|
|
|
from io import BytesIO
|
2020-10-29 16:11:33 -04:00
|
|
|
from typing import List, Optional
|
2023-04-18 20:51:24 -04:00
|
|
|
from unittest.mock import ANY, patch
|
2020-11-17 17:49:33 -05:00
|
|
|
from zipfile import is_zipfile, ZipFile
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
import prison
|
2020-10-12 08:40:05 -04:00
|
|
|
import pytest
|
2020-03-27 05:30:23 -04:00
|
|
|
import yaml
|
2022-11-14 12:55:53 -05:00
|
|
|
from sqlalchemy.orm import joinedload
|
2020-03-24 13:24:08 -04:00
|
|
|
from sqlalchemy.sql import func
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
|
2020-03-20 12:32:03 -04:00
|
|
|
from superset.dao.exceptions import (
|
|
|
|
DAOCreateFailedError,
|
|
|
|
DAODeleteFailedError,
|
|
|
|
DAOUpdateFailedError,
|
|
|
|
)
|
2023-02-15 05:38:51 -05:00
|
|
|
from superset.datasets.commands.exceptions import DatasetCreateFailedError
|
2022-06-13 20:30:13 -04:00
|
|
|
from superset.datasets.models import Dataset
|
2020-04-08 03:44:35 -04:00
|
|
|
from superset.extensions import db, security_manager
|
2020-03-08 05:13:08 -04:00
|
|
|
from superset.models.core import Database
|
2022-01-16 01:32:50 -05:00
|
|
|
from superset.utils.core import backend, get_example_default_schema
|
|
|
|
from superset.utils.database import get_example_database, get_main_database
|
2020-03-27 05:30:23 -04:00
|
|
|
from superset.utils.dict_import_export import export_to_dict
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.base_tests import SupersetTestCase
|
|
|
|
from tests.integration_tests.conftest import CTAS_SCHEMA_NAME
|
|
|
|
from tests.integration_tests.fixtures.birth_names_dashboard import (
|
|
|
|
load_birth_names_dashboard_with_slices,
|
2021-12-16 19:11:47 -05:00
|
|
|
load_birth_names_data,
|
2021-07-01 11:03:07 -04:00
|
|
|
)
|
|
|
|
from tests.integration_tests.fixtures.energy_dashboard import (
|
2021-12-16 19:11:47 -05:00
|
|
|
load_energy_table_data,
|
2021-07-01 11:03:07 -04:00
|
|
|
load_energy_table_with_slice,
|
|
|
|
)
|
|
|
|
from tests.integration_tests.fixtures.importexport import (
|
2020-11-17 17:49:33 -05:00
|
|
|
database_config,
|
|
|
|
database_metadata_config,
|
|
|
|
dataset_config,
|
|
|
|
dataset_metadata_config,
|
2021-03-04 20:18:27 -05:00
|
|
|
dataset_ui_export,
|
2020-11-17 17:49:33 -05:00
|
|
|
)
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
|
2020-06-29 18:36:06 -04:00
|
|
|


class TestDatasetApi(SupersetTestCase):
    fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu")
    fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2")
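
    # Helper used throughout this class: builds a SqlaTable with the given
    # owners, commits it, and loads column/metric metadata from the backing table.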
    @staticmethod
    def insert_dataset(
        table_name: str,
        owners: List[int],
        database: Database,
        sql: Optional[str] = None,
        schema: Optional[str] = None,
    ) -> SqlaTable:
        obj_owners = []
        for owner in owners:
            user = db.session.query(security_manager.user_model).get(owner)
            obj_owners.append(user)
        table = SqlaTable(
            table_name=table_name,
            schema=schema,
            owners=obj_owners,
            database=database,
            sql=sql,
        )
        db.session.add(table)
        db.session.commit()
        table.fetch_metadata()
        return table

    def insert_default_dataset(self):
        return self.insert_dataset(
            "ab_permission", [self.get_user("admin").id], get_main_database()
        )

    def get_fixture_datasets(self) -> List[SqlaTable]:
        return (
            db.session.query(SqlaTable)
            .options(joinedload(SqlaTable.database))
            .filter(SqlaTable.table_name.in_(self.fixture_tables_names))
            .all()
        )

    def get_fixture_virtual_datasets(self) -> List[SqlaTable]:
        return (
            db.session.query(SqlaTable)
            .filter(SqlaTable.table_name.in_(self.fixture_virtual_table_names))
            .all()
        )
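
    # On SQLite these fixtures yield without creating anything, mirroring the
    # early return the tests themselves take on that backend.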
    @pytest.fixture()
    def create_virtual_datasets(self):
        with self.create_app().app_context():
            if backend() == "sqlite":
                yield
                return

            datasets = []
            admin = self.get_user("admin")
            main_db = get_main_database()
            for table_name in self.fixture_virtual_table_names:
                datasets.append(
                    self.insert_dataset(
                        table_name,
                        [admin.id],
                        main_db,
                        "SELECT * from ab_view_menu;",
                    )
                )
            yield datasets

            # rollback changes
            for dataset in datasets:
                db.session.delete(dataset)
            db.session.commit()

    @pytest.fixture()
    def create_datasets(self):
        with self.create_app().app_context():
            if backend() == "sqlite":
                yield
                return

            datasets = []
            admin = self.get_user("admin")
            main_db = get_main_database()
            for tables_name in self.fixture_tables_names:
                datasets.append(self.insert_dataset(tables_name, [admin.id], main_db))

            yield datasets

            # rollback changes
            for dataset in datasets:
                db.session.delete(dataset)
            db.session.commit()

    @staticmethod
    def get_energy_usage_dataset():
        example_db = get_example_database()
        return (
            db.session.query(SqlaTable)
            .filter_by(
                database=example_db,
                table_name="energy_usage",
                schema=get_example_default_schema(),
            )
            .one()
        )
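
    # Builds an in-memory ZIP bundle in the layout produced by the dataset
    # export endpoint: metadata.yaml plus one YAML file per database and dataset.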
    def create_dataset_import(self) -> BytesIO:
        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            with bundle.open("dataset_export/metadata.yaml", "w") as fp:
                fp.write(yaml.safe_dump(dataset_metadata_config).encode())
            with bundle.open(
                "dataset_export/databases/imported_database.yaml", "w"
            ) as fp:
                fp.write(yaml.safe_dump(database_config).encode())
            with bundle.open(
                "dataset_export/datasets/imported_dataset.yaml", "w"
            ) as fp:
                fp.write(yaml.safe_dump(dataset_config).encode())
        buf.seek(0)
        return buf
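
    # List and distinct endpoints take Rison-encoded query arguments;
    # prison.dumps() builds the q= payload from a plain dict.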
    def test_get_dataset_list(self):
        """
        Dataset API: Test get dataset list
        """
        if backend() == "sqlite":
            return

        example_db = get_example_database()
        self.login(username="admin")
        arguments = {
            "filters": [
                {"col": "database", "opr": "rel_o_m", "value": f"{example_db.id}"},
                {"col": "table_name", "opr": "eq", "value": "birth_names"},
            ]
        }
        uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))
        assert response["count"] == 1
        expected_columns = [
            "changed_by",
            "changed_by_name",
            "changed_by_url",
            "changed_on_delta_humanized",
            "changed_on_utc",
            "database",
            "datasource_type",
            "default_endpoint",
            "description",
            "explore_url",
            "extra",
            "id",
            "kind",
            "owners",
            "schema",
            "sql",
            "table_name",
        ]
        assert sorted(list(response["result"][0].keys())) == expected_columns

    def test_get_dataset_list_gamma(self):
        """
        Dataset API: Test get dataset list gamma
        """
        if backend() == "sqlite":
            return

        self.login(username="gamma")
        uri = "api/v1/dataset/"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))
        assert response["result"] == []

    def test_get_dataset_list_gamma_has_database_access(self):
        """
        Dataset API: Test get dataset list with database access
        """
        if backend() == "sqlite":
            return

        self.login(username="gamma")

        # create new dataset
        main_db = get_main_database()
        dataset = self.insert_dataset("ab_user", [], main_db)

        # make sure dataset is not visible due to missing perms
        uri = "api/v1/dataset/"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))

        assert response["count"] == 0

        # give database access to main db
        main_db_pvm = security_manager.find_permission_view_menu(
            "database_access", main_db.perm
        )
        gamma_role = security_manager.find_role("Gamma")
        gamma_role.permissions.append(main_db_pvm)
        db.session.commit()

        # make sure dataset is now visible
        uri = "api/v1/dataset/"
        rv = self.get_assert_metric(uri, "get_list")
        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))

        tables = {tbl["table_name"] for tbl in response["result"]}
        assert tables == {"ab_user"}

        # revert gamma permission
        gamma_role.permissions.remove(main_db_pvm)
        db.session.delete(dataset)
        db.session.commit()

    def test_get_dataset_related_database_gamma(self):
        """
        Dataset API: Test get dataset related databases gamma
        """
        if backend() == "sqlite":
            return

        # Add main database access to gamma role
        main_db = get_main_database()
        main_db_pvm = security_manager.find_permission_view_menu(
            "database_access", main_db.perm
        )
        gamma_role = security_manager.find_role("Gamma")
        gamma_role.permissions.append(main_db_pvm)
        db.session.commit()

        self.login(username="gamma")
        uri = "api/v1/dataset/related/database"
        rv = self.client.get(uri)
        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))

        assert response["count"] == 1
        assert any(
            entry["text"] == main_db.database_name for entry in response["result"]
        )

        # revert gamma permission
        gamma_role.permissions.remove(main_db_pvm)
        db.session.commit()

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_get_dataset_item(self):
        """
        Dataset API: Test get dataset item
        """
        if backend() == "sqlite":
            return

        table = self.get_energy_usage_dataset()
        main_db = get_main_database()
        self.login(username="admin")
        uri = f"api/v1/dataset/{table.id}"
        rv = self.get_assert_metric(uri, "get")
        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))
        expected_result = {
            "cache_timeout": None,
            "database": {
                "backend": main_db.backend,
                "database_name": "examples",
                "id": 1,
            },
            "default_endpoint": None,
            "description": "Energy consumption",
            "extra": None,
            "fetch_values_predicate": None,
            "filter_select_enabled": True,
            "is_sqllab_view": False,
            "kind": "physical",
            "main_dttm_col": None,
            "offset": 0,
            "owners": [],
            "schema": get_example_default_schema(),
            "sql": None,
            "table_name": "energy_usage",
            "template_params": None,
            "uid": "2__table",
            "datasource_name": "energy_usage",
            "name": f"{get_example_default_schema()}.energy_usage",
            "column_formats": {},
            "granularity_sqla": [],
            "time_grain_sqla": ANY,
            "order_by_choices": [
                ['["source", true]', "source [asc]"],
                ['["source", false]', "source [desc]"],
                ['["target", true]', "target [asc]"],
                ['["target", false]', "target [desc]"],
                ['["value", true]', "value [asc]"],
                ['["value", false]', "value [desc]"],
            ],
            "verbose_map": {
                "__timestamp": "Time",
                "count": "COUNT(*)",
                "source": "source",
                "sum__value": "sum__value",
                "target": "target",
                "value": "value",
            },
        }
        if response["result"]["database"]["backend"] not in ("presto", "hive"):
            assert {
                k: v for k, v in response["result"].items() if k in expected_result
            } == expected_result
        assert len(response["result"]["columns"]) == 3
        assert len(response["result"]["metrics"]) == 2

    def test_get_dataset_distinct_schema(self):
        """
        Dataset API: Test get dataset distinct schema
        """
        if backend() == "sqlite":
            return

        def pg_test_query_parameter(query_parameter, expected_response):
            uri = f"api/v1/dataset/distinct/schema?q={prison.dumps(query_parameter)}"
            rv = self.client.get(uri)
            response = json.loads(rv.data.decode("utf-8"))
            assert rv.status_code == 200
            assert response == expected_response

        example_db = get_example_database()
        datasets = []
        if example_db.backend == "postgresql":
            datasets.append(
                self.insert_dataset(
                    "ab_permission", [], get_main_database(), schema="public"
                )
            )
            datasets.append(
                self.insert_dataset(
                    "columns",
                    [],
                    get_main_database(),
                    schema="information_schema",
                )
            )
        all_datasets = db.session.query(SqlaTable).all()
        schema_values = sorted(
            {
                dataset.schema
                for dataset in all_datasets
                if dataset.schema is not None
            }
        )
        expected_response = {
            "count": len(schema_values),
            "result": [{"text": val, "value": val} for val in schema_values],
        }
        self.login(username="admin")
        uri = "api/v1/dataset/distinct/schema"
        rv = self.client.get(uri)
        response = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 200
        assert response == expected_response

        # Test filter
        query_parameter = {"filter": "inf"}
        pg_test_query_parameter(
            query_parameter,
            {
                "count": 1,
                "result": [
                    {"text": "information_schema", "value": "information_schema"}
                ],
            },
        )

        query_parameter = {"page": 0, "page_size": 1}
        pg_test_query_parameter(
            query_parameter,
            {
                "count": len(schema_values),
                "result": [expected_response["result"][0]],
            },
        )

        for dataset in datasets:
            db.session.delete(dataset)
        db.session.commit()

    def test_get_dataset_distinct_not_allowed(self):
        """
        Dataset API: Test get dataset distinct not allowed
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        uri = "api/v1/dataset/distinct/table_name"
        rv = self.client.get(uri)
        assert rv.status_code == 404

    def test_get_dataset_distinct_gamma(self):
        """
        Dataset API: Test get dataset distinct with gamma
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()

        self.login(username="gamma")
        uri = "api/v1/dataset/distinct/schema"
        rv = self.client.get(uri)
        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))
        assert response["count"] == 0
        assert response["result"] == []

        db.session.delete(dataset)
        db.session.commit()

    def test_get_dataset_info(self):
        """
        Dataset API: Test get dataset info
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        uri = "api/v1/dataset/_info"
        rv = self.get_assert_metric(uri, "info")
        assert rv.status_code == 200

    def test_info_security_dataset(self):
        """
        Dataset API: Test info security
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        params = {"keys": ["permissions"]}
        uri = f"api/v1/dataset/_info?q={prison.dumps(params)}"
        rv = self.get_assert_metric(uri, "info")
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 200
        assert set(data["permissions"]) == {
            "can_read",
            "can_write",
            "can_export",
            "can_duplicate",
            "can_get_or_create_dataset",
        }
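
    # A successful create also populates columns and a default COUNT(*) metric
    # from the physical table, as the assertions below verify.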
    def test_create_dataset_item(self):
        """
        Dataset API: Test create dataset item
        """
        if backend() == "sqlite":
            return

        main_db = get_main_database()
        self.login(username="admin")
        table_data = {
            "database": main_db.id,
            "schema": "",
            "table_name": "ab_permission",
        }
        uri = "api/v1/dataset/"
        rv = self.post_assert_metric(uri, table_data, "post")
        assert rv.status_code == 201
        data = json.loads(rv.data.decode("utf-8"))
        table_id = data.get("id")
        model = db.session.query(SqlaTable).get(table_id)
        assert model.table_name == table_data["table_name"]
        assert model.database_id == table_data["database"]

        # Assert that columns were created
        columns = (
            db.session.query(TableColumn)
            .filter_by(table_id=table_id)
            .order_by("column_name")
            .all()
        )
        assert columns[0].column_name == "id"
        assert columns[1].column_name == "name"

        # Assert that metrics were created
        columns = (
            db.session.query(SqlMetric)
            .filter_by(table_id=table_id)
            .order_by("metric_name")
            .all()
        )
        assert columns[0].expression == "COUNT(*)"

        db.session.delete(model)
        db.session.commit()

    def test_create_dataset_item_gamma(self):
        """
        Dataset API: Test create dataset item gamma
        """
        if backend() == "sqlite":
            return

        self.login(username="gamma")
        main_db = get_main_database()
        table_data = {
            "database": main_db.id,
            "schema": "",
            "table_name": "ab_permission",
        }
        uri = "api/v1/dataset/"
        rv = self.client.post(uri, json=table_data)
        assert rv.status_code == 403

    def test_create_dataset_item_owner(self):
        """
        Dataset API: Test create item owner
        """
        if backend() == "sqlite":
            return

        main_db = get_main_database()
        self.login(username="alpha")
        admin = self.get_user("admin")
        alpha = self.get_user("alpha")

        table_data = {
            "database": main_db.id,
            "schema": "",
            "table_name": "ab_permission",
            "owners": [admin.id],
        }
        uri = "api/v1/dataset/"
        rv = self.post_assert_metric(uri, table_data, "post")
        assert rv.status_code == 201
        data = json.loads(rv.data.decode("utf-8"))
        model = db.session.query(SqlaTable).get(data.get("id"))
        assert admin in model.owners
        assert alpha in model.owners
        db.session.delete(model)
        db.session.commit()

    def test_create_dataset_item_owners_invalid(self):
        """
        Dataset API: Test create dataset item owner invalid
        """
        if backend() == "sqlite":
            return

        admin = self.get_user("admin")
        main_db = get_main_database()
        self.login(username="admin")
        table_data = {
            "database": main_db.id,
            "schema": "",
            "table_name": "ab_permission",
            "owners": [admin.id, 1000],
        }
        uri = "api/v1/dataset/"
        rv = self.post_assert_metric(uri, table_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        expected_result = {"message": {"owners": ["Owners are invalid"]}}
        assert data == expected_result

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_create_dataset_validate_uniqueness(self):
        """
        Dataset API: Test create dataset validate table uniqueness
        """
        if backend() == "sqlite":
            return

        schema = get_example_default_schema()
        energy_usage_ds = self.get_energy_usage_dataset()
        self.login(username="admin")
        table_data = {
            "database": energy_usage_ds.database_id,
            "table_name": energy_usage_ds.table_name,
        }
        if schema:
            table_data["schema"] = schema
        rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {"table_name": ["Dataset energy_usage already exists"]}
        }

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_create_dataset_with_sql_validate_uniqueness(self):
        """
        Dataset API: Test create dataset with sql validate table uniqueness
        """
        if backend() == "sqlite":
            return

        schema = get_example_default_schema()
        energy_usage_ds = self.get_energy_usage_dataset()
        self.login(username="admin")
        table_data = {
            "database": energy_usage_ds.database_id,
            "table_name": energy_usage_ds.table_name,
            "sql": "select * from energy_usage",
        }
        if schema:
            table_data["schema"] = schema
        rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {
            "message": {"table_name": ["Dataset energy_usage already exists"]}
        }

    @pytest.mark.usefixtures("load_energy_table_with_slice")
    def test_create_dataset_with_sql(self):
        """
        Dataset API: Test create dataset with sql
        """
        if backend() == "sqlite":
            return

        schema = get_example_default_schema()
        energy_usage_ds = self.get_energy_usage_dataset()
        self.login(username="alpha")
        admin = self.get_user("admin")
        alpha = self.get_user("alpha")
        table_data = {
            "database": energy_usage_ds.database_id,
            "table_name": "energy_usage_virtual",
            "sql": "select * from energy_usage",
            "owners": [admin.id],
        }
        if schema:
            table_data["schema"] = schema
        rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post")
        assert rv.status_code == 201
        data = json.loads(rv.data.decode("utf-8"))
        model = db.session.query(SqlaTable).get(data.get("id"))
        assert admin in model.owners
        assert alpha in model.owners
        db.session.delete(model)
        db.session.commit()

    @unittest.skip("test is failing stochastically")
    def test_create_dataset_same_name_different_schema(self):
        if backend() == "sqlite":
            # sqlite doesn't support schemas
            return

        example_db = get_example_database()
        with example_db.get_sqla_engine_with_context() as engine:
            engine.execute(
                f"CREATE TABLE {CTAS_SCHEMA_NAME}.birth_names AS SELECT 2 as two"
            )

        self.login(username="admin")
        table_data = {
            "database": example_db.id,
            "schema": CTAS_SCHEMA_NAME,
            "table_name": "birth_names",
        }

        uri = "api/v1/dataset/"
        rv = self.post_assert_metric(uri, table_data, "post")
        assert rv.status_code == 201

        # cleanup
        data = json.loads(rv.data.decode("utf-8"))
        uri = f'api/v1/dataset/{data.get("id")}'
        rv = self.client.delete(uri)
        assert rv.status_code == 200
        with example_db.get_sqla_engine_with_context() as engine:
            engine.execute(f"DROP TABLE {CTAS_SCHEMA_NAME}.birth_names")
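
    # Create-time validation failures surface as 422 responses with a
    # per-field message, as the next two tests assert.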
    def test_create_dataset_validate_database(self):
        """
        Dataset API: Test create dataset validate database exists
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        dataset_data = {"database": 1000, "schema": "", "table_name": "birth_names"}
        uri = "api/v1/dataset/"
        rv = self.post_assert_metric(uri, dataset_data, "post")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        assert data == {"message": {"database": ["Database does not exist"]}}

    def test_create_dataset_validate_tables_exists(self):
        """
        Dataset API: Test create dataset validate table exists
        """
        if backend() == "sqlite":
            return

        example_db = get_example_database()
        self.login(username="admin")
        table_data = {
            "database": example_db.id,
            "schema": "",
            "table_name": "does_not_exist",
        }
        uri = "api/v1/dataset/"
        rv = self.post_assert_metric(uri, table_data, "post")
        assert rv.status_code == 422

    @patch("superset.models.core.Database.get_columns")
    @patch("superset.models.core.Database.has_table_by_name")
    @patch("superset.models.core.Database.has_view_by_name")
    @patch("superset.models.core.Database.get_table")
    def test_create_dataset_validate_view_exists(
        self,
        mock_get_table,
        mock_has_table_by_name,
        mock_has_view_by_name,
        mock_get_columns,
    ):
        """
        Dataset API: Test create dataset validate view exists
        """
        if backend() == "sqlite":
            return

        mock_get_columns.return_value = [
            {
                "name": "col",
                "type": "VARCHAR",
                "type_generic": None,
                "is_dttm": None,
            }
        ]

        mock_has_table_by_name.return_value = False
        mock_has_view_by_name.return_value = True
        mock_get_table.return_value = None

        example_db = get_example_database()
        with example_db.get_sqla_engine_with_context() as engine:
            dialect = engine.dialect

            with patch.object(
                dialect, "get_view_names", wraps=dialect.get_view_names
            ) as patch_get_view_names:
                patch_get_view_names.return_value = {"test_case_view"}

                self.login(username="admin")
                table_data = {
                    "database": example_db.id,
                    "schema": "",
                    "table_name": "test_case_view",
                }

                uri = "api/v1/dataset/"
                rv = self.post_assert_metric(uri, table_data, "post")
                assert rv.status_code == 201

                # cleanup
                data = json.loads(rv.data.decode("utf-8"))
                uri = f'api/v1/dataset/{data.get("id")}'
                rv = self.client.delete(uri)
                assert rv.status_code == 200
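
    # A DAO-level failure during create is reported as a generic 422.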
    @patch("superset.datasets.dao.DatasetDAO.create")
    def test_create_dataset_sqlalchemy_error(self, mock_dao_create):
        """
        Dataset API: Test create dataset sqlalchemy error
        """
        if backend() == "sqlite":
            return

        mock_dao_create.side_effect = DAOCreateFailedError()
        self.login(username="admin")
        main_db = get_main_database()
        dataset_data = {
            "database": main_db.id,
            "schema": "",
            "table_name": "ab_permission",
        }
        uri = "api/v1/dataset/"
        rv = self.post_assert_metric(uri, dataset_data, "post")
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 422
        assert data == {"message": "Dataset could not be created."}

    def test_update_dataset_item(self):
        """
        Dataset API: Test update dataset item
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="admin")
        dataset_data = {"description": "changed_description"}
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.put_assert_metric(uri, dataset_data, "put")
        assert rv.status_code == 200
        model = db.session.query(SqlaTable).get(dataset.id)
        assert model.description == dataset_data["description"]

        db.session.delete(dataset)
        db.session.commit()
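
    # With ?override_columns=true the submitted column list replaces the
    # dataset's existing columns outright rather than being merged.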
    def test_update_dataset_item_w_override_columns(self):
        """
        Dataset API: Test update dataset with override columns
        """
        if backend() == "sqlite":
            return

        # Add default dataset
        dataset = self.insert_default_dataset()
        self.login(username="admin")
        new_col_dict = {
            "column_name": "new_col",
            "description": "description",
            "expression": "expression",
            "type": "INTEGER",
"advanced_data_type": "ADVANCED_DATA_TYPE",
|
2021-04-12 16:45:50 -04:00
|
|
|
"verbose_name": "New Col",
|
|
|
|
}
|
2020-11-30 12:23:11 -05:00
|
|
|
dataset_data = {
|
2021-04-12 16:45:50 -04:00
|
|
|
"columns": [new_col_dict],
|
2020-11-30 12:23:11 -05:00
|
|
|
"description": "changed description",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}?override_columns=true"
|
|
|
|
rv = self.put_assert_metric(uri, dataset_data, "put")
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
2021-04-05 14:52:04 -04:00
|
|
|
columns = db.session.query(TableColumn).filter_by(table_id=dataset.id).all()
|
2020-11-30 12:23:11 -05:00
|
|
|
|
2021-04-12 16:45:50 -04:00
|
|
|
assert new_col_dict["column_name"] in [col.column_name for col in columns]
|
|
|
|
assert new_col_dict["description"] in [col.description for col in columns]
|
|
|
|
assert new_col_dict["expression"] in [col.expression for col in columns]
|
|
|
|
assert new_col_dict["type"] in [col.type for col in columns]
|
assert new_col_dict["advanced_data_type"] in [
|
|
|
|
col.advanced_data_type for col in columns
|
|
|
|
]
|
2020-11-30 12:23:11 -05:00
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2022-07-29 21:51:35 -04:00
|
|
|
    def test_update_dataset_item_w_override_columns_same_columns(self):
        """
        Dataset API: Test update dataset with override columns, resubmitting
        the existing columns plus a new one
        """
        if backend() == "sqlite":
            return

        # Add default dataset
        dataset = self.insert_default_dataset()
        prev_col_len = len(dataset.columns)

        cols = [
            {
                "column_name": c.column_name,
                "description": c.description,
                "expression": c.expression,
                "type": c.type,
                "advanced_data_type": c.advanced_data_type,
                "verbose_name": c.verbose_name,
            }
            for c in dataset.columns
        ]

        cols.append(
            {
                "column_name": "new_col",
                "description": "description",
                "expression": "expression",
                "type": "INTEGER",
                "advanced_data_type": "ADVANCED_DATA_TYPE",
                "verbose_name": "New Col",
            }
        )

        self.login(username="admin")
        dataset_data = {
            "columns": cols,
        }
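        # override_columns=true is expected to replace the stored column list
        # with exactly the payload below, rather than merging into it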
uri = f"api/v1/dataset/{dataset.id}?override_columns=true"
|
|
|
|
rv = self.put_assert_metric(uri, dataset_data, "put")
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
columns = db.session.query(TableColumn).filter_by(table_id=dataset.id).all()
|
|
|
|
assert len(columns) != prev_col_len
|
|
|
|
assert len(columns) == 3
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-12-01 14:47:22 -05:00
|
|
|
    def test_update_dataset_create_column_and_metric(self):
        """
        Dataset API: Test update dataset create column and metric
        """
        if backend() == "sqlite":
            return

        # create example dataset by Command
        dataset = self.insert_default_dataset()

        new_column_data = {
            "column_name": "new_col",
            "description": "description",
            "expression": "expression",
            "extra": '{"abc":123}',
            "type": "INTEGER",
            "advanced_data_type": "ADVANCED_DATA_TYPE",
            "verbose_name": "New Col",
            "uuid": "c626b60a-3fb2-4e99-9f01-53aca0b17166",
        }
        new_metric_data = {
            "d3format": None,
            "description": None,
            "expression": "COUNT(*)",
            "extra": '{"abc":123}',
            "metric_name": "my_count",
            "metric_type": None,
            "verbose_name": "My Count",
            "warning_text": None,
            "uuid": "051b5e72-4e6e-4860-b12b-4d530009dd2a",
        }
uri = f"api/v1/dataset/{dataset.id}"
|
2021-12-01 14:47:22 -05:00
|
|
|
|
|
|
|
# Get current cols and metrics and append the new ones
|
2020-03-24 13:24:08 -04:00
|
|
|
self.login(username="admin")
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-03-24 13:24:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-07-07 08:26:54 -04:00
|
|
|
|
|
|
|
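        # changed_on / created_on / type_generic are read-only in the GET
        # payload, so drop them before echoing the columns back in the PUT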
for column in data["result"]["columns"]:
|
|
|
|
column.pop("changed_on", None)
|
|
|
|
column.pop("created_on", None)
|
2021-06-27 00:35:17 -04:00
|
|
|
column.pop("type_generic", None)
|
2020-03-24 13:24:08 -04:00
|
|
|
data["result"]["columns"].append(new_column_data)
|
2021-12-01 14:47:22 -05:00
|
|
|
|
|
|
|
for metric in data["result"]["metrics"]:
|
|
|
|
metric.pop("changed_on", None)
|
|
|
|
metric.pop("created_on", None)
|
|
|
|
metric.pop("type_generic", None)
|
|
|
|
|
|
|
|
data["result"]["metrics"].append(new_metric_data)
|
|
|
|
rv = self.client.put(
|
|
|
|
uri,
|
|
|
|
json={
|
|
|
|
"columns": data["result"]["columns"],
|
|
|
|
"metrics": data["result"]["metrics"],
|
|
|
|
},
|
|
|
|
)
|
2020-03-24 13:24:08 -04:00
|
|
|
|
2020-10-22 03:56:26 -04:00
|
|
|
        assert rv.status_code == 200

        columns = (
            db.session.query(TableColumn)
            .filter_by(table_id=dataset.id)
            .order_by("column_name")
            .all()
        )

        assert columns[0].column_name == "id"
        assert columns[1].column_name == "name"
        assert columns[2].column_name == new_column_data["column_name"]
        assert columns[2].description == new_column_data["description"]
        assert columns[2].expression == new_column_data["expression"]
        assert columns[2].type == new_column_data["type"]
        assert columns[2].advanced_data_type == new_column_data["advanced_data_type"]
        assert columns[2].extra == new_column_data["extra"]
        assert columns[2].verbose_name == new_column_data["verbose_name"]
        assert str(columns[2].uuid) == new_column_data["uuid"]

        metrics = (
            db.session.query(SqlMetric)
            .filter_by(table_id=dataset.id)
            .order_by("metric_name")
            .all()
        )
        assert metrics[0].metric_name == "count"
        assert metrics[1].metric_name == "my_count"
        assert metrics[1].d3format == new_metric_data["d3format"]
        assert metrics[1].description == new_metric_data["description"]
        assert metrics[1].expression == new_metric_data["expression"]
        assert metrics[1].extra == new_metric_data["extra"]
        assert metrics[1].metric_type == new_metric_data["metric_type"]
        assert metrics[1].verbose_name == new_metric_data["verbose_name"]
        assert metrics[1].warning_text == new_metric_data["warning_text"]
        assert str(metrics[1].uuid) == new_metric_data["uuid"]

        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_delete_column(self):
        """
        Dataset API: Test update dataset delete column
        """
        if backend() == "sqlite":
            return

        # create example dataset by Command
        dataset = self.insert_default_dataset()

        new_column_data = {
            "column_name": "new_col",
            "description": "description",
            "expression": "expression",
            "type": "INTEGER",
"advanced_data_type": "ADVANCED_DATA_TYPE",
|
2021-03-15 14:14:26 -04:00
|
|
|
"verbose_name": "New Col",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
|
|
|
# Get current cols and append the new column
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.get_assert_metric(uri, "get")
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
for column in data["result"]["columns"]:
|
|
|
|
column.pop("changed_on", None)
|
|
|
|
column.pop("created_on", None)
|
2021-06-27 00:35:17 -04:00
|
|
|
column.pop("type_generic", None)
|
2021-03-15 14:14:26 -04:00
|
|
|
|
|
|
|
data["result"]["columns"].append(new_column_data)
|
|
|
|
rv = self.client.put(uri, json={"columns": data["result"]["columns"]})
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
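        # a column omitted from a later PUT payload should be deleted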
        # Remove this new column
        data["result"]["columns"].remove(new_column_data)
        rv = self.client.put(uri, json={"columns": data["result"]["columns"]})
        assert rv.status_code == 200

        columns = (
            db.session.query(TableColumn)
            .filter_by(table_id=dataset.id)
            .order_by("column_name")
            .all()
        )
        assert columns[0].column_name == "id"
        assert columns[1].column_name == "name"
        assert len(columns) == 2

        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_update_column(self):
        """
        Dataset API: Test update dataset columns
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()

        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
        # Get current cols and alter one
        rv = self.get_assert_metric(uri, "get")
        resp_columns = json.loads(rv.data.decode("utf-8"))["result"]["columns"]
        for column in resp_columns:
            column.pop("changed_on", None)
            column.pop("created_on", None)
            column.pop("type_generic", None)

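        # flip the boolean flags on the first column and push the update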
resp_columns[0]["groupby"] = False
|
|
|
|
resp_columns[0]["filterable"] = False
|
2020-08-27 12:49:18 -04:00
|
|
|
rv = self.client.put(uri, json={"columns": resp_columns})
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-24 13:24:08 -04:00
|
|
|
columns = (
|
|
|
|
db.session.query(TableColumn)
|
|
|
|
.filter_by(table_id=dataset.id)
|
|
|
|
.order_by("column_name")
|
|
|
|
.all()
|
|
|
|
)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert columns[0].column_name == "id"
|
|
|
|
assert columns[1].column_name, "name"
|
2020-08-27 12:49:18 -04:00
|
|
|
# TODO(bkyryliuk): find the reason why update is failing for the presto database
|
|
|
|
if get_example_database().backend != "presto":
|
2020-10-22 03:56:26 -04:00
|
|
|
assert columns[0].groupby is False
|
|
|
|
assert columns[0].filterable is False
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
    def test_update_dataset_delete_metric(self):
        """
        Dataset API: Test update dataset delete metric
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        metrics_query = (
            db.session.query(SqlMetric)
            .filter_by(table_id=dataset.id)
            .order_by("metric_name")
        )

        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
        data = {
            "metrics": [
                {"metric_name": "metric1", "expression": "COUNT(*)"},
                {"metric_name": "metric2", "expression": "DIFF_COUNT(*)"},
            ]
        }
        rv = self.put_assert_metric(uri, data, "put")
        assert rv.status_code == 200

        metrics = metrics_query.all()
        assert len(metrics) == 2

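        # Resubmitting only metric1 (referenced by id) should drop metric2,
        # since the metrics list on PUT replaces the stored set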
        data = {
            "metrics": [
                {
                    "id": metrics[0].id,
                    "metric_name": "metric1",
                    "expression": "COUNT(*)",
                },
            ]
        }
        rv = self.put_assert_metric(uri, data, "put")
        assert rv.status_code == 200

        metrics = metrics_query.all()
        assert len(metrics) == 1

        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_update_column_uniqueness(self):
        """
        Dataset API: Test update dataset columns uniqueness
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()

        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
        # try to insert a new column ID that already exists
        data = {"columns": [{"column_name": "id", "type": "INTEGER"}]}
        rv = self.put_assert_metric(uri, data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        expected_result = {
            "message": {"columns": ["One or more columns already exist"]}
        }
        assert data == expected_result
        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_update_metric_uniqueness(self):
        """
        Dataset API: Test update dataset metric uniqueness
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()

        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
        # try to insert a new metric that already exists
        data = {"metrics": [{"metric_name": "count", "expression": "COUNT(*)"}]}
        rv = self.put_assert_metric(uri, data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        expected_result = {
            "message": {"metrics": ["One or more metrics already exist"]}
        }
        assert data == expected_result
        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_update_column_duplicate(self):
        """
        Dataset API: Test update dataset columns duplicate
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()

        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
        # try to insert the same column name twice
        data = {
            "columns": [
                {"column_name": "id", "type": "INTEGER"},
                {"column_name": "id", "type": "VARCHAR"},
            ]
        }
        rv = self.put_assert_metric(uri, data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        expected_result = {
            "message": {"columns": ["One or more columns are duplicated"]}
        }
        assert data == expected_result
        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_update_metric_duplicate(self):
        """
        Dataset API: Test update dataset metric duplicate
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()

        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
        # try to insert the same metric name twice
        data = {
            "metrics": [
                {"metric_name": "dup", "expression": "COUNT(*)"},
                {"metric_name": "dup", "expression": "DIFF_COUNT(*)"},
            ]
        }
        rv = self.put_assert_metric(uri, data, "put")
        assert rv.status_code == 422
        data = json.loads(rv.data.decode("utf-8"))
        expected_result = {
            "message": {"metrics": ["One or more metrics are duplicated"]}
        }
        assert data == expected_result
        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_item_gamma(self):
        """
        Dataset API: Test update dataset item gamma
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="gamma")
        table_data = {"description": "changed_description"}
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.client.put(uri, json=table_data)
        assert rv.status_code == 403
        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_item_not_owned(self):
        """
        Dataset API: Test update dataset item not owned
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="alpha")
        table_data = {"description": "changed_description"}
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.put_assert_metric(uri, table_data, "put")
        assert rv.status_code == 403
        db.session.delete(dataset)
        db.session.commit()

    def test_update_dataset_item_owners_invalid(self):
        """
        Dataset API: Test update dataset item owner invalid
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="admin")
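        # user id 1000 is assumed not to exist, so validation should fail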
table_data = {"description": "changed_description", "owners": [1000]}
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, table_data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
    def test_update_dataset_item_uniqueness(self):
        """
        Dataset API: Test update dataset uniqueness
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="admin")
        ab_user = self.insert_dataset(
            "ab_user", [self.get_user("admin").id], get_main_database()
        )
        table_data = {"table_name": "ab_user"}
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.put_assert_metric(uri, table_data, "put")
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 422
        expected_response = {
            "message": {"table_name": ["Dataset ab_user already exists"]}
        }
        assert data == expected_response
        db.session.delete(dataset)
        db.session.delete(ab_user)
        db.session.commit()

    def test_update_dataset_unsafe_default_endpoint(self):
        """
        Dataset API: Test unsafe default endpoint
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
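        # default_endpoint must share Superset's domain; an external URL
        # should be rejected with a validation error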
table_data = {"default_endpoint": "http://www.google.com"}
|
|
|
|
rv = self.client.put(uri, json=table_data)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 422
|
|
|
|
expected_response = {
|
|
|
|
"message": {
|
|
|
|
"default_endpoint": [
|
|
|
|
"The submitted URL is not considered safe,"
|
|
|
|
" only use URLs with the same domain as Superset."
|
|
|
|
]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
assert data == expected_response
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-03-08 05:13:08 -04:00
|
|
|
@patch("superset.datasets.dao.DatasetDAO.update")
|
|
|
|
def test_update_dataset_sqlalchemy_error(self, mock_dao_update):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset sqlalchemy error
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2020-03-20 12:32:03 -04:00
|
|
|
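        # patch the DAO so the update raises, exercising the error response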
        mock_dao_update.side_effect = DAOUpdateFailedError()

        dataset = self.insert_default_dataset()
        self.login(username="admin")
        table_data = {"description": "changed_description"}
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.client.put(uri, json=table_data)
        data = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 422
        assert data == {"message": "Dataset could not be updated."}

        db.session.delete(dataset)
        db.session.commit()

    def test_delete_dataset_item(self):
        """
        Dataset API: Test delete dataset item
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        view_menu = security_manager.find_view_menu(dataset.get_perm())
        assert view_menu is not None
        view_menu_id = view_menu.id
        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.client.delete(uri)
        assert rv.status_code == 200
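        # deleting the dataset should also clean up its permission view menu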
        non_view_menu = db.session.query(security_manager.viewmenu_model).get(
            view_menu_id
        )
        assert non_view_menu is None

    def test_delete_item_dataset_not_owned(self):
        """
        Dataset API: Test delete item not owned
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="alpha")
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.delete_assert_metric(uri, "delete")
        assert rv.status_code == 403
        db.session.delete(dataset)
        db.session.commit()

    def test_delete_dataset_item_not_authorized(self):
        """
        Dataset API: Test delete item not authorized
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="gamma")
        uri = f"api/v1/dataset/{dataset.id}"
        rv = self.client.delete(uri)
        assert rv.status_code == 403
        db.session.delete(dataset)
        db.session.commit()

@patch("superset.datasets.dao.DatasetDAO.delete")
|
|
|
|
def test_delete_dataset_sqlalchemy_error(self, mock_dao_delete):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test delete dataset sqlalchemy error
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2020-03-20 12:32:03 -04:00
|
|
|
mock_dao_delete.side_effect = DAODeleteFailedError()
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
|
|
|
assert data == {"message": "Dataset could not be deleted."}
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_delete_dataset_column(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset column
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
column_id = dataset.columns[0].id
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/column/{column_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert db.session.query(TableColumn).get(column_id) == None
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_delete_dataset_column_not_found(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset column not found
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
non_id = self.get_nonexistent_numeric_id(TableColumn)
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/column/{non_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
non_id = self.get_nonexistent_numeric_id(SqlaTable)
|
|
|
|
column_id = dataset.columns[0].id
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{non_id}/column/{column_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_delete_dataset_column_not_owned(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset column not owned
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
column_id = dataset.columns[0].id
|
|
|
|
|
|
|
|
self.login(username="alpha")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/column/{column_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 403
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
@patch("superset.datasets.dao.DatasetDAO.delete")
|
|
|
|
def test_delete_dataset_column_fail(self, mock_dao_delete):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset column
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
mock_dao_delete.side_effect = DAODeleteFailedError()
|
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
column_id = dataset.columns[0].id
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/column/{column_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 422
|
|
|
|
assert data == {"message": "Dataset column delete failed."}
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_delete_dataset_metric(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset metric
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
test_metric = SqlMetric(
|
|
|
|
metric_name="metric1", expression="COUNT(*)", table=dataset
|
|
|
|
)
|
|
|
|
db.session.add(test_metric)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/metric/{test_metric.id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert db.session.query(SqlMetric).get(test_metric.id) == None
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_delete_dataset_metric_not_found(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset metric not found
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
non_id = self.get_nonexistent_numeric_id(SqlMetric)
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/metric/{non_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
non_id = self.get_nonexistent_numeric_id(SqlaTable)
|
|
|
|
metric_id = dataset.metrics[0].id
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{non_id}/metric/{metric_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_delete_dataset_metric_not_owned(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset metric not owned
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
metric_id = dataset.metrics[0].id
|
|
|
|
|
|
|
|
self.login(username="alpha")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/metric/{metric_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 403
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
@patch("superset.datasets.dao.DatasetDAO.delete")
|
|
|
|
def test_delete_dataset_metric_fail(self, mock_dao_delete):
|
|
|
|
"""
|
|
|
|
Dataset API: Test delete dataset metric
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2021-03-15 14:14:26 -04:00
|
|
|
mock_dao_delete.side_effect = DAODeleteFailedError()
|
|
|
|
dataset = self.get_fixture_datasets()[0]
|
|
|
|
column_id = dataset.metrics[0].id
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/metric/{column_id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 422
|
|
|
|
assert data == {"message": "Dataset metric delete failed."}
|
|
|
|
|
2020-10-12 08:40:05 -04:00
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_dataset_items(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete dataset items
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2020-10-12 08:40:05 -04:00
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
|
|
|
|
view_menu_names = []
|
|
|
|
for dataset in datasets:
|
|
|
|
view_menu_names.append(dataset.get_perm())
|
|
|
|
|
|
|
|
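        # the bulk endpoint takes a rison-encoded list of ids in the q param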
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
expected_response = {"message": f"Deleted {len(datasets)} datasets"}
|
|
|
|
assert data == expected_response
|
|
|
|
datasets = (
|
|
|
|
db.session.query(SqlaTable)
|
|
|
|
.filter(SqlaTable.table_name.in_(self.fixture_tables_names))
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
assert datasets == []
|
|
|
|
# Assert permissions get cleaned
|
|
|
|
for view_menu_name in view_menu_names:
|
|
|
|
assert security_manager.find_view_menu(view_menu_name) is None
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_item_dataset_not_owned(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete item not owned
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2020-10-12 08:40:05 -04:00
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
|
|
|
|
self.login(username="alpha")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
|
|
|
assert rv.status_code == 403
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_item_not_found(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete item not found
|
|
|
|
"""
|
2022-07-18 18:21:38 -04:00
|
|
|
if backend() == "sqlite":
|
|
|
|
return
|
|
|
|
|
2020-10-12 08:40:05 -04:00
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
dataset_ids.append(db.session.query(func.max(SqlaTable.id)).scalar())
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
|
|
|
assert rv.status_code == 404

    @pytest.mark.usefixtures("create_datasets")
    def test_bulk_delete_dataset_item_not_authorized(self):
        """
        Dataset API: Test bulk delete item not authorized
        """
        if backend() == "sqlite":
            return

        datasets = self.get_fixture_datasets()
        dataset_ids = [dataset.id for dataset in datasets]

        self.login(username="gamma")
        uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
        rv = self.client.delete(uri)
        assert rv.status_code == 403

    @pytest.mark.usefixtures("create_datasets")
    def test_bulk_delete_dataset_item_incorrect(self):
        """
        Dataset API: Test bulk delete item incorrect request
        """
        if backend() == "sqlite":
            return

        datasets = self.get_fixture_datasets()
        dataset_ids = [dataset.id for dataset in datasets]
        dataset_ids.append("Wrong")

        self.login(username="admin")
        uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
        rv = self.client.delete(uri)
        assert rv.status_code == 400

    def test_dataset_item_refresh(self):
        """
        Dataset API: Test item refresh
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        # delete a column
        id_column = (
            db.session.query(TableColumn)
            .filter_by(table_id=dataset.id, column_name="id")
            .one()
        )
        db.session.delete(id_column)
        db.session.commit()

        self.login(username="admin")
        uri = f"api/v1/dataset/{dataset.id}/refresh"
        rv = self.put_assert_metric(uri, {}, "refresh")
        assert rv.status_code == 200
        # Assert the column is restored on refresh
        id_column = (
            db.session.query(TableColumn)
            .filter_by(table_id=dataset.id, column_name="id")
            .one()
        )
        assert id_column is not None
        db.session.delete(dataset)
        db.session.commit()
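
    # The refresh endpoint re-syncs the dataset's columns from the underlying
    # physical table, which is why the "id" column deleted above is expected
    # to reappear after the PUT.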

    def test_dataset_item_refresh_not_found(self):
        """
        Dataset API: Test item refresh not found dataset
        """
        if backend() == "sqlite":
            return

        max_id = db.session.query(func.max(SqlaTable.id)).scalar()

        self.login(username="admin")
        uri = f"api/v1/dataset/{max_id + 1}/refresh"
        rv = self.put_assert_metric(uri, {}, "refresh")
        assert rv.status_code == 404

    def test_dataset_item_refresh_not_owned(self):
        """
        Dataset API: Test item refresh not owned dataset
        """
        if backend() == "sqlite":
            return

        dataset = self.insert_default_dataset()
        self.login(username="alpha")
        uri = f"api/v1/dataset/{dataset.id}/refresh"
        rv = self.put_assert_metric(uri, {}, "refresh")
        assert rv.status_code == 403

        db.session.delete(dataset)
        db.session.commit()

    @unittest.skip("test is failing stochastically")
    def test_export_dataset(self):
        """
        Dataset API: Test export dataset
        """
        if backend() == "sqlite":
            return

        birth_names_dataset = self.get_birth_names_dataset()
        # TODO: fix test for presto
        # debug with dump: https://github.com/apache/superset/runs/1092546855
        if birth_names_dataset.database.backend in {"presto", "hive"}:
            return

        argument = [birth_names_dataset.id]
        uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"

        self.login(username="admin")
        rv = self.get_assert_metric(uri, "export")
        assert rv.status_code == 200

        cli_export = export_to_dict(
            session=db.session,
            recursive=True,
            back_references=False,
            include_defaults=False,
        )
        cli_export_tables = cli_export["databases"][0]["tables"]
        expected_response = {}
        for export_table in cli_export_tables:
            if export_table["table_name"] == "birth_names":
                expected_response = export_table
                break
        ui_export = yaml.safe_load(rv.data.decode("utf-8"))
        assert ui_export[0] == expected_response
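
    # The assertion above relies on the UI export (GET .../export) and the
    # `export_to_dict` CLI helper producing the same YAML representation for
    # the same table.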

    def test_export_dataset_not_found(self):
        """
        Dataset API: Test export dataset not found
        """
        if backend() == "sqlite":
            return

        max_id = db.session.query(func.max(SqlaTable.id)).scalar()
        # Just one does not exist and we get 404
        argument = [max_id + 1, 1]
        uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
        self.login(username="admin")
        rv = self.get_assert_metric(uri, "export")
        assert rv.status_code == 404

    @pytest.mark.usefixtures("create_datasets")
    def test_export_dataset_gamma(self):
        """
        Dataset API: Test export dataset as gamma user
        """
        if backend() == "sqlite":
            return

        dataset = self.get_fixture_datasets()[0]

        argument = [dataset.id]
        uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"

        self.login(username="gamma")
        rv = self.client.get(uri)
        assert rv.status_code == 403

        perm1 = security_manager.find_permission_view_menu("can_export", "Dataset")

        perm2 = security_manager.find_permission_view_menu(
            "datasource_access", dataset.perm
        )

        # add permissions to allow export + access to query this dataset
        gamma_role = security_manager.find_role("Gamma")
        security_manager.add_permission_role(gamma_role, perm1)
        security_manager.add_permission_role(gamma_role, perm2)

        rv = self.client.get(uri)
        assert rv.status_code == 200
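
    # Exporting as gamma requires both grants added above: "can_export" on the
    # Dataset view and "datasource_access" on the specific dataset's perm.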

    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_export_dataset_bundle(self):
        """
        Dataset API: Test export dataset bundle
        """
        if backend() == "sqlite":
            return

        birth_names_dataset = self.get_birth_names_dataset()
        # TODO: fix test for presto
        # debug with dump: https://github.com/apache/superset/runs/1092546855
        if birth_names_dataset.database.backend in {"presto", "hive"}:
            return

        argument = [birth_names_dataset.id]
        uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"

        self.login(username="admin")
        rv = self.get_assert_metric(uri, "export")

        assert rv.status_code == 200

        buf = BytesIO(rv.data)
        assert is_zipfile(buf)

    def test_export_dataset_bundle_not_found(self):
        """
        Dataset API: Test export dataset bundle not found
        """
        if backend() == "sqlite":
            return

        # Just one does not exist and we get 404
        argument = [-1, 1]
        uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
        self.login(username="admin")
        rv = self.get_assert_metric(uri, "export")

        assert rv.status_code == 404

    @pytest.mark.usefixtures("create_datasets")
    def test_export_dataset_bundle_gamma(self):
        """
        Dataset API: Test export dataset bundle as gamma user
        """
        if backend() == "sqlite":
            return

        dataset = self.get_fixture_datasets()[0]

        argument = [dataset.id]
        uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"

        self.login(username="gamma")
        rv = self.client.get(uri)
        # gamma users by default do not have access to this dataset
        assert rv.status_code == 403

    @unittest.skip("Number of related objects depends on DB")
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_get_dataset_related_objects(self):
        """
        Dataset API: Test get chart and dashboard count related to a dataset
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        table = self.get_birth_names_dataset()
        uri = f"api/v1/dataset/{table.id}/related_objects"
        rv = self.get_assert_metric(uri, "related_objects")
        response = json.loads(rv.data.decode("utf-8"))
        assert rv.status_code == 200
        assert response["charts"]["count"] == 18
        assert response["dashboards"]["count"] == 1

    def test_get_dataset_related_objects_not_found(self):
        """
        Dataset API: Test related objects not found
        """
        if backend() == "sqlite":
            return

        max_id = db.session.query(func.max(SqlaTable.id)).scalar()
        # id does not exist and we get 404
        invalid_id = max_id + 1
        uri = f"api/v1/dataset/{invalid_id}/related_objects/"
        self.login(username="admin")
        rv = self.client.get(uri)
        assert rv.status_code == 404
        self.logout()

        self.login(username="gamma")
        table = self.get_birth_names_dataset()
        uri = f"api/v1/dataset/{table.id}/related_objects"
        rv = self.client.get(uri)
        assert rv.status_code == 404

    @pytest.mark.usefixtures("create_datasets", "create_virtual_datasets")
    def test_get_datasets_custom_filter_sql(self):
        """
        Dataset API: Test custom dataset_is_null_or_empty filter for sql
        """
        if backend() == "sqlite":
            return

        arguments = {
            "filters": [
                {"col": "sql", "opr": "dataset_is_null_or_empty", "value": False}
            ]
        }
        self.login(username="admin")
        uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
        rv = self.client.get(uri)

        assert rv.status_code == 200

        data = json.loads(rv.data.decode("utf-8"))
        for table_name in self.fixture_virtual_table_names:
            assert table_name in [ds["table_name"] for ds in data["result"]]

        arguments = {
            "filters": [
                {"col": "sql", "opr": "dataset_is_null_or_empty", "value": True}
            ]
        }
        self.login(username="admin")
        uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
        rv = self.client.get(uri)
        assert rv.status_code == 200

        data = json.loads(rv.data.decode("utf-8"))
        for table_name in self.fixture_tables_names:
            assert table_name in [ds["table_name"] for ds in data["result"]]
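
    # The dataset_is_null_or_empty filter on "sql" separates virtual datasets
    # (defined by a SQL statement) from physical ones (sql is NULL or empty):
    # value=False matches the virtual fixtures, value=True the physical ones.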

    def test_import_dataset(self):
        """
        Dataset API: Test import dataset
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        uri = "api/v1/dataset/import/"

        buf = self.create_dataset_import()
        form_data = {
            "formData": (buf, "dataset_export.zip"),
            "sync_columns": "true",
            "sync_metrics": "true",
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 200
        assert response == {"message": "OK"}

        database = (
            db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
        )

        assert database.database_name == "imported_database"

        assert len(database.tables) == 1
        dataset = database.tables[0]
        assert dataset.table_name == "imported_dataset"
        assert str(dataset.uuid) == dataset_config["uuid"]

        dataset.owners = []
        db.session.delete(dataset)
        db.session.commit()
        db.session.delete(database)
        db.session.commit()
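
    # `create_dataset_import()` (a helper defined elsewhere in this module) is
    # assumed to build a ZIP bundle shaped like the one assembled by hand in
    # test_import_dataset_invalid below:
    #   dataset_export/metadata.yaml
    #   dataset_export/databases/imported_database.yaml
    #   dataset_export/datasets/imported_dataset.yaml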

    def test_import_dataset_v0_export(self):
        """
        Dataset API: Test import dataset from a v0 export
        """
        if backend() == "sqlite":
            return

        num_datasets = db.session.query(SqlaTable).count()

        self.login(username="admin")
        uri = "api/v1/dataset/import/"

        buf = BytesIO()
        buf.write(json.dumps(dataset_ui_export).encode())
        buf.seek(0)
        form_data = {
            "formData": (buf, "dataset_export.zip"),
            "sync_columns": "true",
            "sync_metrics": "true",
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 200
        assert response == {"message": "OK"}
        assert db.session.query(SqlaTable).count() == num_datasets + 1

        dataset = (
            db.session.query(SqlaTable).filter_by(table_name="birth_names_2").one()
        )
        db.session.delete(dataset)
        db.session.commit()

    def test_import_dataset_overwrite(self):
        """
        Dataset API: Test import existing dataset
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        uri = "api/v1/dataset/import/"

        buf = self.create_dataset_import()
        form_data = {
            "formData": (buf, "dataset_export.zip"),
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 200
        assert response == {"message": "OK"}

        # import again without overwrite flag
        buf = self.create_dataset_import()
        form_data = {
            "formData": (buf, "dataset_export.zip"),
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 422
        assert response == {
            "errors": [
                {
                    "message": "Error importing dataset",
                    "error_type": "GENERIC_COMMAND_ERROR",
                    "level": "warning",
                    "extra": {
                        "datasets/imported_dataset.yaml": "Dataset already exists and `overwrite=true` was not passed",
                        "issue_codes": [
                            {
                                "code": 1010,
                                "message": "Issue 1010 - Superset encountered an error while running a command.",
                            }
                        ],
                    },
                }
            ]
        }

        # import with overwrite flag
        buf = self.create_dataset_import()
        form_data = {
            "formData": (buf, "dataset_export.zip"),
            "overwrite": "true",
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 200
        assert response == {"message": "OK"}

        # clean up
        database = (
            db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
        )
        dataset = database.tables[0]

        dataset.owners = []
        db.session.delete(dataset)
        db.session.commit()
        db.session.delete(database)
        db.session.commit()
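
    # Re-importing an existing dataset only succeeds when the multipart form
    # carries "overwrite": "true"; without it the API answers 422 with a
    # GENERIC_COMMAND_ERROR, as asserted above.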

    def test_import_dataset_invalid(self):
        """
        Dataset API: Test import invalid dataset
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        uri = "api/v1/dataset/import/"

        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            with bundle.open("dataset_export/metadata.yaml", "w") as fp:
                fp.write(yaml.safe_dump(database_metadata_config).encode())
            with bundle.open(
                "dataset_export/databases/imported_database.yaml", "w"
            ) as fp:
                fp.write(yaml.safe_dump(database_config).encode())
            with bundle.open(
                "dataset_export/datasets/imported_dataset.yaml", "w"
            ) as fp:
                fp.write(yaml.safe_dump(dataset_config).encode())
        buf.seek(0)

        form_data = {
            "formData": (buf, "dataset_export.zip"),
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 422
        assert response == {
            "errors": [
                {
                    "message": "Error importing dataset",
                    "error_type": "GENERIC_COMMAND_ERROR",
                    "level": "warning",
                    "extra": {
                        "metadata.yaml": {"type": ["Must be equal to SqlaTable."]},
                        "issue_codes": [
                            {
                                "code": 1010,
                                "message": (
                                    "Issue 1010 - Superset encountered "
                                    "an error while running a command."
                                ),
                            }
                        ],
                    },
                }
            ]
        }

    def test_import_dataset_invalid_v0_validation(self):
        """
        Dataset API: Test import invalid dataset, v0 validation
        """
        if backend() == "sqlite":
            return

        self.login(username="admin")
        uri = "api/v1/dataset/import/"

        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            with bundle.open(
                "dataset_export/databases/imported_database.yaml", "w"
            ) as fp:
                fp.write(yaml.safe_dump(database_config).encode())
            with bundle.open(
                "dataset_export/datasets/imported_dataset.yaml", "w"
            ) as fp:
                fp.write(yaml.safe_dump(dataset_config).encode())
        buf.seek(0)

        form_data = {
            "formData": (buf, "dataset_export.zip"),
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 422
        assert response == {
            "errors": [
                {
                    "message": "Could not find a valid command to import file",
                    "error_type": "GENERIC_COMMAND_ERROR",
                    "level": "warning",
                    "extra": {
                        "issue_codes": [
                            {
                                "code": 1010,
                                "message": "Issue 1010 - Superset encountered an error while running a command.",
                            }
                        ]
                    },
                }
            ]
        }

    @pytest.mark.usefixtures("create_datasets")
    def test_get_datasets_is_certified_filter(self):
        """
        Dataset API: Test custom dataset_is_certified filter
        """
        if backend() == "sqlite":
            return

        table_w_certification = SqlaTable(
            table_name="foo",
            schema=None,
            owners=[],
            database=get_main_database(),
            sql=None,
            extra='{"certification": 1}',
        )
        db.session.add(table_w_certification)
        db.session.commit()

        arguments = {
            "filters": [{"col": "id", "opr": "dataset_is_certified", "value": True}]
        }
        self.login(username="admin")
        uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
        rv = self.client.get(uri)

        assert rv.status_code == 200
        response = json.loads(rv.data.decode("utf-8"))
        assert response.get("count") == 1

        db.session.delete(table_w_certification)
        db.session.commit()
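
    # The dataset_is_certified filter matches datasets whose `extra` JSON
    # carries a "certification" entry, as set on table_w_certification above.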

    @pytest.mark.usefixtures("create_virtual_datasets")
    def test_duplicate_virtual_dataset(self):
        """
        Dataset API: Test duplicate virtual dataset
        """
        if backend() == "sqlite":
            return

        dataset = self.get_fixture_virtual_datasets()[0]

        self.login(username="admin")
        uri = "api/v1/dataset/duplicate"
        table_data = {"base_model_id": dataset.id, "table_name": "Dupe1"}
        rv = self.post_assert_metric(uri, table_data, "duplicate")
        assert rv.status_code == 201
        rv_data = json.loads(rv.data)
        new_dataset: SqlaTable = (
            db.session.query(SqlaTable).filter_by(id=rv_data["id"]).one_or_none()
        )
        assert new_dataset is not None
        assert new_dataset.id != dataset.id
        assert new_dataset.table_name == "Dupe1"
        assert len(new_dataset.columns) == 2
        assert new_dataset.columns[0].column_name == "id"
        assert new_dataset.columns[1].column_name == "name"
        db.session.delete(new_dataset)
        db.session.commit()

    @pytest.mark.usefixtures("create_datasets")
    def test_duplicate_physical_dataset(self):
        """
        Dataset API: Test duplicate physical dataset
        """
        if backend() == "sqlite":
            return

        dataset = self.get_fixture_datasets()[0]

        self.login(username="admin")
        uri = "api/v1/dataset/duplicate"
        table_data = {"base_model_id": dataset.id, "table_name": "Dupe2"}
        rv = self.post_assert_metric(uri, table_data, "duplicate")
        assert rv.status_code == 422
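
    # Duplication is only supported for virtual (SQL-defined) datasets;
    # attempting to duplicate a physical table is rejected with 422 above.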

    @pytest.mark.usefixtures("create_virtual_datasets")
    def test_duplicate_existing_dataset(self):
        """
        Dataset API: Test duplicate dataset with existing name
        """
        if backend() == "sqlite":
            return

        dataset = self.get_fixture_virtual_datasets()[0]

        self.login(username="admin")
        uri = "api/v1/dataset/duplicate"
        table_data = {
            "base_model_id": dataset.id,
            "table_name": "sql_virtual_dataset_2",
        }
        rv = self.post_assert_metric(uri, table_data, "duplicate")
        assert rv.status_code == 422

    def test_duplicate_invalid_dataset(self):
        """
        Dataset API: Test duplicate invalid dataset
        """
        self.login(username="admin")
        uri = "api/v1/dataset/duplicate"
        table_data = {
            "base_model_id": -1,
            "table_name": "Dupe3",
        }
        rv = self.post_assert_metric(uri, table_data, "duplicate")
        assert rv.status_code == 422

    @pytest.mark.usefixtures("app_context", "virtual_dataset")
    def test_get_or_create_dataset_already_exists(self):
        """
        Dataset API: Test get or create endpoint when table already exists
        """
        self.login(username="admin")
        rv = self.client.post(
            "api/v1/dataset/get_or_create/",
            json={
                "table_name": "virtual_dataset",
                "database_id": get_example_database().id,
            },
        )
        self.assertEqual(rv.status_code, 200)
        response = json.loads(rv.data.decode("utf-8"))
        dataset = (
            db.session.query(SqlaTable)
            .filter(SqlaTable.table_name == "virtual_dataset")
            .one()
        )
        self.assertEqual(response["result"], {"table_id": dataset.id})
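
    # get_or_create is idempotent here: since `virtual_dataset` already
    # exists, the endpoint returns the existing table id rather than creating
    # a duplicate.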

    def test_get_or_create_dataset_database_not_found(self):
        """
        Dataset API: Test get or create endpoint when database doesn't exist
        """
        self.login(username="admin")
        rv = self.client.post(
            "api/v1/dataset/get_or_create/",
            json={"table_name": "virtual_dataset", "database_id": 999},
        )
        self.assertEqual(rv.status_code, 422)
        response = json.loads(rv.data.decode("utf-8"))
        self.assertEqual(response["message"], {"database": ["Database does not exist"]})

    @patch("superset.datasets.commands.create.CreateDatasetCommand.run")
    def test_get_or_create_dataset_create_fails(self, command_run_mock):
        """
        Dataset API: Test get or create endpoint when create fails
        """
        command_run_mock.side_effect = DatasetCreateFailedError
        self.login(username="admin")
        rv = self.client.post(
            "api/v1/dataset/get_or_create/",
            json={
                "table_name": "virtual_dataset",
                "database_id": get_example_database().id,
            },
        )
        self.assertEqual(rv.status_code, 422)
        response = json.loads(rv.data.decode("utf-8"))
        self.assertEqual(response["message"], "Dataset could not be created.")

    def test_get_or_create_dataset_creates_table(self):
        """
        Dataset API: Test get or create endpoint when table is created
        """
        self.login(username="admin")

        examples_db = get_example_database()
        with examples_db.get_sqla_engine_with_context() as engine:
            engine.execute("DROP TABLE IF EXISTS test_create_sqla_table_api")
            engine.execute("CREATE TABLE test_create_sqla_table_api AS SELECT 2 as col")

        rv = self.client.post(
            "api/v1/dataset/get_or_create/",
            json={
                "table_name": "test_create_sqla_table_api",
                "database_id": examples_db.id,
                "template_params": '{"param": 1}',
            },
        )
        self.assertEqual(rv.status_code, 200)
        response = json.loads(rv.data.decode("utf-8"))
        table = (
            db.session.query(SqlaTable)
            .filter_by(table_name="test_create_sqla_table_api")
            .one()
        )
        self.assertEqual(response["result"], {"table_id": table.id})
        self.assertEqual(table.template_params, '{"param": 1}')

        db.session.delete(table)
        with examples_db.get_sqla_engine_with_context() as engine:
            engine.execute("DROP TABLE test_create_sqla_table_api")
        db.session.commit()