2020-03-08 05:13:08 -04:00
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
# or more contributor license agreements. See the NOTICE file
|
|
|
|
# distributed with this work for additional information
|
|
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
|
|
# to you under the Apache License, Version 2.0 (the
|
|
|
|
# "License"); you may not use this file except in compliance
|
|
|
|
# with the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing,
|
|
|
|
# software distributed under the License is distributed on an
|
|
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
# KIND, either express or implied. See the License for the
|
|
|
|
# specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2020-11-17 17:49:33 -05:00
|
|
|
# pylint: disable=too-many-public-methods, invalid-name
|
2020-03-08 05:13:08 -04:00
|
|
|
"""Unit tests for Superset"""
|
|
|
|
import json
|
2020-10-22 13:32:08 -04:00
|
|
|
from io import BytesIO
|
2020-10-29 16:11:33 -04:00
|
|
|
from typing import List, Optional
|
2020-03-08 05:13:08 -04:00
|
|
|
from unittest.mock import patch
|
2020-11-17 17:49:33 -05:00
|
|
|
from zipfile import is_zipfile, ZipFile
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
import prison
|
2020-10-12 08:40:05 -04:00
|
|
|
import pytest
|
2020-03-27 05:30:23 -04:00
|
|
|
import yaml
|
2020-03-24 13:24:08 -04:00
|
|
|
from sqlalchemy.sql import func
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
|
2020-03-20 12:32:03 -04:00
|
|
|
from superset.dao.exceptions import (
|
|
|
|
DAOCreateFailedError,
|
|
|
|
DAODeleteFailedError,
|
|
|
|
DAOUpdateFailedError,
|
|
|
|
)
|
2020-04-08 03:44:35 -04:00
|
|
|
from superset.extensions import db, security_manager
|
2020-03-08 05:13:08 -04:00
|
|
|
from superset.models.core import Database
|
2020-09-29 18:01:01 -04:00
|
|
|
from superset.utils.core import backend, get_example_database, get_main_database
|
2020-03-27 05:30:23 -04:00
|
|
|
from superset.utils.dict_import_export import export_to_dict
|
2020-03-24 13:24:08 -04:00
|
|
|
from tests.base_tests import SupersetTestCase
|
2020-09-29 18:01:01 -04:00
|
|
|
from tests.conftest import CTAS_SCHEMA_NAME
|
2020-11-17 17:49:33 -05:00
|
|
|
from tests.fixtures.importexport import (
|
|
|
|
database_config,
|
|
|
|
database_metadata_config,
|
|
|
|
dataset_config,
|
|
|
|
dataset_metadata_config,
|
|
|
|
)
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
|
2020-06-29 18:36:06 -04:00
|
|
|
class TestDatasetApi(SupersetTestCase):
|
2020-10-12 08:40:05 -04:00
|
|
|
|
|
|
|
fixture_tables_names = ("ab_permission", "ab_permission_view", "ab_view_menu")
|
2020-10-29 16:11:33 -04:00
|
|
|
fixture_virtual_table_names = ("sql_virtual_dataset_1", "sql_virtual_dataset_2")
|
2020-10-12 08:40:05 -04:00
|
|
|
|
2020-03-08 05:13:08 -04:00
|
|
|
@staticmethod
|
|
|
|
def insert_dataset(
|
2020-10-29 16:11:33 -04:00
|
|
|
table_name: str,
|
|
|
|
schema: str,
|
|
|
|
owners: List[int],
|
|
|
|
database: Database,
|
|
|
|
sql: Optional[str] = None,
|
2020-03-08 05:13:08 -04:00
|
|
|
) -> SqlaTable:
|
|
|
|
obj_owners = list()
|
|
|
|
for owner in owners:
|
|
|
|
user = db.session.query(security_manager.user_model).get(owner)
|
|
|
|
obj_owners.append(user)
|
|
|
|
table = SqlaTable(
|
2020-10-29 16:11:33 -04:00
|
|
|
table_name=table_name,
|
|
|
|
schema=schema,
|
|
|
|
owners=obj_owners,
|
|
|
|
database=database,
|
|
|
|
sql=sql,
|
2020-03-08 05:13:08 -04:00
|
|
|
)
|
|
|
|
db.session.add(table)
|
|
|
|
db.session.commit()
|
2020-03-24 13:24:08 -04:00
|
|
|
table.fetch_metadata()
|
2020-03-08 05:13:08 -04:00
|
|
|
return table
|
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
def insert_default_dataset(self):
|
|
|
|
return self.insert_dataset(
|
2020-08-06 15:07:22 -04:00
|
|
|
"ab_permission", "", [self.get_user("admin").id], get_main_database()
|
2020-03-24 13:24:08 -04:00
|
|
|
)
|
|
|
|
|
2020-10-12 08:40:05 -04:00
|
|
|
def get_fixture_datasets(self) -> List[SqlaTable]:
|
|
|
|
return (
|
|
|
|
db.session.query(SqlaTable)
|
|
|
|
.filter(SqlaTable.table_name.in_(self.fixture_tables_names))
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
|
2020-10-29 16:11:33 -04:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_virtual_datasets(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
datasets = []
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
main_db = get_main_database()
|
|
|
|
for table_name in self.fixture_virtual_table_names:
|
|
|
|
datasets.append(
|
|
|
|
self.insert_dataset(
|
|
|
|
table_name,
|
|
|
|
"",
|
|
|
|
[admin.id],
|
|
|
|
main_db,
|
|
|
|
"SELECT * from ab_view_menu;",
|
|
|
|
)
|
|
|
|
)
|
|
|
|
yield datasets
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
for dataset in datasets:
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-10-12 08:40:05 -04:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_datasets(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
datasets = []
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
main_db = get_main_database()
|
|
|
|
for tables_name in self.fixture_tables_names:
|
|
|
|
datasets.append(
|
|
|
|
self.insert_dataset(tables_name, "", [admin.id], main_db)
|
|
|
|
)
|
|
|
|
yield datasets
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
for dataset in datasets:
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-08-10 16:20:19 -04:00
|
|
|
@staticmethod
|
|
|
|
def get_energy_usage_dataset():
|
|
|
|
example_db = get_example_database()
|
|
|
|
return (
|
|
|
|
db.session.query(SqlaTable)
|
|
|
|
.filter_by(database=example_db, table_name="energy_usage")
|
|
|
|
.one()
|
|
|
|
)
|
|
|
|
|
2020-03-08 05:13:08 -04:00
|
|
|
def test_get_dataset_list(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test get dataset list
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
example_db = get_example_database()
|
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{"col": "database", "opr": "rel_o_m", "value": f"{example_db.id}"},
|
2020-11-17 17:49:33 -05:00
|
|
|
{"col": "table_name", "opr": "eq", "value": "birth_names"},
|
2020-03-08 05:13:08 -04:00
|
|
|
]
|
|
|
|
}
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-08 05:13:08 -04:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert response["count"] == 1
|
2020-03-08 05:13:08 -04:00
|
|
|
expected_columns = [
|
|
|
|
"changed_by",
|
2020-03-13 15:35:00 -04:00
|
|
|
"changed_by_name",
|
|
|
|
"changed_by_url",
|
2020-07-22 06:34:47 -04:00
|
|
|
"changed_on_delta_humanized",
|
|
|
|
"changed_on_utc",
|
2020-07-23 14:19:05 -04:00
|
|
|
"database",
|
2020-05-06 06:48:32 -04:00
|
|
|
"default_endpoint",
|
2020-03-13 15:35:00 -04:00
|
|
|
"explore_url",
|
2020-10-30 14:28:01 -04:00
|
|
|
"extra",
|
2020-03-13 15:35:00 -04:00
|
|
|
"id",
|
2020-06-10 15:04:40 -04:00
|
|
|
"kind",
|
|
|
|
"owners",
|
2020-03-08 05:13:08 -04:00
|
|
|
"schema",
|
2020-06-22 03:21:06 -04:00
|
|
|
"sql",
|
2020-03-08 05:13:08 -04:00
|
|
|
"table_name",
|
|
|
|
]
|
2020-10-22 03:56:26 -04:00
|
|
|
assert sorted(list(response["result"][0].keys())) == expected_columns
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_get_dataset_list_gamma(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test get dataset list gamma
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-08 05:13:08 -04:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert response["result"] == []
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_get_dataset_related_database_gamma(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test get dataset related databases gamma
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
|
|
|
uri = "api/v1/dataset/related/database"
|
|
|
|
rv = self.client.get(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-08 05:13:08 -04:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert response["count"] == 0
|
|
|
|
assert response["result"] == []
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_get_dataset_item(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test get dataset item
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-08-10 16:20:19 -04:00
|
|
|
table = self.get_energy_usage_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{table.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-08 05:13:08 -04:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_result = {
|
|
|
|
"cache_timeout": None,
|
|
|
|
"database": {"database_name": "examples", "id": 1},
|
|
|
|
"default_endpoint": None,
|
2020-08-10 16:20:19 -04:00
|
|
|
"description": "Energy consumption",
|
2020-10-30 14:28:01 -04:00
|
|
|
"extra": None,
|
2020-03-08 05:13:08 -04:00
|
|
|
"fetch_values_predicate": None,
|
2020-08-10 16:20:19 -04:00
|
|
|
"filter_select_enabled": False,
|
2020-03-08 05:13:08 -04:00
|
|
|
"is_sqllab_view": False,
|
2020-08-10 16:20:19 -04:00
|
|
|
"main_dttm_col": None,
|
2020-03-08 05:13:08 -04:00
|
|
|
"offset": 0,
|
|
|
|
"owners": [],
|
|
|
|
"schema": None,
|
|
|
|
"sql": None,
|
2020-08-10 16:20:19 -04:00
|
|
|
"table_name": "energy_usage",
|
2020-03-08 05:13:08 -04:00
|
|
|
"template_params": None,
|
|
|
|
}
|
2020-08-10 16:20:19 -04:00
|
|
|
assert {
|
|
|
|
k: v for k, v in response["result"].items() if k in expected_result
|
|
|
|
} == expected_result
|
2020-10-22 03:56:26 -04:00
|
|
|
assert len(response["result"]["columns"]) == 3
|
|
|
|
assert len(response["result"]["metrics"]) == 2
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-08-17 10:46:59 -04:00
|
|
|
def test_get_dataset_distinct_schema(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test get dataset distinct schema
|
|
|
|
"""
|
|
|
|
|
|
|
|
def pg_test_query_parameter(query_parameter, expected_response):
|
|
|
|
uri = f"api/v1/dataset/distinct/schema?q={prison.dumps(query_parameter)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
|
|
|
assert response == expected_response
|
2020-08-17 10:46:59 -04:00
|
|
|
|
|
|
|
example_db = get_example_database()
|
|
|
|
datasets = []
|
|
|
|
if example_db.backend == "postgresql":
|
|
|
|
datasets.append(
|
|
|
|
self.insert_dataset("ab_permission", "public", [], get_main_database())
|
|
|
|
)
|
|
|
|
datasets.append(
|
|
|
|
self.insert_dataset(
|
|
|
|
"columns", "information_schema", [], get_main_database()
|
|
|
|
)
|
|
|
|
)
|
2020-09-02 14:07:37 -04:00
|
|
|
schema_values = [
|
|
|
|
"",
|
|
|
|
"admin_database",
|
|
|
|
"information_schema",
|
|
|
|
"public",
|
|
|
|
]
|
2020-08-17 10:46:59 -04:00
|
|
|
expected_response = {
|
2020-10-16 14:10:39 -04:00
|
|
|
"count": 4,
|
2020-09-02 14:07:37 -04:00
|
|
|
"result": [{"text": val, "value": val} for val in schema_values],
|
2020-08-17 10:46:59 -04:00
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/dataset/distinct/schema"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
|
|
|
assert response == expected_response
|
2020-08-17 10:46:59 -04:00
|
|
|
|
|
|
|
# Test filter
|
|
|
|
query_parameter = {"filter": "inf"}
|
|
|
|
pg_test_query_parameter(
|
|
|
|
query_parameter,
|
2020-09-02 14:07:37 -04:00
|
|
|
{
|
|
|
|
"count": 1,
|
|
|
|
"result": [
|
|
|
|
{"text": "information_schema", "value": "information_schema"}
|
|
|
|
],
|
|
|
|
},
|
2020-08-17 10:46:59 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
query_parameter = {"page": 0, "page_size": 1}
|
|
|
|
pg_test_query_parameter(
|
2020-10-16 14:10:39 -04:00
|
|
|
query_parameter, {"count": 4, "result": [{"text": "", "value": ""}]},
|
2020-08-17 10:46:59 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
query_parameter = {"page": 1, "page_size": 1}
|
|
|
|
pg_test_query_parameter(
|
2020-09-02 14:07:37 -04:00
|
|
|
query_parameter,
|
|
|
|
{
|
2020-10-16 14:10:39 -04:00
|
|
|
"count": 4,
|
2020-09-02 14:07:37 -04:00
|
|
|
"result": [{"text": "admin_database", "value": "admin_database"}],
|
|
|
|
},
|
2020-08-17 10:46:59 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
for dataset in datasets:
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_get_dataset_distinct_not_allowed(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test get dataset distinct not allowed
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/dataset/distinct/table_name"
|
|
|
|
rv = self.client.get(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 404
|
2020-08-17 10:46:59 -04:00
|
|
|
|
|
|
|
def test_get_dataset_distinct_gamma(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test get dataset distinct with gamma
|
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
|
|
|
|
self.login(username="gamma")
|
|
|
|
uri = "api/v1/dataset/distinct/schema"
|
|
|
|
rv = self.client.get(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-08-17 10:46:59 -04:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert response["count"] == 0
|
|
|
|
assert response["result"] == []
|
2020-08-17 10:46:59 -04:00
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-03-08 05:13:08 -04:00
|
|
|
def test_get_dataset_info(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test get dataset info
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/dataset/_info"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "info")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_create_dataset_item(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create dataset item
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-08-06 15:07:22 -04:00
|
|
|
main_db = get_main_database()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
table_data = {
|
2020-08-06 15:07:22 -04:00
|
|
|
"database": main_db.id,
|
2020-03-08 05:13:08 -04:00
|
|
|
"schema": "",
|
|
|
|
"table_name": "ab_permission",
|
|
|
|
}
|
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.post_assert_metric(uri, table_data, "post")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 201
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-03-24 13:24:08 -04:00
|
|
|
table_id = data.get("id")
|
|
|
|
model = db.session.query(SqlaTable).get(table_id)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert model.table_name == table_data["table_name"]
|
|
|
|
assert model.database_id == table_data["database"]
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
# Assert that columns were created
|
|
|
|
columns = (
|
|
|
|
db.session.query(TableColumn)
|
|
|
|
.filter_by(table_id=table_id)
|
|
|
|
.order_by("column_name")
|
|
|
|
.all()
|
|
|
|
)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert columns[0].column_name == "id"
|
|
|
|
assert columns[1].column_name == "name"
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
# Assert that metrics were created
|
|
|
|
columns = (
|
|
|
|
db.session.query(SqlMetric)
|
|
|
|
.filter_by(table_id=table_id)
|
|
|
|
.order_by("metric_name")
|
|
|
|
.all()
|
|
|
|
)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert columns[0].expression == "COUNT(*)"
|
2020-03-24 13:24:08 -04:00
|
|
|
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_create_dataset_item_gamma(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create dataset item gamma
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
2020-08-06 15:07:22 -04:00
|
|
|
main_db = get_main_database()
|
2020-03-08 05:13:08 -04:00
|
|
|
table_data = {
|
2020-08-06 15:07:22 -04:00
|
|
|
"database": main_db.id,
|
2020-03-08 05:13:08 -04:00
|
|
|
"schema": "",
|
|
|
|
"table_name": "ab_permission",
|
|
|
|
}
|
|
|
|
uri = "api/v1/dataset/"
|
|
|
|
rv = self.client.post(uri, json=table_data)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 401
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_create_dataset_item_owner(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create item owner
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-08-06 15:07:22 -04:00
|
|
|
main_db = get_main_database()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="alpha")
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
alpha = self.get_user("alpha")
|
|
|
|
|
|
|
|
table_data = {
|
2020-08-06 15:07:22 -04:00
|
|
|
"database": main_db.id,
|
2020-03-08 05:13:08 -04:00
|
|
|
"schema": "",
|
|
|
|
"table_name": "ab_permission",
|
|
|
|
"owners": [admin.id],
|
|
|
|
}
|
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.post_assert_metric(uri, table_data, "post")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 201
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
model = db.session.query(SqlaTable).get(data.get("id"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert admin in model.owners
|
|
|
|
assert alpha in model.owners
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_create_dataset_item_owners_invalid(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create dataset item owner invalid
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
2020-08-06 15:07:22 -04:00
|
|
|
main_db = get_main_database()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
table_data = {
|
2020-08-06 15:07:22 -04:00
|
|
|
"database": main_db.id,
|
2020-03-08 05:13:08 -04:00
|
|
|
"schema": "",
|
|
|
|
"table_name": "ab_permission",
|
|
|
|
"owners": [admin.id, 1000],
|
|
|
|
}
|
2020-11-17 17:49:33 -05:00
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.post_assert_metric(uri, table_data, "post")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_result = {"message": {"owners": ["Owners are invalid"]}}
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == expected_result
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_create_dataset_validate_uniqueness(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create dataset validate table uniqueness
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
example_db = get_example_database()
|
|
|
|
self.login(username="admin")
|
|
|
|
table_data = {
|
|
|
|
"database": example_db.id,
|
|
|
|
"schema": "",
|
|
|
|
"table_name": "birth_names",
|
|
|
|
}
|
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.post_assert_metric(uri, table_data, "post")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == {
|
|
|
|
"message": {"table_name": ["Datasource birth_names already exists"]}
|
|
|
|
}
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-09-29 18:01:01 -04:00
|
|
|
def test_create_dataset_same_name_different_schema(self):
|
|
|
|
if backend() == "sqlite":
|
|
|
|
# sqlite doesn't support schemas
|
|
|
|
return
|
|
|
|
|
|
|
|
example_db = get_example_database()
|
|
|
|
example_db.get_sqla_engine().execute(
|
|
|
|
f"CREATE TABLE {CTAS_SCHEMA_NAME}.birth_names AS SELECT 2 as two"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
table_data = {
|
|
|
|
"database": example_db.id,
|
|
|
|
"schema": CTAS_SCHEMA_NAME,
|
|
|
|
"table_name": "birth_names",
|
|
|
|
}
|
|
|
|
|
|
|
|
uri = "api/v1/dataset/"
|
|
|
|
rv = self.post_assert_metric(uri, table_data, "post")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 201
|
2020-09-29 18:01:01 -04:00
|
|
|
|
|
|
|
# cleanup
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
uri = f'api/v1/dataset/{data.get("id")}'
|
|
|
|
rv = self.client.delete(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-09-29 18:01:01 -04:00
|
|
|
example_db.get_sqla_engine().execute(
|
|
|
|
f"DROP TABLE {CTAS_SCHEMA_NAME}.birth_names"
|
|
|
|
)
|
|
|
|
|
2020-03-08 05:13:08 -04:00
|
|
|
def test_create_dataset_validate_database(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create dataset validate database exists
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset_data = {"database": 1000, "schema": "", "table_name": "birth_names"}
|
2020-03-08 05:13:08 -04:00
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.post_assert_metric(uri, dataset_data, "post")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == {"message": {"database": ["Database does not exist"]}}
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_create_dataset_validate_tables_exists(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create dataset validate table exists
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
|
|
|
example_db = get_example_database()
|
|
|
|
self.login(username="admin")
|
|
|
|
table_data = {
|
|
|
|
"database": example_db.id,
|
|
|
|
"schema": "",
|
|
|
|
"table_name": "does_not_exist",
|
|
|
|
}
|
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.post_assert_metric(uri, table_data, "post")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
@patch("superset.datasets.dao.DatasetDAO.create")
|
|
|
|
def test_create_dataset_sqlalchemy_error(self, mock_dao_create):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test create dataset sqlalchemy error
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-20 12:32:03 -04:00
|
|
|
mock_dao_create.side_effect = DAOCreateFailedError()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
2020-08-06 15:07:22 -04:00
|
|
|
main_db = get_main_database()
|
2020-03-08 05:13:08 -04:00
|
|
|
dataset_data = {
|
2020-08-06 15:07:22 -04:00
|
|
|
"database": main_db.id,
|
2020-03-08 05:13:08 -04:00
|
|
|
"schema": "",
|
|
|
|
"table_name": "ab_permission",
|
|
|
|
}
|
|
|
|
uri = "api/v1/dataset/"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.post_assert_metric(uri, dataset_data, "post")
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
|
|
|
assert data == {"message": "Dataset could not be created."}
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_update_dataset_item(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset item
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset_data = {"description": "changed_description"}
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, dataset_data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-24 13:24:08 -04:00
|
|
|
model = db.session.query(SqlaTable).get(dataset.id)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert model.description == dataset_data["description"]
|
2020-08-17 10:46:59 -04:00
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-30 12:23:11 -05:00
|
|
|
def test_update_dataset_item_w_override_columns(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test update dataset with override columns
|
|
|
|
"""
|
|
|
|
# Add default dataset
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
self.login(username="admin")
|
|
|
|
dataset_data = {
|
|
|
|
"columns": [
|
|
|
|
{
|
|
|
|
"column_name": "new_col",
|
|
|
|
"description": "description",
|
|
|
|
"expression": "expression",
|
|
|
|
"type": "INTEGER",
|
|
|
|
"verbose_name": "New Col",
|
|
|
|
}
|
|
|
|
],
|
|
|
|
"description": "changed description",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}?override_columns=true"
|
|
|
|
rv = self.put_assert_metric(uri, dataset_data, "put")
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
columns = (
|
|
|
|
db.session.query(TableColumn)
|
|
|
|
.filter_by(table_id=dataset.id)
|
|
|
|
.order_by("column_name")
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
|
|
|
|
assert columns[0].column_name == dataset_data["columns"][0]["column_name"]
|
|
|
|
assert columns[0].description == dataset_data["columns"][0]["description"]
|
|
|
|
assert columns[0].expression == dataset_data["columns"][0]["expression"]
|
|
|
|
assert columns[0].type == dataset_data["columns"][0]["type"]
|
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
def test_update_dataset_create_column(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset create column
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
# create example dataset by Command
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
|
|
|
|
new_column_data = {
|
|
|
|
"column_name": "new_col",
|
|
|
|
"description": "description",
|
|
|
|
"expression": "expression",
|
|
|
|
"type": "INTEGER",
|
|
|
|
"verbose_name": "New Col",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
|
|
|
# Get current cols and append the new column
|
|
|
|
self.login(username="admin")
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-03-24 13:24:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-07-07 08:26:54 -04:00
|
|
|
|
|
|
|
for column in data["result"]["columns"]:
|
|
|
|
column.pop("changed_on", None)
|
|
|
|
column.pop("created_on", None)
|
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
data["result"]["columns"].append(new_column_data)
|
|
|
|
rv = self.client.put(uri, json={"columns": data["result"]["columns"]})
|
|
|
|
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
columns = (
|
|
|
|
db.session.query(TableColumn)
|
|
|
|
.filter_by(table_id=dataset.id)
|
|
|
|
.order_by("column_name")
|
|
|
|
.all()
|
|
|
|
)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert columns[0].column_name == "id"
|
|
|
|
assert columns[1].column_name == "name"
|
|
|
|
assert columns[2].column_name == new_column_data["column_name"]
|
|
|
|
assert columns[2].description == new_column_data["description"]
|
|
|
|
assert columns[2].expression == new_column_data["expression"]
|
|
|
|
assert columns[2].type == new_column_data["type"]
|
|
|
|
assert columns[2].verbose_name == new_column_data["verbose_name"]
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_update_column(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset columns
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
|
|
|
# Get current cols and alter one
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-03-24 13:24:08 -04:00
|
|
|
resp_columns = json.loads(rv.data.decode("utf-8"))["result"]["columns"]
|
2020-07-07 08:26:54 -04:00
|
|
|
for column in resp_columns:
|
|
|
|
column.pop("changed_on", None)
|
|
|
|
column.pop("created_on", None)
|
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
resp_columns[0]["groupby"] = False
|
|
|
|
resp_columns[0]["filterable"] = False
|
2020-08-27 12:49:18 -04:00
|
|
|
rv = self.client.put(uri, json={"columns": resp_columns})
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-24 13:24:08 -04:00
|
|
|
columns = (
|
|
|
|
db.session.query(TableColumn)
|
|
|
|
.filter_by(table_id=dataset.id)
|
|
|
|
.order_by("column_name")
|
|
|
|
.all()
|
|
|
|
)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert columns[0].column_name == "id"
|
|
|
|
assert columns[1].column_name, "name"
|
2020-08-27 12:49:18 -04:00
|
|
|
# TODO(bkyryliuk): find the reason why update is failing for the presto database
|
|
|
|
if get_example_database().backend != "presto":
|
2020-10-22 03:56:26 -04:00
|
|
|
assert columns[0].groupby is False
|
|
|
|
assert columns[0].filterable is False
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_update_column_uniqueness(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset columns uniqueness
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
|
|
|
# try to insert a new column ID that already exists
|
|
|
|
data = {"columns": [{"column_name": "id", "type": "INTEGER"}]}
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-24 13:24:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_result = {
|
|
|
|
"message": {"columns": ["One or more columns already exist"]}
|
|
|
|
}
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == expected_result
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_update_metric_uniqueness(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset metric uniqueness
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
|
|
|
# try to insert a new column ID that already exists
|
|
|
|
data = {"metrics": [{"metric_name": "count", "expression": "COUNT(*)"}]}
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-24 13:24:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_result = {
|
|
|
|
"message": {"metrics": ["One or more metrics already exist"]}
|
|
|
|
}
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == expected_result
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_update_column_duplicate(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset columns duplicate
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
|
|
|
# try to insert a new column ID that already exists
|
|
|
|
data = {
|
|
|
|
"columns": [
|
|
|
|
{"column_name": "id", "type": "INTEGER"},
|
|
|
|
{"column_name": "id", "type": "VARCHAR"},
|
|
|
|
]
|
|
|
|
}
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-24 13:24:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_result = {
|
|
|
|
"message": {"columns": ["One or more columns are duplicated"]}
|
|
|
|
}
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == expected_result
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_update_metric_duplicate(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset metric duplicate
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
|
|
|
# try to insert a new column ID that already exists
|
|
|
|
data = {
|
|
|
|
"metrics": [
|
|
|
|
{"metric_name": "dup", "expression": "COUNT(*)"},
|
|
|
|
{"metric_name": "dup", "expression": "DIFF_COUNT(*)"},
|
|
|
|
]
|
|
|
|
}
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-24 13:24:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_result = {
|
|
|
|
"message": {"metrics": ["One or more metrics are duplicated"]}
|
|
|
|
}
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == expected_result
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_item_gamma(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset item gamma
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="gamma")
|
|
|
|
table_data = {"description": "changed_description"}
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-03-08 05:13:08 -04:00
|
|
|
rv = self.client.put(uri, json=table_data)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 401
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_item_not_owned(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset item not owned
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="alpha")
|
|
|
|
table_data = {"description": "changed_description"}
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, table_data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 403
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_item_owners_invalid(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset item owner invalid
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
table_data = {"description": "changed_description", "owners": [1000]}
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, table_data, "put")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_dataset_item_uniqueness(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset uniqueness
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
2020-08-06 15:07:22 -04:00
|
|
|
ab_user = self.insert_dataset(
|
|
|
|
"ab_user", "", [self.get_user("admin").id], get_main_database()
|
|
|
|
)
|
|
|
|
table_data = {"table_name": "ab_user"}
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, table_data, "put")
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
2020-03-08 05:13:08 -04:00
|
|
|
expected_response = {
|
2020-08-06 15:07:22 -04:00
|
|
|
"message": {"table_name": ["Datasource ab_user already exists"]}
|
2020-03-08 05:13:08 -04:00
|
|
|
}
|
2020-10-22 03:56:26 -04:00
|
|
|
assert data == expected_response
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-08-06 15:07:22 -04:00
|
|
|
db.session.delete(ab_user)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
@patch("superset.datasets.dao.DatasetDAO.update")
|
|
|
|
def test_update_dataset_sqlalchemy_error(self, mock_dao_update):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test update dataset sqlalchemy error
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-20 12:32:03 -04:00
|
|
|
mock_dao_update.side_effect = DAOUpdateFailedError()
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
table_data = {"description": "changed_description"}
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-03-08 05:13:08 -04:00
|
|
|
rv = self.client.put(uri, json=table_data)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
|
|
|
assert data == {"message": "Dataset could not be updated."}
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-03-08 05:13:08 -04:00
|
|
|
def test_delete_dataset_item(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test delete dataset item
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-08-13 05:18:13 -04:00
|
|
|
view_menu = security_manager.find_view_menu(dataset.get_perm())
|
2020-10-22 03:56:26 -04:00
|
|
|
assert view_menu is not None
|
2020-08-13 05:18:13 -04:00
|
|
|
view_menu_id = view_menu.id
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-03-08 05:13:08 -04:00
|
|
|
rv = self.client.delete(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-08-13 05:18:13 -04:00
|
|
|
non_view_menu = db.session.query(security_manager.viewmenu_model).get(
|
|
|
|
view_menu_id
|
|
|
|
)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert non_view_menu is None
|
2020-03-08 05:13:08 -04:00
|
|
|
|
|
|
|
def test_delete_item_dataset_not_owned(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test delete item not owned
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="alpha")
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 403
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_delete_dataset_item_not_authorized(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test delete item not authorized
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="gamma")
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-03-08 05:13:08 -04:00
|
|
|
rv = self.client.delete(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 401
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
@patch("superset.datasets.dao.DatasetDAO.delete")
|
|
|
|
def test_delete_dataset_sqlalchemy_error(self, mock_dao_delete):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test delete dataset sqlalchemy error
|
2020-03-08 05:13:08 -04:00
|
|
|
"""
|
2020-03-20 12:32:03 -04:00
|
|
|
mock_dao_delete.side_effect = DAODeleteFailedError()
|
2020-03-08 05:13:08 -04:00
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
dataset = self.insert_default_dataset()
|
2020-03-08 05:13:08 -04:00
|
|
|
self.login(username="admin")
|
2020-03-24 13:24:08 -04:00
|
|
|
uri = f"api/v1/dataset/{dataset.id}"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-03-08 05:13:08 -04:00
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 422
|
|
|
|
assert data == {"message": "Dataset could not be deleted."}
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-10-12 08:40:05 -04:00
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_dataset_items(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete dataset items
|
|
|
|
"""
|
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
|
|
|
|
view_menu_names = []
|
|
|
|
for dataset in datasets:
|
|
|
|
view_menu_names.append(dataset.get_perm())
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
expected_response = {"message": f"Deleted {len(datasets)} datasets"}
|
|
|
|
assert data == expected_response
|
|
|
|
datasets = (
|
|
|
|
db.session.query(SqlaTable)
|
|
|
|
.filter(SqlaTable.table_name.in_(self.fixture_tables_names))
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
assert datasets == []
|
|
|
|
# Assert permissions get cleaned
|
|
|
|
for view_menu_name in view_menu_names:
|
|
|
|
assert security_manager.find_view_menu(view_menu_name) is None
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_item_dataset_not_owned(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete item not owned
|
|
|
|
"""
|
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
|
|
|
|
self.login(username="alpha")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
|
|
|
assert rv.status_code == 403
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_item_not_found(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete item not found
|
|
|
|
"""
|
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
dataset_ids.append(db.session.query(func.max(SqlaTable.id)).scalar())
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_dataset_item_not_authorized(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete item not authorized
|
|
|
|
"""
|
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
|
|
|
|
self.login(username="gamma")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 401
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets")
|
|
|
|
def test_bulk_delete_dataset_item_incorrect(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test bulk delete item incorrect request
|
|
|
|
"""
|
|
|
|
datasets = self.get_fixture_datasets()
|
|
|
|
dataset_ids = [dataset.id for dataset in datasets]
|
|
|
|
dataset_ids.append("Wrong")
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(dataset_ids)}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
assert rv.status_code == 400
|
|
|
|
|
2020-03-24 13:24:08 -04:00
|
|
|
def test_dataset_item_refresh(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test item refresh
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
# delete a column
|
|
|
|
id_column = (
|
|
|
|
db.session.query(TableColumn)
|
|
|
|
.filter_by(table_id=dataset.id, column_name="id")
|
|
|
|
.one()
|
|
|
|
)
|
|
|
|
db.session.delete(id_column)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/refresh"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, {}, "refresh")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-24 13:24:08 -04:00
|
|
|
# Assert the column is restored on refresh
|
|
|
|
id_column = (
|
|
|
|
db.session.query(TableColumn)
|
|
|
|
.filter_by(table_id=dataset.id, column_name="id")
|
|
|
|
.one()
|
|
|
|
)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert id_column is not None
|
2020-03-24 13:24:08 -04:00
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_dataset_item_refresh_not_found(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test item refresh not found dataset
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
max_id = db.session.query(func.max(SqlaTable.id)).scalar()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/{max_id + 1}/refresh"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, {}, "refresh")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 404
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
def test_dataset_item_refresh_not_owned(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test item refresh not owned dataset
|
2020-03-24 13:24:08 -04:00
|
|
|
"""
|
|
|
|
dataset = self.insert_default_dataset()
|
|
|
|
self.login(username="alpha")
|
|
|
|
uri = f"api/v1/dataset/{dataset.id}/refresh"
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.put_assert_metric(uri, {}, "refresh")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 403
|
2020-03-24 13:24:08 -04:00
|
|
|
|
|
|
|
db.session.delete(dataset)
|
2020-03-08 05:13:08 -04:00
|
|
|
db.session.commit()
|
2020-03-27 05:30:23 -04:00
|
|
|
|
|
|
|
def test_export_dataset(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test export dataset
|
2020-03-27 05:30:23 -04:00
|
|
|
"""
|
|
|
|
birth_names_dataset = self.get_birth_names_dataset()
|
2020-09-10 00:46:28 -04:00
|
|
|
# TODO: fix test for presto
|
|
|
|
# debug with dump: https://github.com/apache/incubator-superset/runs/1092546855
|
2020-10-08 12:17:09 -04:00
|
|
|
if birth_names_dataset.database.backend in {"presto", "hive"}:
|
2020-09-10 00:46:28 -04:00
|
|
|
return
|
2020-03-27 05:30:23 -04:00
|
|
|
|
|
|
|
argument = [birth_names_dataset.id]
|
|
|
|
uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="admin")
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "export")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
2020-03-27 05:30:23 -04:00
|
|
|
|
|
|
|
cli_export = export_to_dict(
|
2020-08-06 18:33:48 -04:00
|
|
|
session=db.session,
|
|
|
|
recursive=True,
|
|
|
|
back_references=False,
|
|
|
|
include_defaults=False,
|
2020-03-27 05:30:23 -04:00
|
|
|
)
|
|
|
|
cli_export_tables = cli_export["databases"][0]["tables"]
|
2020-09-10 00:46:28 -04:00
|
|
|
expected_response = {}
|
2020-03-27 05:30:23 -04:00
|
|
|
for export_table in cli_export_tables:
|
|
|
|
if export_table["table_name"] == "birth_names":
|
|
|
|
expected_response = export_table
|
|
|
|
break
|
|
|
|
ui_export = yaml.safe_load(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert ui_export[0] == expected_response
|
2020-03-27 05:30:23 -04:00
|
|
|
|
|
|
|
def test_export_dataset_not_found(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test export dataset not found
|
2020-03-27 05:30:23 -04:00
|
|
|
"""
|
|
|
|
max_id = db.session.query(func.max(SqlaTable.id)).scalar()
|
|
|
|
# Just one does not exist and we get 404
|
|
|
|
argument = [max_id + 1, 1]
|
|
|
|
uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
|
|
|
|
self.login(username="admin")
|
2020-04-24 11:38:29 -04:00
|
|
|
rv = self.get_assert_metric(uri, "export")
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 404
|
2020-03-27 05:30:23 -04:00
|
|
|
|
|
|
|
def test_export_dataset_gamma(self):
|
|
|
|
"""
|
2020-04-24 11:38:29 -04:00
|
|
|
Dataset API: Test export dataset has gamma
|
2020-03-27 05:30:23 -04:00
|
|
|
"""
|
|
|
|
birth_names_dataset = self.get_birth_names_dataset()
|
|
|
|
|
|
|
|
argument = [birth_names_dataset.id]
|
|
|
|
uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="gamma")
|
|
|
|
rv = self.client.get(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 401
|
2020-07-06 19:25:57 -04:00
|
|
|
|
2020-10-22 13:32:08 -04:00
|
|
|
@patch.dict(
|
|
|
|
"superset.extensions.feature_flag_manager._feature_flags",
|
|
|
|
{"VERSIONED_EXPORT": True},
|
|
|
|
clear=True,
|
|
|
|
)
|
|
|
|
def test_export_dataset_bundle(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test export dataset
|
|
|
|
"""
|
|
|
|
birth_names_dataset = self.get_birth_names_dataset()
|
|
|
|
# TODO: fix test for presto
|
|
|
|
# debug with dump: https://github.com/apache/incubator-superset/runs/1092546855
|
|
|
|
if birth_names_dataset.database.backend in {"presto", "hive"}:
|
|
|
|
return
|
|
|
|
|
|
|
|
argument = [birth_names_dataset.id]
|
|
|
|
uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.get_assert_metric(uri, "export")
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
buf = BytesIO(rv.data)
|
|
|
|
assert is_zipfile(buf)
|
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
"superset.extensions.feature_flag_manager._feature_flags",
|
|
|
|
{"VERSIONED_EXPORT": True},
|
|
|
|
clear=True,
|
|
|
|
)
|
|
|
|
def test_export_dataset_bundle_not_found(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test export dataset not found
|
|
|
|
"""
|
|
|
|
# Just one does not exist and we get 404
|
|
|
|
argument = [-1, 1]
|
|
|
|
uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.get_assert_metric(uri, "export")
|
|
|
|
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
"superset.extensions.feature_flag_manager._feature_flags",
|
|
|
|
{"VERSIONED_EXPORT": True},
|
|
|
|
clear=True,
|
|
|
|
)
|
|
|
|
def test_export_dataset_bundle_gamma(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test export dataset has gamma
|
|
|
|
"""
|
|
|
|
birth_names_dataset = self.get_birth_names_dataset()
|
|
|
|
|
|
|
|
argument = [birth_names_dataset.id]
|
|
|
|
uri = f"api/v1/dataset/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="gamma")
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
|
|
|
|
assert rv.status_code == 401
|
|
|
|
|
2020-07-06 19:25:57 -04:00
|
|
|
def test_get_dataset_related_objects(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test get chart and dashboard count related to a dataset
|
|
|
|
:return:
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_birth_names_dataset()
|
|
|
|
uri = f"api/v1/dataset/{table.id}/related_objects"
|
|
|
|
rv = self.get_assert_metric(uri, "related_objects")
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 200
|
|
|
|
assert response["charts"]["count"] == 18
|
|
|
|
assert response["dashboards"]["count"] == 1
|
2020-07-08 05:04:05 -04:00
|
|
|
|
|
|
|
def test_get_dataset_related_objects_not_found(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test related objects not found
|
|
|
|
"""
|
|
|
|
max_id = db.session.query(func.max(SqlaTable.id)).scalar()
|
|
|
|
# id does not exist and we get 404
|
|
|
|
invalid_id = max_id + 1
|
|
|
|
uri = f"api/v1/dataset/{invalid_id}/related_objects/"
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.client.get(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 404
|
2020-07-08 05:04:05 -04:00
|
|
|
self.logout()
|
|
|
|
self.login(username="gamma")
|
|
|
|
table = self.get_birth_names_dataset()
|
|
|
|
uri = f"api/v1/dataset/{table.id}/related_objects"
|
|
|
|
rv = self.client.get(uri)
|
2020-10-22 03:56:26 -04:00
|
|
|
assert rv.status_code == 404
|
2020-10-29 16:11:33 -04:00
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_datasets", "create_virtual_datasets")
|
|
|
|
def test_get_datasets_custom_filter_sql(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test custom dataset_is_null_or_empty filter for sql
|
|
|
|
"""
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{"col": "sql", "opr": "dataset_is_null_or_empty", "value": False}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
for table_name in self.fixture_virtual_table_names:
|
|
|
|
assert table_name in [ds["table_name"] for ds in data["result"]]
|
|
|
|
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{"col": "sql", "opr": "dataset_is_null_or_empty", "value": True}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/dataset/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
for table_name in self.fixture_tables_names:
|
|
|
|
assert table_name in [ds["table_name"] for ds in data["result"]]
|
2020-11-17 17:49:33 -05:00
|
|
|
|
2020-11-20 17:40:27 -05:00
|
|
|
def test_imported_dataset(self):
|
2020-11-17 17:49:33 -05:00
|
|
|
"""
|
|
|
|
Dataset API: Test import dataset
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/dataset/import/"
|
|
|
|
|
|
|
|
buf = BytesIO()
|
|
|
|
with ZipFile(buf, "w") as bundle:
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("dataset_export/metadata.yaml", "w") as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(dataset_metadata_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"dataset_export/databases/imported_database.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(database_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"dataset_export/datasets/imported_dataset.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(dataset_config).encode())
|
|
|
|
buf.seek(0)
|
|
|
|
|
|
|
|
form_data = {
|
2020-11-25 14:47:48 -05:00
|
|
|
"formData": (buf, "dataset_export.zip"),
|
2020-11-17 17:49:33 -05:00
|
|
|
}
|
|
|
|
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert response == {"message": "OK"}
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert database.database_name == "imported_database"
|
|
|
|
|
|
|
|
assert len(database.tables) == 1
|
|
|
|
dataset = database.tables[0]
|
|
|
|
assert dataset.table_name == "imported_dataset"
|
|
|
|
assert str(dataset.uuid) == dataset_config["uuid"]
|
|
|
|
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-20 17:40:27 -05:00
|
|
|
def test_imported_dataset_invalid(self):
|
2020-11-17 17:49:33 -05:00
|
|
|
"""
|
|
|
|
Dataset API: Test import invalid dataset
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/dataset/import/"
|
|
|
|
|
|
|
|
buf = BytesIO()
|
|
|
|
with ZipFile(buf, "w") as bundle:
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("dataset_export/metadata.yaml", "w") as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(database_metadata_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"dataset_export/databases/imported_database.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(database_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"dataset_export/datasets/imported_dataset.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(dataset_config).encode())
|
|
|
|
buf.seek(0)
|
|
|
|
|
|
|
|
form_data = {
|
2020-11-25 14:47:48 -05:00
|
|
|
"formData": (buf, "dataset_export.zip"),
|
2020-11-17 17:49:33 -05:00
|
|
|
}
|
|
|
|
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
assert rv.status_code == 422
|
|
|
|
assert response == {
|
|
|
|
"message": {"metadata.yaml": {"type": ["Must be equal to SqlaTable."]}}
|
|
|
|
}
|
|
|
|
|
2020-11-20 17:40:27 -05:00
|
|
|
def test_imported_dataset_invalid_v0_validation(self):
|
2020-11-17 17:49:33 -05:00
|
|
|
"""
|
|
|
|
Dataset API: Test import invalid dataset
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/dataset/import/"
|
|
|
|
|
|
|
|
buf = BytesIO()
|
|
|
|
with ZipFile(buf, "w") as bundle:
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"dataset_export/databases/imported_database.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(database_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"dataset_export/datasets/imported_dataset.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-17 17:49:33 -05:00
|
|
|
fp.write(yaml.safe_dump(dataset_config).encode())
|
|
|
|
buf.seek(0)
|
|
|
|
|
|
|
|
form_data = {
|
2020-11-25 14:47:48 -05:00
|
|
|
"formData": (buf, "dataset_export.zip"),
|
2020-11-17 17:49:33 -05:00
|
|
|
}
|
|
|
|
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
assert rv.status_code == 422
|
|
|
|
assert response == {"message": "Could not process entity"}
|