2020-01-21 13:04:52 -05:00
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
# or more contributor license agreements. See the NOTICE file
|
|
|
|
# distributed with this work for additional information
|
|
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
|
|
# to you under the Apache License, Version 2.0 (the
|
|
|
|
# "License"); you may not use this file except in compliance
|
|
|
|
# with the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing,
|
|
|
|
# software distributed under the License is distributed on an
|
|
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
# KIND, either express or implied. See the License for the
|
|
|
|
# specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2020-06-09 04:46:28 -04:00
|
|
|
# isort:skip_file
|
2020-01-21 13:04:52 -05:00
|
|
|
"""Unit tests for Superset"""
|
|
|
|
import json
|
2020-04-23 07:30:48 -04:00
|
|
|
from typing import List, Optional
|
2020-07-15 14:09:32 -04:00
|
|
|
from datetime import datetime
|
2020-10-22 15:06:58 -04:00
|
|
|
from io import BytesIO
|
2020-06-09 04:46:28 -04:00
|
|
|
from unittest import mock
|
2020-11-20 17:40:27 -05:00
|
|
|
from zipfile import is_zipfile, ZipFile
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2020-07-15 14:09:32 -04:00
|
|
|
import humanize
|
2020-07-20 11:46:51 -04:00
|
|
|
import prison
|
|
|
|
import pytest
|
2020-11-20 17:40:27 -05:00
|
|
|
import yaml
|
2020-10-01 18:08:40 -04:00
|
|
|
from sqlalchemy import and_
|
2020-03-27 05:31:01 -04:00
|
|
|
from sqlalchemy.sql import func
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2020-10-14 15:41:37 -04:00
|
|
|
from superset.connectors.sqla.models import SqlaTable
|
2020-08-06 15:07:22 -04:00
|
|
|
from superset.utils.core import get_example_database
|
2020-10-07 11:29:37 -04:00
|
|
|
from tests.fixtures.unicode_dashboard import load_unicode_dashboard_with_slice
|
2020-06-09 04:46:28 -04:00
|
|
|
from tests.test_app import app
|
2020-01-21 13:04:52 -05:00
|
|
|
from superset.connectors.connector_registry import ConnectorRegistry
|
2020-04-08 03:44:35 -04:00
|
|
|
from superset.extensions import db, security_manager
|
2020-11-20 17:40:27 -05:00
|
|
|
from superset.models.core import Database, FavStar, FavStarClassName
|
2020-01-21 13:04:52 -05:00
|
|
|
from superset.models.dashboard import Dashboard
|
2020-11-26 03:45:49 -05:00
|
|
|
from superset.models.reports import ReportSchedule, ReportScheduleType
|
2020-01-21 13:04:52 -05:00
|
|
|
from superset.models.slice import Slice
|
2020-06-09 04:46:28 -04:00
|
|
|
from superset.utils import core as utils
|
2020-04-08 03:44:35 -04:00
|
|
|
from tests.base_api_tests import ApiOwnersTestCaseMixin
|
|
|
|
from tests.base_tests import SupersetTestCase
|
2020-11-20 17:40:27 -05:00
|
|
|
from tests.fixtures.importexport import (
|
|
|
|
chart_config,
|
|
|
|
chart_metadata_config,
|
|
|
|
database_config,
|
|
|
|
dataset_config,
|
|
|
|
dataset_metadata_config,
|
|
|
|
)
|
2020-04-23 07:30:48 -04:00
|
|
|
from tests.fixtures.query_context import get_query_context
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2020-06-09 04:46:28 -04:00
|
|
|
# Endpoint exercised by the chart-data tests.
CHART_DATA_URI = "api/v1/chart/data"
# Number of charts the `create_charts` fixture builds around (it inserts
# CHARTS_FIXTURE_COUNT - 1 charts and favorites half of them).
CHARTS_FIXTURE_COUNT = 10
|
2020-06-09 04:46:28 -04:00
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2020-06-29 18:36:06 -04:00
|
|
|
class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
|
2020-01-21 13:04:52 -05:00
|
|
|
resource_name = "chart"
|
|
|
|
|
|
|
|
def insert_chart(
|
|
|
|
self,
|
|
|
|
slice_name: str,
|
|
|
|
owners: List[int],
|
|
|
|
datasource_id: int,
|
2020-09-30 16:53:04 -04:00
|
|
|
created_by=None,
|
2020-01-21 13:04:52 -05:00
|
|
|
datasource_type: str = "table",
|
2020-02-17 01:34:16 -05:00
|
|
|
description: Optional[str] = None,
|
|
|
|
viz_type: Optional[str] = None,
|
|
|
|
params: Optional[str] = None,
|
2020-01-21 13:04:52 -05:00
|
|
|
cache_timeout: Optional[int] = None,
|
|
|
|
) -> Slice:
|
|
|
|
obj_owners = list()
|
|
|
|
for owner in owners:
|
|
|
|
user = db.session.query(security_manager.user_model).get(owner)
|
|
|
|
obj_owners.append(user)
|
2020-08-06 18:33:48 -04:00
|
|
|
datasource = ConnectorRegistry.get_datasource(
|
|
|
|
datasource_type, datasource_id, db.session
|
|
|
|
)
|
2020-01-21 13:04:52 -05:00
|
|
|
slice = Slice(
|
2020-09-30 16:53:04 -04:00
|
|
|
cache_timeout=cache_timeout,
|
|
|
|
created_by=created_by,
|
2020-01-21 13:04:52 -05:00
|
|
|
datasource_id=datasource.id,
|
|
|
|
datasource_name=datasource.name,
|
|
|
|
datasource_type=datasource.type,
|
|
|
|
description=description,
|
2020-09-30 16:53:04 -04:00
|
|
|
owners=obj_owners,
|
2020-01-21 13:04:52 -05:00
|
|
|
params=params,
|
2020-09-30 16:53:04 -04:00
|
|
|
slice_name=slice_name,
|
|
|
|
viz_type=viz_type,
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
|
|
|
db.session.add(slice)
|
|
|
|
db.session.commit()
|
|
|
|
return slice
|
|
|
|
|
2020-10-01 18:08:40 -04:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_charts(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
charts = []
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
for cx in range(CHARTS_FIXTURE_COUNT - 1):
|
|
|
|
charts.append(self.insert_chart(f"name{cx}", [admin.id], 1))
|
|
|
|
fav_charts = []
|
|
|
|
for cx in range(round(CHARTS_FIXTURE_COUNT / 2)):
|
|
|
|
fav_star = FavStar(
|
|
|
|
user_id=admin.id, class_name="slice", obj_id=charts[cx].id
|
|
|
|
)
|
|
|
|
db.session.add(fav_star)
|
|
|
|
db.session.commit()
|
|
|
|
fav_charts.append(fav_star)
|
|
|
|
yield charts
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
for chart in charts:
|
|
|
|
db.session.delete(chart)
|
|
|
|
for fav_chart in fav_charts:
|
|
|
|
db.session.delete(fav_chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-26 03:45:49 -05:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_chart_with_report(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
chart = self.insert_chart(f"chart_report", [admin.id], 1)
|
|
|
|
report_schedule = ReportSchedule(
|
|
|
|
type=ReportScheduleType.REPORT,
|
|
|
|
name="report_with_chart",
|
|
|
|
crontab="* * * * *",
|
|
|
|
chart=chart,
|
|
|
|
)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
yield chart
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
db.session.delete(report_schedule)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-30 12:48:14 -05:00
|
|
|
@pytest.fixture()
|
|
|
|
def add_dashboard_to_chart(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
|
|
|
|
self.chart = self.insert_chart("My chart", [admin.id], 1)
|
|
|
|
|
|
|
|
self.original_dashboard = Dashboard()
|
|
|
|
self.original_dashboard.dashboard_title = "Original Dashboard"
|
|
|
|
self.original_dashboard.slug = "slug"
|
|
|
|
self.original_dashboard.owners = [admin]
|
|
|
|
self.original_dashboard.slices = [self.chart]
|
|
|
|
self.original_dashboard.published = False
|
|
|
|
db.session.add(self.original_dashboard)
|
|
|
|
|
|
|
|
self.new_dashboard = Dashboard()
|
|
|
|
self.new_dashboard.dashboard_title = "New Dashboard"
|
|
|
|
self.new_dashboard.slug = "new_slug"
|
|
|
|
self.new_dashboard.owners = [admin]
|
|
|
|
self.new_dashboard.slices = []
|
|
|
|
self.new_dashboard.published = False
|
|
|
|
db.session.add(self.new_dashboard)
|
|
|
|
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
yield self.chart
|
|
|
|
|
|
|
|
db.session.delete(self.original_dashboard)
|
|
|
|
db.session.delete(self.new_dashboard)
|
|
|
|
db.session.delete(self.chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin_id = self.get_user("admin").id
|
|
|
|
chart_id = self.insert_chart("name", [admin_id], 1).id
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_charts(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
2020-09-30 16:53:04 -04:00
|
|
|
admin = self.get_user("admin")
|
2020-03-27 05:31:01 -04:00
|
|
|
chart_count = 4
|
|
|
|
chart_ids = list()
|
|
|
|
for chart_name_index in range(chart_count):
|
|
|
|
chart_ids.append(
|
2020-09-30 16:53:04 -04:00
|
|
|
self.insert_chart(f"title{chart_name_index}", [admin.id], 1, admin).id
|
2020-03-27 05:31:01 -04:00
|
|
|
)
|
|
|
|
self.login(username="admin")
|
|
|
|
argument = chart_ids
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(argument)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_response = {"message": f"Deleted {chart_count} charts"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
for chart_id in chart_ids:
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
|
|
|
def test_delete_bulk_chart_bad_request(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk bad request
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
chart_ids = [1, "a"]
|
|
|
|
self.login(username="admin")
|
|
|
|
argument = chart_ids
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(argument)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_not_found_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test not found delete
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart_id = 1000
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2020-11-26 03:45:49 -05:00
|
|
|
@pytest.mark.usefixtures("create_chart_with_report")
|
|
|
|
def test_delete_chart_with_report(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test delete with associated report
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter(Slice.slice_name == "chart_report")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
expected_response = {
|
|
|
|
"message": "There are associated alerts or reports: report_with_chart"
|
|
|
|
}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_charts_not_found(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk not found
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
max_id = db.session.query(func.max(Slice.id)).scalar()
|
|
|
|
chart_ids = [max_id + 1, max_id + 2]
|
|
|
|
self.login(username="admin")
|
2020-11-26 03:45:49 -05:00
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(chart_ids)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2020-11-26 03:45:49 -05:00
|
|
|
@pytest.mark.usefixtures("create_chart_with_report", "create_charts")
|
|
|
|
def test_bulk_delete_chart_with_report(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test bulk delete with associated report
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart_with_report = (
|
|
|
|
db.session.query(Slice.id)
|
|
|
|
.filter(Slice.slice_name == "chart_report")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
|
|
|
|
charts = db.session.query(Slice.id).filter(Slice.slice_name.like("name%")).all()
|
|
|
|
chart_ids = [chart.id for chart in charts]
|
|
|
|
chart_ids.append(chart_with_report.id)
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(chart_ids)}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
expected_response = {
|
|
|
|
"message": "There are associated alerts or reports: report_with_chart"
|
|
|
|
}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_chart_admin_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test admin delete not owned
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
gamma_id = self.get_user("gamma").id
|
|
|
|
chart_id = self.insert_chart("title", [gamma_id], 1).id
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_chart_admin_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test admin delete bulk not owned
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
gamma_id = self.get_user("gamma").id
|
|
|
|
chart_count = 4
|
|
|
|
chart_ids = list()
|
|
|
|
for chart_name_index in range(chart_count):
|
|
|
|
chart_ids.append(
|
|
|
|
self.insert_chart(f"title{chart_name_index}", [gamma_id], 1).id
|
|
|
|
)
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
argument = chart_ids
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(argument)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
expected_response = {"message": f"Deleted {chart_count} charts"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
for chart_id in chart_ids:
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_chart_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete try not owned
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
user_alpha1 = self.create_user(
|
|
|
|
"alpha1", "password", "Alpha", email="alpha1@superset.org"
|
|
|
|
)
|
|
|
|
user_alpha2 = self.create_user(
|
|
|
|
"alpha2", "password", "Alpha", email="alpha2@superset.org"
|
|
|
|
)
|
|
|
|
chart = self.insert_chart("title", [user_alpha1.id], 1)
|
|
|
|
self.login(username="alpha2", password="password")
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(user_alpha1)
|
|
|
|
db.session.delete(user_alpha2)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_chart_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk try not owned
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
user_alpha1 = self.create_user(
|
|
|
|
"alpha1", "password", "Alpha", email="alpha1@superset.org"
|
|
|
|
)
|
|
|
|
user_alpha2 = self.create_user(
|
|
|
|
"alpha2", "password", "Alpha", email="alpha2@superset.org"
|
|
|
|
)
|
|
|
|
|
|
|
|
chart_count = 4
|
|
|
|
charts = list()
|
|
|
|
for chart_name_index in range(chart_count):
|
|
|
|
charts.append(
|
|
|
|
self.insert_chart(f"title{chart_name_index}", [user_alpha1.id], 1)
|
|
|
|
)
|
|
|
|
|
|
|
|
owned_chart = self.insert_chart("title_owned", [user_alpha2.id], 1)
|
|
|
|
|
|
|
|
self.login(username="alpha2", password="password")
|
|
|
|
|
|
|
|
# verify we can't delete not owned charts
|
|
|
|
arguments = [chart.id for chart in charts]
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_response = {"message": "Forbidden"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
# # nothing is deleted in bulk with a list of owned and not owned charts
|
|
|
|
arguments = [chart.id for chart in charts] + [owned_chart.id]
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_response = {"message": "Forbidden"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
for chart in charts:
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(owned_chart)
|
|
|
|
db.session.delete(user_alpha1)
|
|
|
|
db.session.delete(user_alpha2)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_create_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create chart
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin_id = self.get_user("admin").id
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "name1",
|
|
|
|
"description": "description1",
|
|
|
|
"owners": [admin_id],
|
|
|
|
"viz_type": "viz_type1",
|
|
|
|
"params": "1234",
|
|
|
|
"cache_timeout": 1000,
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"dashboards": [1, 2],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 201)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
model = db.session.query(Slice).get(data.get("id"))
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_create_simple_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create simple chart
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 201)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
model = db.session.query(Slice).get(data.get("id"))
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_create_chart_validate_owners(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create validate owners
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"owners": [1000],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-03-24 06:05:11 -04:00
|
|
|
expected_response = {"message": {"owners": ["Owners are invalid"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
def test_create_chart_validate_params(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create validate params json
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"params": '{"A:"a"}',
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-03-24 06:05:11 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
2020-01-21 13:04:52 -05:00
|
|
|
|
|
|
|
def test_create_chart_validate_datasource(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create validate datasource
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "unknown",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-05-05 09:42:18 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
2020-01-21 13:04:52 -05:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2020-07-07 08:26:54 -04:00
|
|
|
response,
|
|
|
|
{"message": {"datasource_type": ["Must be one of: druid, table, view."]}},
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 0,
|
|
|
|
"datasource_type": "table",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2020-03-24 06:05:11 -04:00
|
|
|
response, {"message": {"datasource_id": ["Datasource does not exist"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
|
|
|
|
|
|
|
def test_update_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
gamma = self.get_user("gamma")
|
|
|
|
|
2020-09-30 16:53:04 -04:00
|
|
|
chart_id = self.insert_chart("title", [admin.id], 1, admin).id
|
2020-08-10 16:20:19 -04:00
|
|
|
birth_names_table_id = SupersetTestCase.get_table_by_name("birth_names").id
|
2020-01-21 13:04:52 -05:00
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1_changed",
|
|
|
|
"description": "description1",
|
|
|
|
"owners": [gamma.id],
|
|
|
|
"viz_type": "viz_type1",
|
2020-08-10 16:20:19 -04:00
|
|
|
"params": """{"a": 1}""",
|
2020-01-21 13:04:52 -05:00
|
|
|
"cache_timeout": 1000,
|
2020-08-10 16:20:19 -04:00
|
|
|
"datasource_id": birth_names_table_id,
|
2020-01-21 13:04:52 -05:00
|
|
|
"datasource_type": "table",
|
|
|
|
"dashboards": [1],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
related_dashboard = db.session.query(Dashboard).get(1)
|
2020-09-30 16:53:04 -04:00
|
|
|
self.assertEqual(model.created_by, admin)
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(model.slice_name, "title1_changed")
|
|
|
|
self.assertEqual(model.description, "description1")
|
|
|
|
self.assertIn(admin, model.owners)
|
|
|
|
self.assertIn(gamma, model.owners)
|
|
|
|
self.assertEqual(model.viz_type, "viz_type1")
|
2020-08-10 16:20:19 -04:00
|
|
|
self.assertEqual(model.params, """{"a": 1}""")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(model.cache_timeout, 1000)
|
2020-08-10 16:20:19 -04:00
|
|
|
self.assertEqual(model.datasource_id, birth_names_table_id)
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(model.datasource_type, "table")
|
|
|
|
self.assertEqual(model.datasource_name, "birth_names")
|
|
|
|
self.assertIn(related_dashboard, model.dashboards)
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_chart_new_owner(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update set new owner to current user
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
gamma = self.get_user("gamma")
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
chart_id = self.insert_chart("title", [gamma.id], 1).id
|
|
|
|
chart_data = {"slice_name": "title1_changed"}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertIn(admin, model.owners)
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-30 12:48:14 -05:00
|
|
|
@pytest.mark.usefixtures("add_dashboard_to_chart")
|
|
|
|
def test_update_chart_new_dashboards(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test update set new owner to current user
|
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1_changed",
|
|
|
|
"dashboards": [self.new_dashboard.id],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{self.chart.id}"
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
self.assertIn(self.new_dashboard, self.chart.dashboards)
|
|
|
|
self.assertNotIn(self.original_dashboard, self.chart.dashboards)
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("add_dashboard_to_chart")
|
|
|
|
def test_not_update_chart_none_dashboards(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test update set new owner to current user
|
|
|
|
"""
|
|
|
|
chart_data = {"slice_name": "title1_changed_again"}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{self.chart.id}"
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
self.assertIn(self.original_dashboard, self.chart.dashboards)
|
|
|
|
self.assertEqual(len(self.chart.dashboards), 1)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_update_chart_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update not owned
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
user_alpha1 = self.create_user(
|
|
|
|
"alpha1", "password", "Alpha", email="alpha1@superset.org"
|
|
|
|
)
|
|
|
|
user_alpha2 = self.create_user(
|
|
|
|
"alpha2", "password", "Alpha", email="alpha2@superset.org"
|
|
|
|
)
|
|
|
|
chart = self.insert_chart("title", [user_alpha1.id], 1)
|
|
|
|
|
|
|
|
self.login(username="alpha2", password="password")
|
|
|
|
chart_data = {"slice_name": "title1_changed"}
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(user_alpha1)
|
|
|
|
db.session.delete(user_alpha2)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_chart_validate_datasource(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update validate datasource
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
chart = self.insert_chart("title", [admin.id], 1)
|
|
|
|
self.login(username="admin")
|
|
|
|
chart_data = {"datasource_id": 1, "datasource_type": "unknown"}
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-05-05 09:42:18 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
2020-01-21 13:04:52 -05:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2020-07-07 08:26:54 -04:00
|
|
|
response,
|
|
|
|
{"message": {"datasource_type": ["Must be one of: druid, table, view."]}},
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
|
|
|
chart_data = {"datasource_id": 0, "datasource_type": "table"}
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2020-03-24 06:05:11 -04:00
|
|
|
response, {"message": {"datasource_id": ["Datasource does not exist"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_chart_validate_owners(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update validate owners
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"owners": [1000],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
|
|
|
rv = self.client.post(uri, json=chart_data)
|
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-03-24 06:05:11 -04:00
|
|
|
expected_response = {"message": {"owners": ["Owners are invalid"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
def test_get_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get chart
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
chart = self.insert_chart("title", [admin.id], 1)
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
expected_result = {
|
|
|
|
"cache_timeout": None,
|
|
|
|
"dashboards": [],
|
|
|
|
"description": None,
|
2020-04-07 12:09:02 -04:00
|
|
|
"owners": [
|
|
|
|
{
|
|
|
|
"id": 1,
|
|
|
|
"username": "admin",
|
|
|
|
"first_name": "admin",
|
|
|
|
"last_name": "user",
|
|
|
|
}
|
|
|
|
],
|
2020-01-21 13:04:52 -05:00
|
|
|
"params": None,
|
|
|
|
"slice_name": "title",
|
|
|
|
"viz_type": None,
|
|
|
|
}
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["result"], expected_result)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_get_chart_not_found(self):
|
|
|
|
"""
|
2020-09-28 13:18:34 -04:00
|
|
|
Chart API: Test get chart not found
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_id = 1000
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
|
|
|
def test_get_chart_no_data_access(self):
|
|
|
|
"""
|
2020-09-28 13:18:34 -04:00
|
|
|
Chart API: Test get chart without data access
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
|
|
|
chart_no_access = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter_by(slice_name="Girl Name Cloud")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
uri = f"api/v1/chart/{chart_no_access.id}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2020-10-07 11:29:37 -04:00
|
|
|
@pytest.mark.usefixtures("load_unicode_dashboard_with_slice")
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_charts(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 33)
|
|
|
|
|
2020-07-15 14:09:32 -04:00
|
|
|
def test_get_charts_changed_on(self):
|
|
|
|
"""
|
|
|
|
Dashboard API: Test get charts changed on
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
start_changed_on = datetime.now()
|
|
|
|
chart = self.insert_chart("foo_a", [admin.id], 1, description="ZY_bar")
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
|
|
|
|
arguments = {
|
|
|
|
"order_column": "changed_on_delta_humanized",
|
|
|
|
"order_direction": "desc",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
|
|
|
data["result"][0]["changed_on_delta_humanized"],
|
|
|
|
humanize.naturaltime(datetime.now() - start_changed_on),
|
|
|
|
)
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_charts_filter(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts filter
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {"filters": [{"col": "slice_name", "opr": "sw", "value": "G"}]}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 5)
|
|
|
|
|
2020-10-14 15:41:37 -04:00
|
|
|
@pytest.fixture()
|
|
|
|
def load_charts(self):
|
|
|
|
with app.app_context():
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
energy_table = (
|
|
|
|
db.session.query(SqlaTable)
|
|
|
|
.filter_by(table_name="energy_usage")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
energy_table_id = 1
|
|
|
|
if energy_table:
|
|
|
|
energy_table_id = energy_table.id
|
|
|
|
chart1 = self.insert_chart(
|
|
|
|
"foo_a", [admin.id], energy_table_id, description="ZY_bar"
|
|
|
|
)
|
|
|
|
chart2 = self.insert_chart(
|
|
|
|
"zy_foo", [admin.id], energy_table_id, description="desc1"
|
|
|
|
)
|
|
|
|
chart3 = self.insert_chart(
|
|
|
|
"foo_b", [admin.id], energy_table_id, description="desc1zy_"
|
|
|
|
)
|
|
|
|
chart4 = self.insert_chart(
|
|
|
|
"foo_c", [admin.id], energy_table_id, viz_type="viz_zy_"
|
|
|
|
)
|
|
|
|
chart5 = self.insert_chart(
|
|
|
|
"bar", [admin.id], energy_table_id, description="foo"
|
|
|
|
)
|
|
|
|
|
|
|
|
yield
|
|
|
|
# rollback changes
|
|
|
|
db.session.delete(chart1)
|
|
|
|
db.session.delete(chart2)
|
|
|
|
db.session.delete(chart3)
|
|
|
|
db.session.delete(chart4)
|
|
|
|
db.session.delete(chart5)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("load_charts")
|
2020-04-11 02:25:54 -04:00
|
|
|
def test_get_charts_custom_filter(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts custom filter
|
2020-04-11 02:25:54 -04:00
|
|
|
"""
|
|
|
|
|
|
|
|
arguments = {
|
2020-09-28 13:18:34 -04:00
|
|
|
"filters": [{"col": "slice_name", "opr": "chart_all_text", "value": "zy_"}],
|
2020-04-11 02:25:54 -04:00
|
|
|
"order_column": "slice_name",
|
|
|
|
"order_direction": "asc",
|
2020-04-29 03:36:11 -04:00
|
|
|
"keys": ["none"],
|
2020-09-28 13:18:34 -04:00
|
|
|
"columns": ["slice_name", "description", "viz_type"],
|
2020-04-11 02:25:54 -04:00
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-04-11 02:25:54 -04:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-09-28 13:18:34 -04:00
|
|
|
self.assertEqual(data["count"], 4)
|
2020-04-11 02:25:54 -04:00
|
|
|
|
|
|
|
expected_response = [
|
2020-09-28 13:18:34 -04:00
|
|
|
{"description": "ZY_bar", "slice_name": "foo_a", "viz_type": None},
|
|
|
|
{"description": "desc1zy_", "slice_name": "foo_b", "viz_type": None},
|
|
|
|
{"description": None, "slice_name": "foo_c", "viz_type": "viz_zy_"},
|
|
|
|
{"description": "desc1", "slice_name": "zy_foo", "viz_type": None},
|
2020-04-11 02:25:54 -04:00
|
|
|
]
|
|
|
|
for index, item in enumerate(data["result"]):
|
|
|
|
self.assertEqual(
|
|
|
|
item["description"], expected_response[index]["description"]
|
|
|
|
)
|
|
|
|
self.assertEqual(item["slice_name"], expected_response[index]["slice_name"])
|
2020-09-28 13:18:34 -04:00
|
|
|
self.assertEqual(item["viz_type"], expected_response[index]["viz_type"])
|
|
|
|
|
2020-10-14 15:41:37 -04:00
|
|
|
@pytest.mark.usefixtures("load_charts")
|
|
|
|
def test_admin_gets_filtered_energy_slices(self):
|
2020-09-28 13:18:34 -04:00
|
|
|
# test filtering on datasource_name
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{"col": "slice_name", "opr": "chart_all_text", "value": "energy",}
|
|
|
|
],
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
2020-10-14 15:41:37 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
|
2020-09-28 13:18:34 -04:00
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 8)
|
2020-04-11 02:25:54 -04:00
|
|
|
|
2020-10-14 15:41:37 -04:00
|
|
|
@pytest.mark.usefixtures("load_charts")
|
|
|
|
def test_user_gets_none_filtered_energy_slices(self):
|
|
|
|
# test filtering on datasource_name
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{"col": "slice_name", "opr": "chart_all_text", "value": "energy",}
|
|
|
|
],
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
|
2020-04-11 02:25:54 -04:00
|
|
|
self.login(username="gamma")
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-04-11 02:25:54 -04:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 0)
|
|
|
|
|
2020-10-01 18:08:40 -04:00
|
|
|
@pytest.mark.usefixtures("create_charts")
|
|
|
|
def test_get_charts_favorite_filter(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test get charts favorite filter
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
users_favorite_query = db.session.query(FavStar.obj_id).filter(
|
|
|
|
and_(FavStar.user_id == admin.id, FavStar.class_name == "slice")
|
|
|
|
)
|
|
|
|
expected_models = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter(and_(Slice.id.in_(users_favorite_query)))
|
|
|
|
.order_by(Slice.slice_name.asc())
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
|
|
|
|
arguments = {
|
|
|
|
"filters": [{"col": "id", "opr": "chart_is_fav", "value": True}],
|
|
|
|
"order_column": "slice_name",
|
|
|
|
"order_direction": "asc",
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert len(expected_models) == data["count"]
|
|
|
|
|
|
|
|
for i, expected_model in enumerate(expected_models):
|
|
|
|
assert expected_model.slice_name == data["result"][i]["slice_name"]
|
|
|
|
|
|
|
|
# Test not favorite charts
|
|
|
|
expected_models = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter(and_(~Slice.id.in_(users_favorite_query)))
|
|
|
|
.order_by(Slice.slice_name.asc())
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
arguments["filters"][0]["value"] = False
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert len(expected_models) == data["count"]
|
|
|
|
|
2020-11-03 00:26:14 -05:00
|
|
|
@pytest.mark.usefixtures("create_charts")
|
|
|
|
def test_get_current_user_favorite_status(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test get current user favorite stars
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
users_favorite_ids = [
|
|
|
|
star.obj_id
|
|
|
|
for star in db.session.query(FavStar.obj_id)
|
|
|
|
.filter(
|
|
|
|
and_(
|
|
|
|
FavStar.user_id == admin.id,
|
|
|
|
FavStar.class_name == FavStarClassName.CHART,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
.all()
|
|
|
|
]
|
|
|
|
|
|
|
|
assert users_favorite_ids
|
|
|
|
arguments = [s.id for s in db.session.query(Slice.id).all()]
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/favorite_status/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
for res in data["result"]:
|
|
|
|
if res["id"] in users_favorite_ids:
|
|
|
|
assert res["value"]
|
|
|
|
|
2020-10-07 11:29:37 -04:00
|
|
|
@pytest.mark.usefixtures("load_unicode_dashboard_with_slice")
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_charts_page(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts filter
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
# Assuming we have 33 sample charts
|
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {"page_size": 10, "page": 0}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(len(data["result"]), 10)
|
|
|
|
|
|
|
|
arguments = {"page_size": 10, "page": 3}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(len(data["result"]), 3)
|
|
|
|
|
|
|
|
def test_get_charts_no_data_access(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts no data access
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 0)
|
2020-04-09 04:05:49 -04:00
|
|
|
|
2020-06-09 04:46:28 -04:00
|
|
|
def test_chart_data_simple(self):
|
2020-04-09 04:05:49 -04:00
|
|
|
"""
|
2020-06-09 04:46:28 -04:00
|
|
|
Chart data API: Test chart data query
|
2020-04-09 04:05:49 -04:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
2020-04-23 07:30:48 -04:00
|
|
|
table = self.get_table_by_name("birth_names")
|
2020-06-09 04:46:28 -04:00
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
2020-04-09 04:05:49 -04:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-08-03 12:08:49 -04:00
|
|
|
self.assertEqual(data["result"][0]["rowcount"], 45)
|
2020-04-17 09:44:16 -04:00
|
|
|
|
2020-10-28 18:46:24 -04:00
|
|
|
def test_chart_data_applied_time_extras(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data query with applied time extras
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["queries"][0]["applied_time_extras"] = {
|
|
|
|
"__time_range": "100 years ago : now",
|
|
|
|
"__time_origin": "now",
|
|
|
|
}
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
|
|
|
data["result"][0]["applied_filters"],
|
|
|
|
[{"column": "gender"}, {"column": "__time_range"},],
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
data["result"][0]["rejected_filters"],
|
|
|
|
[{"column": "__time_origin", "reason": "not_druid_datasource"},],
|
|
|
|
)
|
|
|
|
self.assertEqual(data["result"][0]["rowcount"], 45)
|
|
|
|
|
2020-06-09 04:46:28 -04:00
|
|
|
def test_chart_data_limit_offset(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data query with limit and offset
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["queries"][0]["row_limit"] = 5
|
|
|
|
request_payload["queries"][0]["row_offset"] = 0
|
|
|
|
request_payload["queries"][0]["orderby"] = [["name", True]]
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]
|
|
|
|
self.assertEqual(result["rowcount"], 5)
|
|
|
|
|
2020-08-06 15:07:22 -04:00
|
|
|
# TODO: fix offset for presto DB
|
|
|
|
if get_example_database().backend == "presto":
|
|
|
|
return
|
|
|
|
|
2020-06-09 04:46:28 -04:00
|
|
|
# ensure that offset works properly
|
|
|
|
offset = 2
|
|
|
|
expected_name = result["data"][offset]["name"]
|
|
|
|
request_payload["queries"][0]["row_offset"] = offset
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]
|
|
|
|
self.assertEqual(result["rowcount"], 5)
|
|
|
|
self.assertEqual(result["data"][0]["name"], expected_name)
|
|
|
|
|
|
|
|
@mock.patch(
|
|
|
|
"superset.common.query_object.config", {**app.config, "ROW_LIMIT": 7},
|
|
|
|
)
|
|
|
|
def test_chart_data_default_row_limit(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Ensure row count doesn't exceed default limit
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
del request_payload["queries"][0]["row_limit"]
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]
|
|
|
|
self.assertEqual(result["rowcount"], 7)
|
|
|
|
|
|
|
|
@mock.patch(
|
|
|
|
"superset.common.query_context.config", {**app.config, "SAMPLES_ROW_LIMIT": 5},
|
|
|
|
)
|
|
|
|
def test_chart_data_default_sample_limit(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Ensure sample response row count doesn't exceed default limit
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["result_type"] = utils.ChartDataResultType.SAMPLES
|
|
|
|
request_payload["queries"][0]["row_limit"] = 10
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]
|
|
|
|
self.assertEqual(result["rowcount"], 5)
|
|
|
|
|
2020-07-14 05:40:00 -04:00
|
|
|
def test_chart_data_incorrect_result_type(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data with unsupported result type
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["result_type"] = "qwerty"
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
|
|
|
|
def test_chart_data_incorrect_result_format(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data with unsupported result format
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["result_format"] = "qwerty"
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
|
2020-07-14 09:37:19 -04:00
|
|
|
def test_chart_data_query_result_type(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data with query result format
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
2020-08-28 14:26:07 -04:00
|
|
|
request_payload["result_type"] = utils.ChartDataResultType.QUERY
|
2020-07-14 09:37:19 -04:00
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
|
|
|
|
def test_chart_data_csv_result_format(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data with CSV result format
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["result_format"] = "csv"
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
|
2020-07-13 10:21:02 -04:00
|
|
|
def test_chart_data_mixed_case_filter_op(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Ensure mixed case filter operator generates valid result
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["queries"][0]["filters"][0]["op"] = "In"
|
|
|
|
request_payload["queries"][0]["row_limit"] = 10
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]
|
|
|
|
self.assertEqual(result["rowcount"], 10)
|
|
|
|
|
2020-07-20 11:46:51 -04:00
|
|
|
def test_chart_data_prophet(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Ensure prophet post transformation works
|
|
|
|
"""
|
|
|
|
pytest.importorskip("fbprophet")
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
time_grain = "P1Y"
|
|
|
|
request_payload["queries"][0]["is_timeseries"] = True
|
|
|
|
request_payload["queries"][0]["groupby"] = []
|
|
|
|
request_payload["queries"][0]["extras"] = {"time_grain_sqla": time_grain}
|
|
|
|
request_payload["queries"][0]["granularity"] = "ds"
|
|
|
|
request_payload["queries"][0]["post_processing"] = [
|
|
|
|
{
|
|
|
|
"operation": "prophet",
|
|
|
|
"options": {
|
|
|
|
"time_grain": time_grain,
|
|
|
|
"periods": 3,
|
|
|
|
"confidence_interval": 0.9,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
]
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]
|
|
|
|
row = result["data"][0]
|
|
|
|
self.assertIn("__timestamp", row)
|
|
|
|
self.assertIn("sum__num", row)
|
|
|
|
self.assertIn("sum__num__yhat", row)
|
|
|
|
self.assertIn("sum__num__yhat_upper", row)
|
|
|
|
self.assertIn("sum__num__yhat_lower", row)
|
|
|
|
self.assertEqual(result["rowcount"], 47)
|
|
|
|
|
2020-09-30 04:34:23 -04:00
|
|
|
def test_chart_data_query_missing_filter(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Ensure filter referencing missing column is ignored
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["queries"][0]["filters"] = [
|
|
|
|
{"col": "non_existent_filter", "op": "==", "val": "foo"},
|
|
|
|
]
|
|
|
|
request_payload["result_type"] = utils.ChartDataResultType.QUERY
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert "non_existent_filter" not in response_payload["result"][0]["query"]
|
|
|
|
|
2020-07-14 05:40:00 -04:00
|
|
|
def test_chart_data_no_data(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data with empty result
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["queries"][0]["filters"] = [
|
|
|
|
{"col": "gender", "op": "==", "val": "foo"}
|
|
|
|
]
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]
|
|
|
|
self.assertEqual(result["rowcount"], 0)
|
|
|
|
self.assertEqual(result["data"], [])
|
|
|
|
|
|
|
|
def test_chart_data_incorrect_request(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Test chart data with invalid SQL
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["queries"][0]["filters"] = []
|
|
|
|
# erroneus WHERE-clause
|
|
|
|
request_payload["queries"][0]["extras"]["where"] = "(gender abc def)"
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
|
2020-04-23 07:30:48 -04:00
|
|
|
def test_chart_data_with_invalid_datasource(self):
|
2020-09-30 16:53:04 -04:00
|
|
|
"""
|
|
|
|
Chart data API: Test chart data query with invalid schema
|
|
|
|
"""
|
2020-04-23 07:30:48 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
payload["datasource"] = "abc"
|
2020-06-09 04:46:28 -04:00
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, payload, "data")
|
2020-04-23 07:30:48 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
|
|
|
|
def test_chart_data_with_invalid_enum_value(self):
|
2020-09-30 16:53:04 -04:00
|
|
|
"""
|
|
|
|
Chart data API: Test chart data query with invalid enum value
|
|
|
|
"""
|
2020-04-17 09:44:16 -04:00
|
|
|
self.login(username="admin")
|
2020-04-23 07:30:48 -04:00
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
payload["queries"][0]["extras"]["time_range_endpoints"] = [
|
|
|
|
"abc",
|
|
|
|
"EXCLUSIVE",
|
|
|
|
]
|
2020-06-09 04:46:28 -04:00
|
|
|
rv = self.client.post(CHART_DATA_URI, json=payload)
|
2020-04-17 09:44:16 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
2020-04-09 04:05:49 -04:00
|
|
|
|
|
|
|
def test_query_exec_not_allowed(self):
|
|
|
|
"""
|
2020-06-09 04:46:28 -04:00
|
|
|
Chart data API: Test chart data query not allowed
|
2020-04-09 04:05:49 -04:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
2020-04-23 07:30:48 -04:00
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
payload = get_query_context(table.name, table.id, table.type)
|
2020-06-09 04:46:28 -04:00
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, payload, "data")
|
2020-04-09 04:05:49 -04:00
|
|
|
self.assertEqual(rv.status_code, 401)
|
2020-08-28 14:26:07 -04:00
|
|
|
|
|
|
|
def test_chart_data_jinja_filter_request(self):
|
|
|
|
"""
|
|
|
|
Chart data API: Ensure request referencing filters via jinja renders a correct query
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
table = self.get_table_by_name("birth_names")
|
|
|
|
request_payload = get_query_context(table.name, table.id, table.type)
|
|
|
|
request_payload["result_type"] = utils.ChartDataResultType.QUERY
|
|
|
|
request_payload["queries"][0]["filters"] = [
|
|
|
|
{"col": "gender", "op": "==", "val": "boy"}
|
|
|
|
]
|
|
|
|
request_payload["queries"][0]["extras"][
|
|
|
|
"where"
|
|
|
|
] = "('boy' = '{{ filter_values('gender', 'xyz' )[0] }}')"
|
|
|
|
rv = self.post_assert_metric(CHART_DATA_URI, request_payload, "data")
|
|
|
|
response_payload = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = response_payload["result"][0]["query"]
|
|
|
|
if get_example_database().backend != "presto":
|
|
|
|
assert "('boy' = 'boy')" in result
|
2020-10-22 15:06:58 -04:00
|
|
|
|
|
|
|
def test_export_chart(self):
|
|
|
|
"""
|
2020-11-20 17:40:27 -05:00
|
|
|
Chart API: Test export chart
|
2020-10-22 15:06:58 -04:00
|
|
|
"""
|
|
|
|
example_chart = db.session.query(Slice).all()[0]
|
|
|
|
argument = [example_chart.id]
|
|
|
|
uri = f"api/v1/chart/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.get_assert_metric(uri, "export")
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
buf = BytesIO(rv.data)
|
|
|
|
assert is_zipfile(buf)
|
|
|
|
|
|
|
|
def test_export_chart_not_found(self):
|
|
|
|
"""
|
2020-11-20 17:40:27 -05:00
|
|
|
Chart API: Test export chart not found
|
2020-10-22 15:06:58 -04:00
|
|
|
"""
|
|
|
|
# Just one does not exist and we get 404
|
|
|
|
argument = [-1, 1]
|
|
|
|
uri = f"api/v1/chart/export/?q={prison.dumps(argument)}"
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.get_assert_metric(uri, "export")
|
|
|
|
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
def test_export_chart_gamma(self):
|
|
|
|
"""
|
2020-11-20 17:40:27 -05:00
|
|
|
Chart API: Test export chart has gamma
|
2020-10-22 15:06:58 -04:00
|
|
|
"""
|
|
|
|
example_chart = db.session.query(Slice).all()[0]
|
|
|
|
argument = [example_chart.id]
|
|
|
|
uri = f"api/v1/chart/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="gamma")
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
|
|
|
|
assert rv.status_code == 404
|
2020-11-20 17:40:27 -05:00
|
|
|
|
|
|
|
def test_import_chart(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test import chart
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/chart/import/"
|
|
|
|
|
|
|
|
buf = BytesIO()
|
|
|
|
with ZipFile(buf, "w") as bundle:
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("chart_export/metadata.yaml", "w") as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(chart_metadata_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"chart_export/databases/imported_database.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(database_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(dataset_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(chart_config).encode())
|
|
|
|
buf.seek(0)
|
|
|
|
|
|
|
|
form_data = {
|
2020-11-25 14:47:48 -05:00
|
|
|
"formData": (buf, "chart_export.zip"),
|
2020-11-20 17:40:27 -05:00
|
|
|
}
|
|
|
|
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert response == {"message": "OK"}
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert database.database_name == "imported_database"
|
|
|
|
|
|
|
|
assert len(database.tables) == 1
|
|
|
|
dataset = database.tables[0]
|
|
|
|
assert dataset.table_name == "imported_dataset"
|
|
|
|
assert str(dataset.uuid) == dataset_config["uuid"]
|
|
|
|
|
|
|
|
chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one()
|
|
|
|
assert chart.table == dataset
|
|
|
|
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(dataset)
|
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_import_chart_invalid(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test import invalid chart
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/chart/import/"
|
|
|
|
|
|
|
|
buf = BytesIO()
|
|
|
|
with ZipFile(buf, "w") as bundle:
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("chart_export/metadata.yaml", "w") as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(dataset_metadata_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open(
|
|
|
|
"chart_export/databases/imported_database.yaml", "w"
|
|
|
|
) as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(database_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(dataset_config).encode())
|
2020-11-25 14:47:48 -05:00
|
|
|
with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp:
|
2020-11-20 17:40:27 -05:00
|
|
|
fp.write(yaml.safe_dump(chart_config).encode())
|
|
|
|
buf.seek(0)
|
|
|
|
|
|
|
|
form_data = {
|
2020-11-25 14:47:48 -05:00
|
|
|
"formData": (buf, "chart_export.zip"),
|
2020-11-20 17:40:27 -05:00
|
|
|
}
|
|
|
|
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
assert rv.status_code == 422
|
|
|
|
assert response == {
|
|
|
|
"message": {"metadata.yaml": {"type": ["Must be equal to Slice."]}}
|
|
|
|
}
|