2020-01-21 13:04:52 -05:00
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
# or more contributor license agreements. See the NOTICE file
|
|
|
|
# distributed with this work for additional information
|
|
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
|
|
# to you under the Apache License, Version 2.0 (the
|
|
|
|
# "License"); you may not use this file except in compliance
|
|
|
|
# with the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing,
|
|
|
|
# software distributed under the License is distributed on an
|
|
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
# KIND, either express or implied. See the License for the
|
|
|
|
# specific language governing permissions and limitations
|
|
|
|
# under the License.
|
|
|
|
"""Unit tests for Superset"""
|
|
|
|
import json
|
2020-10-22 15:06:58 -04:00
|
|
|
from io import BytesIO
|
2023-07-19 14:12:36 -04:00
|
|
|
from unittest import mock
|
2020-11-20 17:40:27 -05:00
|
|
|
from zipfile import is_zipfile, ZipFile
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2020-07-20 11:46:51 -04:00
|
|
|
import prison
|
|
|
|
import pytest
|
2020-11-20 17:40:27 -05:00
|
|
|
import yaml
|
2023-07-19 14:12:36 -04:00
|
|
|
from flask_babel import lazy_gettext as _
|
|
|
|
from parameterized import parameterized
|
2021-09-16 05:09:08 -04:00
|
|
|
from sqlalchemy import and_
|
2020-03-27 05:31:01 -04:00
|
|
|
from sqlalchemy.sql import func
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2023-11-22 14:55:54 -05:00
|
|
|
from superset.commands.chart.data.get_data_command import ChartDataCommand
|
|
|
|
from superset.commands.chart.exceptions import ChartDataQueryFailedError
|
2021-11-14 16:35:23 -05:00
|
|
|
from superset.connectors.sqla.models import SqlaTable
|
2023-04-03 13:29:02 -04:00
|
|
|
from superset.extensions import cache_manager, db, security_manager
|
2020-11-20 17:40:27 -05:00
|
|
|
from superset.models.core import Database, FavStar, FavStarClassName
|
2020-01-21 13:04:52 -05:00
|
|
|
from superset.models.dashboard import Dashboard
|
|
|
|
from superset.models.slice import Slice
|
2023-07-19 14:12:36 -04:00
|
|
|
from superset.reports.models import ReportSchedule, ReportScheduleType
|
2021-11-14 16:35:23 -05:00
|
|
|
from superset.utils.core import get_example_default_schema
|
2023-06-20 07:08:29 -04:00
|
|
|
from superset.utils.database import get_example_database
|
2023-07-19 14:12:36 -04:00
|
|
|
from superset.viz import viz_types
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin
|
2021-11-14 16:35:23 -05:00
|
|
|
from tests.integration_tests.base_tests import SupersetTestCase
|
2023-07-19 14:12:36 -04:00
|
|
|
from tests.integration_tests.conftest import with_feature_flags
|
2021-11-14 16:35:23 -05:00
|
|
|
from tests.integration_tests.fixtures.birth_names_dashboard import (
|
|
|
|
load_birth_names_dashboard_with_slices,
|
2021-12-16 19:11:47 -05:00
|
|
|
load_birth_names_data,
|
2021-11-14 16:35:23 -05:00
|
|
|
)
|
|
|
|
from tests.integration_tests.fixtures.energy_dashboard import (
|
2021-12-16 19:11:47 -05:00
|
|
|
load_energy_table_data,
|
2023-07-19 14:12:36 -04:00
|
|
|
load_energy_table_with_slice,
|
2021-07-01 11:03:07 -04:00
|
|
|
)
|
|
|
|
from tests.integration_tests.fixtures.importexport import (
|
2020-11-20 17:40:27 -05:00
|
|
|
chart_config,
|
|
|
|
chart_metadata_config,
|
|
|
|
database_config,
|
|
|
|
dataset_config,
|
|
|
|
dataset_metadata_config,
|
|
|
|
)
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.fixtures.unicode_dashboard import (
|
|
|
|
load_unicode_dashboard_with_slice,
|
2021-12-16 19:11:47 -05:00
|
|
|
load_unicode_data,
|
2021-07-01 11:03:07 -04:00
|
|
|
)
|
2021-11-14 16:35:23 -05:00
|
|
|
from tests.integration_tests.fixtures.world_bank_dashboard import (
|
|
|
|
load_world_bank_dashboard_with_slices,
|
2021-12-16 19:11:47 -05:00
|
|
|
load_world_bank_data,
|
2021-11-14 16:35:23 -05:00
|
|
|
)
|
2021-11-15 05:50:08 -05:00
|
|
|
from tests.integration_tests.insert_chart_mixin import InsertChartMixin
|
2021-11-14 16:35:23 -05:00
|
|
|
from tests.integration_tests.test_app import app
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.utils.get_dashboards import get_dashboards_ids
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2020-06-09 04:46:28 -04:00
|
|
|
CHART_DATA_URI = "api/v1/chart/data"
|
2020-10-01 18:08:40 -04:00
|
|
|
CHARTS_FIXTURE_COUNT = 10
|
2020-06-09 04:46:28 -04:00
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2021-02-15 14:41:59 -05:00
|
|
|
class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixin):
|
2020-01-21 13:04:52 -05:00
|
|
|
resource_name = "chart"
|
|
|
|
|
feat(SIP-39): Async query support for charts (#11499)
* Generate JWT in Flask app
* Refactor chart data API query logic, add JWT validation and async worker
* Add redis stream implementation, refactoring
* Add chart data cache endpoint, refactor QueryContext caching
* Typing, linting, refactoring
* pytest fixes and openapi schema update
* Enforce caching be configured for async query init
* Async query processing for explore_json endpoint
* Add /api/v1/async_event endpoint
* Async frontend for dashboards [WIP]
* Chart async error message support, refactoring
* Abstract asyncEvent middleware
* Async chart loading for Explore
* Pylint fixes
* asyncEvent middleware -> TypeScript, JS linting
* Chart data API: enforce forced_cache, add tests
* Add tests for explore_json endpoints
* Add test for chart data cache enpoint (no login)
* Consolidate set_and_log_cache and add STORE_CACHE_KEYS_IN_METADATA_DB flag
* Add tests for tasks/async_queries and address PR comments
* Bypass non-JSON result formats for async queries
* Add tests for redux middleware
* Remove debug statement
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
* Skip force_cached if no queryObj
* SunburstViz: don't modify self.form_data
* Fix failing annotation test
* Resolve merge/lint issues
* Reduce polling delay
* Fix new getClientErrorObject reference
* Fix flakey unit tests
* /api/v1/async_event: increment redis stream ID, add tests
* PR feedback: refactoring, configuration
* Fixup: remove debugging
* Fix typescript errors due to redux upgrade
* Update UPDATING.md
* Fix failing py tests
* asyncEvent_spec.js -> asyncEvent_spec.ts
* Refactor flakey Python 3.7 mock assertions
* Fix another shared state issue in Py tests
* Use 'sub' claim in JWT for user_id
* Refactor async middleware config
* Fixup: restore FeatureFlag boolean type
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
2020-12-10 23:21:56 -05:00
|
|
|
@pytest.fixture(autouse=True)
|
|
|
|
def clear_data_cache(self):
|
|
|
|
with app.app_context():
|
|
|
|
cache_manager.data_cache.clear()
|
|
|
|
yield
|
|
|
|
|
2020-10-01 18:08:40 -04:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_charts(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
charts = []
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
for cx in range(CHARTS_FIXTURE_COUNT - 1):
|
2022-03-02 00:44:36 -05:00
|
|
|
charts.append(self.insert_chart(f"name{cx}", [admin.id], 1))
|
2020-10-01 18:08:40 -04:00
|
|
|
fav_charts = []
|
|
|
|
for cx in range(round(CHARTS_FIXTURE_COUNT / 2)):
|
|
|
|
fav_star = FavStar(
|
|
|
|
user_id=admin.id, class_name="slice", obj_id=charts[cx].id
|
|
|
|
)
|
|
|
|
db.session.add(fav_star)
|
|
|
|
db.session.commit()
|
|
|
|
fav_charts.append(fav_star)
|
|
|
|
yield charts
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
for chart in charts:
|
|
|
|
db.session.delete(chart)
|
|
|
|
for fav_chart in fav_charts:
|
|
|
|
db.session.delete(fav_chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2022-10-04 04:13:11 -04:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_charts_created_by_gamma(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
charts = []
|
|
|
|
user = self.get_user("gamma")
|
|
|
|
for cx in range(CHARTS_FIXTURE_COUNT - 1):
|
|
|
|
charts.append(self.insert_chart(f"gamma{cx}", [user.id], 1))
|
|
|
|
yield charts
|
|
|
|
# rollback changes
|
|
|
|
for chart in charts:
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-11-24 06:42:52 -05:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_certified_charts(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
certified_charts = []
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
for cx in range(CHARTS_FIXTURE_COUNT):
|
|
|
|
certified_charts.append(
|
|
|
|
self.insert_chart(
|
|
|
|
f"certified{cx}",
|
|
|
|
[admin.id],
|
|
|
|
1,
|
|
|
|
certified_by="John Doe",
|
|
|
|
certification_details="Sample certification",
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
yield certified_charts
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
for chart in certified_charts:
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-26 03:45:49 -05:00
|
|
|
@pytest.fixture()
|
|
|
|
def create_chart_with_report(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
chart = self.insert_chart(f"chart_report", [admin.id], 1)
|
|
|
|
report_schedule = ReportSchedule(
|
|
|
|
type=ReportScheduleType.REPORT,
|
|
|
|
name="report_with_chart",
|
|
|
|
crontab="* * * * *",
|
|
|
|
chart=chart,
|
|
|
|
)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
yield chart
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
db.session.delete(report_schedule)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-30 12:48:14 -05:00
|
|
|
@pytest.fixture()
|
|
|
|
def add_dashboard_to_chart(self):
|
|
|
|
with self.create_app().app_context():
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
|
|
|
|
self.chart = self.insert_chart("My chart", [admin.id], 1)
|
|
|
|
|
|
|
|
self.original_dashboard = Dashboard()
|
|
|
|
self.original_dashboard.dashboard_title = "Original Dashboard"
|
|
|
|
self.original_dashboard.slug = "slug"
|
|
|
|
self.original_dashboard.owners = [admin]
|
|
|
|
self.original_dashboard.slices = [self.chart]
|
|
|
|
self.original_dashboard.published = False
|
|
|
|
db.session.add(self.original_dashboard)
|
|
|
|
|
|
|
|
self.new_dashboard = Dashboard()
|
|
|
|
self.new_dashboard.dashboard_title = "New Dashboard"
|
|
|
|
self.new_dashboard.slug = "new_slug"
|
|
|
|
self.new_dashboard.owners = [admin]
|
|
|
|
self.new_dashboard.published = False
|
|
|
|
db.session.add(self.new_dashboard)
|
|
|
|
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
yield self.chart
|
|
|
|
|
|
|
|
db.session.delete(self.original_dashboard)
|
|
|
|
db.session.delete(self.new_dashboard)
|
|
|
|
db.session.delete(self.chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-12-15 06:27:06 -05:00
|
|
|
def test_info_security_chart(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test info security
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
params = {"keys": ["permissions"]}
|
|
|
|
uri = f"api/v1/chart/_info?q={prison.dumps(params)}"
|
|
|
|
rv = self.get_assert_metric(uri, "info")
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
2023-06-20 07:08:29 -04:00
|
|
|
assert set(data["permissions"]) == {
|
|
|
|
"can_read",
|
|
|
|
"can_write",
|
|
|
|
"can_export",
|
|
|
|
"can_warm_up_cache",
|
|
|
|
}
|
2020-12-15 06:27:06 -05:00
|
|
|
|
2020-12-10 17:50:10 -05:00
|
|
|
def create_chart_import(self):
|
|
|
|
buf = BytesIO()
|
|
|
|
with ZipFile(buf, "w") as bundle:
|
|
|
|
with bundle.open("chart_export/metadata.yaml", "w") as fp:
|
|
|
|
fp.write(yaml.safe_dump(chart_metadata_config).encode())
|
|
|
|
with bundle.open(
|
|
|
|
"chart_export/databases/imported_database.yaml", "w"
|
|
|
|
) as fp:
|
|
|
|
fp.write(yaml.safe_dump(database_config).encode())
|
|
|
|
with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp:
|
|
|
|
fp.write(yaml.safe_dump(dataset_config).encode())
|
|
|
|
with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp:
|
|
|
|
fp.write(yaml.safe_dump(chart_config).encode())
|
|
|
|
buf.seek(0)
|
|
|
|
return buf
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin_id = self.get_user("admin").id
|
|
|
|
chart_id = self.insert_chart("name", [admin_id], 1).id
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_charts(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
2020-09-30 16:53:04 -04:00
|
|
|
admin = self.get_user("admin")
|
2020-03-27 05:31:01 -04:00
|
|
|
chart_count = 4
|
|
|
|
chart_ids = list()
|
|
|
|
for chart_name_index in range(chart_count):
|
|
|
|
chart_ids.append(
|
2020-09-30 16:53:04 -04:00
|
|
|
self.insert_chart(f"title{chart_name_index}", [admin.id], 1, admin).id
|
2020-03-27 05:31:01 -04:00
|
|
|
)
|
|
|
|
self.login(username="admin")
|
|
|
|
argument = chart_ids
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(argument)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_response = {"message": f"Deleted {chart_count} charts"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
for chart_id in chart_ids:
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
|
|
|
def test_delete_bulk_chart_bad_request(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk bad request
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
chart_ids = [1, "a"]
|
|
|
|
self.login(username="admin")
|
|
|
|
argument = chart_ids
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(argument)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_not_found_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test not found delete
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart_id = 1000
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2020-11-26 03:45:49 -05:00
|
|
|
@pytest.mark.usefixtures("create_chart_with_report")
|
|
|
|
def test_delete_chart_with_report(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test delete with associated report
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter(Slice.slice_name == "chart_report")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
expected_response = {
|
|
|
|
"message": "There are associated alerts or reports: report_with_chart"
|
|
|
|
}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_charts_not_found(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk not found
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
max_id = db.session.query(func.max(Slice.id)).scalar()
|
|
|
|
chart_ids = [max_id + 1, max_id + 2]
|
|
|
|
self.login(username="admin")
|
2020-11-26 03:45:49 -05:00
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(chart_ids)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2020-11-26 03:45:49 -05:00
|
|
|
@pytest.mark.usefixtures("create_chart_with_report", "create_charts")
|
|
|
|
def test_bulk_delete_chart_with_report(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test bulk delete with associated report
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart_with_report = (
|
|
|
|
db.session.query(Slice.id)
|
|
|
|
.filter(Slice.slice_name == "chart_report")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
|
|
|
|
charts = db.session.query(Slice.id).filter(Slice.slice_name.like("name%")).all()
|
|
|
|
chart_ids = [chart.id for chart in charts]
|
|
|
|
chart_ids.append(chart_with_report.id)
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(chart_ids)}"
|
|
|
|
rv = self.client.delete(uri)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
expected_response = {
|
|
|
|
"message": "There are associated alerts or reports: report_with_chart"
|
|
|
|
}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_chart_admin_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test admin delete not owned
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
gamma_id = self.get_user("gamma").id
|
|
|
|
chart_id = self.insert_chart("title", [gamma_id], 1).id
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_chart_admin_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test admin delete bulk not owned
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
gamma_id = self.get_user("gamma").id
|
|
|
|
chart_count = 4
|
|
|
|
chart_ids = list()
|
|
|
|
for chart_name_index in range(chart_count):
|
|
|
|
chart_ids.append(
|
|
|
|
self.insert_chart(f"title{chart_name_index}", [gamma_id], 1).id
|
|
|
|
)
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
argument = chart_ids
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(argument)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
expected_response = {"message": f"Deleted {chart_count} charts"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
for chart_id in chart_ids:
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
self.assertEqual(model, None)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_delete_chart_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete try not owned
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
user_alpha1 = self.create_user(
|
|
|
|
"alpha1", "password", "Alpha", email="alpha1@superset.org"
|
|
|
|
)
|
|
|
|
user_alpha2 = self.create_user(
|
|
|
|
"alpha2", "password", "Alpha", email="alpha2@superset.org"
|
|
|
|
)
|
|
|
|
chart = self.insert_chart("title", [user_alpha1.id], 1)
|
|
|
|
self.login(username="alpha2", password="password")
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "delete")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(user_alpha1)
|
|
|
|
db.session.delete(user_alpha2)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-03-27 05:31:01 -04:00
|
|
|
def test_delete_bulk_chart_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test delete bulk try not owned
|
2020-03-27 05:31:01 -04:00
|
|
|
"""
|
|
|
|
user_alpha1 = self.create_user(
|
|
|
|
"alpha1", "password", "Alpha", email="alpha1@superset.org"
|
|
|
|
)
|
|
|
|
user_alpha2 = self.create_user(
|
|
|
|
"alpha2", "password", "Alpha", email="alpha2@superset.org"
|
|
|
|
)
|
|
|
|
|
|
|
|
chart_count = 4
|
|
|
|
charts = list()
|
|
|
|
for chart_name_index in range(chart_count):
|
|
|
|
charts.append(
|
|
|
|
self.insert_chart(f"title{chart_name_index}", [user_alpha1.id], 1)
|
|
|
|
)
|
|
|
|
|
|
|
|
owned_chart = self.insert_chart("title_owned", [user_alpha2.id], 1)
|
|
|
|
|
|
|
|
self.login(username="alpha2", password="password")
|
|
|
|
|
|
|
|
# verify we can't delete not owned charts
|
|
|
|
arguments = [chart.id for chart in charts]
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_response = {"message": "Forbidden"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
# # nothing is deleted in bulk with a list of owned and not owned charts
|
|
|
|
arguments = [chart.id for chart in charts] + [owned_chart.id]
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.delete_assert_metric(uri, "bulk_delete")
|
2020-03-27 05:31:01 -04:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_response = {"message": "Forbidden"}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
for chart in charts:
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(owned_chart)
|
|
|
|
db.session.delete(user_alpha1)
|
|
|
|
db.session.delete(user_alpha2)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-01-13 17:20:05 -05:00
|
|
|
@pytest.mark.usefixtures(
|
|
|
|
"load_world_bank_dashboard_with_slices",
|
|
|
|
"load_birth_names_dashboard_with_slices",
|
|
|
|
)
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_create_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create chart
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
2021-01-11 08:57:55 -05:00
|
|
|
dashboards_ids = get_dashboards_ids(db, ["world_health", "births"])
|
2020-01-21 13:04:52 -05:00
|
|
|
admin_id = self.get_user("admin").id
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "name1",
|
|
|
|
"description": "description1",
|
|
|
|
"owners": [admin_id],
|
|
|
|
"viz_type": "viz_type1",
|
|
|
|
"params": "1234",
|
|
|
|
"cache_timeout": 1000,
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
2021-01-11 08:57:55 -05:00
|
|
|
"dashboards": dashboards_ids,
|
2021-11-24 06:42:52 -05:00
|
|
|
"certified_by": "John Doe",
|
|
|
|
"certification_details": "Sample certification",
|
2020-01-21 13:04:52 -05:00
|
|
|
}
|
|
|
|
self.login(username="admin")
|
2023-07-11 11:58:29 -04:00
|
|
|
uri = "api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 201)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
model = db.session.query(Slice).get(data.get("id"))
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_create_simple_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create simple chart
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
2023-07-11 11:58:29 -04:00
|
|
|
uri = "api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 201)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
model = db.session.query(Slice).get(data.get("id"))
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_create_chart_validate_owners(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create validate owners
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"owners": [1000],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
2023-07-11 11:58:29 -04:00
|
|
|
uri = "api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-03-24 06:05:11 -04:00
|
|
|
expected_response = {"message": {"owners": ["Owners are invalid"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
def test_create_chart_validate_params(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create validate params json
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"params": '{"A:"a"}',
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
2023-07-11 11:58:29 -04:00
|
|
|
uri = "api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
2020-03-24 06:05:11 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
2020-01-21 13:04:52 -05:00
|
|
|
|
|
|
|
def test_create_chart_validate_datasource(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test create validate datasource
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "unknown",
|
|
|
|
}
|
2021-01-25 18:09:03 -05:00
|
|
|
rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post")
|
2020-05-05 09:42:18 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
2020-01-21 13:04:52 -05:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2020-07-07 08:26:54 -04:00
|
|
|
response,
|
2022-06-02 19:48:16 -04:00
|
|
|
{
|
|
|
|
"message": {
|
|
|
|
"datasource_type": [
|
|
|
|
"Must be one of: sl_table, table, dataset, query, saved_query, view."
|
|
|
|
]
|
|
|
|
}
|
|
|
|
},
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 0,
|
|
|
|
"datasource_type": "table",
|
|
|
|
}
|
2021-01-25 18:09:03 -05:00
|
|
|
rv = self.post_assert_metric("/api/v1/chart/", chart_data, "post")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2022-06-02 19:48:16 -04:00
|
|
|
response, {"message": {"datasource_id": ["Datasource does not exist"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
|
|
|
|
2023-07-11 11:58:29 -04:00
|
|
|
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
|
|
|
|
def test_create_chart_validate_user_is_dashboard_owner(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test create validate user is dashboard owner
|
|
|
|
"""
|
|
|
|
dash = db.session.query(Dashboard).filter_by(slug="world_health").first()
|
|
|
|
# Must be published so that alpha user has read access to dash
|
|
|
|
dash.published = True
|
|
|
|
db.session.commit()
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"dashboards": [dash.id],
|
|
|
|
}
|
|
|
|
self.login(username="alpha")
|
|
|
|
uri = "api/v1/chart/"
|
|
|
|
rv = self.post_assert_metric(uri, chart_data, "post")
|
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
|
|
|
response,
|
|
|
|
{"message": "Changing one or more of these dashboards is forbidden"},
|
|
|
|
)
|
|
|
|
|
2021-01-13 17:20:05 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_update_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
2021-11-04 14:09:08 -04:00
|
|
|
schema = get_example_default_schema()
|
|
|
|
full_table_name = f"{schema}.birth_names" if schema else "birth_names"
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
admin = self.get_user("admin")
|
|
|
|
gamma = self.get_user("gamma")
|
2021-08-02 15:45:55 -04:00
|
|
|
birth_names_table_id = SupersetTestCase.get_table(name="birth_names").id
|
2021-01-13 17:20:05 -05:00
|
|
|
chart_id = self.insert_chart(
|
|
|
|
"title", [admin.id], birth_names_table_id, admin
|
|
|
|
).id
|
|
|
|
dash_id = db.session.query(Dashboard.id).filter_by(slug="births").first()[0]
|
2020-01-21 13:04:52 -05:00
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1_changed",
|
|
|
|
"description": "description1",
|
|
|
|
"owners": [gamma.id],
|
|
|
|
"viz_type": "viz_type1",
|
2020-08-10 16:20:19 -04:00
|
|
|
"params": """{"a": 1}""",
|
2020-01-21 13:04:52 -05:00
|
|
|
"cache_timeout": 1000,
|
2020-08-10 16:20:19 -04:00
|
|
|
"datasource_id": birth_names_table_id,
|
2020-01-21 13:04:52 -05:00
|
|
|
"datasource_type": "table",
|
2021-01-13 17:20:05 -05:00
|
|
|
"dashboards": [dash_id],
|
2021-11-24 06:42:52 -05:00
|
|
|
"certified_by": "Mario Rossi",
|
|
|
|
"certification_details": "Edited certification",
|
2020-01-21 13:04:52 -05:00
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
2021-01-13 17:20:05 -05:00
|
|
|
related_dashboard = db.session.query(Dashboard).filter_by(slug="births").first()
|
2020-09-30 16:53:04 -04:00
|
|
|
self.assertEqual(model.created_by, admin)
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(model.slice_name, "title1_changed")
|
|
|
|
self.assertEqual(model.description, "description1")
|
2021-08-13 05:42:48 -04:00
|
|
|
self.assertNotIn(admin, model.owners)
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertIn(gamma, model.owners)
|
|
|
|
self.assertEqual(model.viz_type, "viz_type1")
|
2020-08-10 16:20:19 -04:00
|
|
|
self.assertEqual(model.params, """{"a": 1}""")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(model.cache_timeout, 1000)
|
2020-08-10 16:20:19 -04:00
|
|
|
self.assertEqual(model.datasource_id, birth_names_table_id)
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(model.datasource_type, "table")
|
2021-11-04 14:09:08 -04:00
|
|
|
self.assertEqual(model.datasource_name, full_table_name)
|
2021-11-24 06:42:52 -05:00
|
|
|
self.assertEqual(model.certified_by, "Mario Rossi")
|
|
|
|
self.assertEqual(model.certification_details, "Edited certification")
|
2021-01-13 17:20:05 -05:00
|
|
|
self.assertIn(model.id, [slice.id for slice in related_dashboard.slices])
|
2020-01-21 13:04:52 -05:00
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
2023-05-17 13:07:47 -04:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
|
|
|
def test_chart_get_list_no_username(self):
|
|
|
|
"""
|
|
|
|
Chart API: Tests that no username is returned
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
birth_names_table_id = SupersetTestCase.get_table(name="birth_names").id
|
|
|
|
chart_id = self.insert_chart("title", [admin.id], birth_names_table_id).id
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": (new_name := "title1_changed"),
|
|
|
|
"owners": [admin.id],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
|
|
|
|
response = self.get_assert_metric("api/v1/chart/", "get_list")
|
|
|
|
res = json.loads(response.data.decode("utf-8"))["result"]
|
|
|
|
|
|
|
|
current_chart = [d for d in res if d["id"] == chart_id][0]
|
|
|
|
self.assertEqual(current_chart["slice_name"], new_name)
|
|
|
|
self.assertNotIn("username", current_chart["changed_by"].keys())
|
|
|
|
self.assertNotIn("username", current_chart["owners"][0].keys())
|
|
|
|
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
|
|
|
def test_chart_get_no_username(self):
|
|
|
|
"""
|
|
|
|
Chart API: Tests that no username is returned
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
birth_names_table_id = SupersetTestCase.get_table(name="birth_names").id
|
|
|
|
chart_id = self.insert_chart("title", [admin.id], birth_names_table_id).id
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": (new_name := "title1_changed"),
|
|
|
|
"owners": [admin.id],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
|
|
|
|
|
|
|
response = self.get_assert_metric(uri, "get")
|
|
|
|
res = json.loads(response.data.decode("utf-8"))["result"]
|
|
|
|
|
|
|
|
self.assertEqual(res["slice_name"], new_name)
|
|
|
|
self.assertNotIn("username", res["owners"][0].keys())
|
|
|
|
|
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-08-13 05:42:48 -04:00
|
|
|
def test_update_chart_new_owner_not_admin(self):
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
2021-08-13 05:42:48 -04:00
|
|
|
Chart API: Test update set new owner implicitly adds logged in owner
|
|
|
|
"""
|
2023-03-01 06:32:04 -05:00
|
|
|
gamma = self.get_user("gamma_no_csv")
|
2021-08-13 05:42:48 -04:00
|
|
|
alpha = self.get_user("alpha")
|
2023-03-01 06:32:04 -05:00
|
|
|
chart_id = self.insert_chart("title", [gamma.id], 1).id
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": (new_name := "title1_changed"),
|
|
|
|
"owners": [alpha.id],
|
|
|
|
}
|
|
|
|
self.login(username=gamma.username)
|
2021-08-13 05:42:48 -04:00
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2023-03-01 06:32:04 -05:00
|
|
|
assert rv.status_code == 200
|
2021-08-13 05:42:48 -04:00
|
|
|
model = db.session.query(Slice).get(chart_id)
|
2023-03-01 06:32:04 -05:00
|
|
|
assert model.slice_name == new_name
|
|
|
|
assert alpha in model.owners
|
|
|
|
assert gamma in model.owners
|
2021-08-13 05:42:48 -04:00
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_chart_new_owner_admin(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test update set new owner as admin to other than current user
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
gamma = self.get_user("gamma")
|
|
|
|
admin = self.get_user("admin")
|
2021-08-13 05:42:48 -04:00
|
|
|
chart_id = self.insert_chart("title", [admin.id], 1).id
|
|
|
|
chart_data = {"slice_name": "title1_changed", "owners": [gamma.id]}
|
2020-01-21 13:04:52 -05:00
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
model = db.session.query(Slice).get(chart_id)
|
2021-08-13 05:42:48 -04:00
|
|
|
self.assertNotIn(admin, model.owners)
|
|
|
|
self.assertIn(gamma, model.owners)
|
2020-01-21 13:04:52 -05:00
|
|
|
db.session.delete(model)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-11-30 12:48:14 -05:00
|
|
|
@pytest.mark.usefixtures("add_dashboard_to_chart")
|
|
|
|
def test_update_chart_new_dashboards(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test update set new owner to current user
|
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1_changed",
|
|
|
|
"dashboards": [self.new_dashboard.id],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{self.chart.id}"
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
self.assertIn(self.new_dashboard, self.chart.dashboards)
|
|
|
|
self.assertNotIn(self.original_dashboard, self.chart.dashboards)
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("add_dashboard_to_chart")
|
|
|
|
def test_not_update_chart_none_dashboards(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test update set new owner to current user
|
|
|
|
"""
|
|
|
|
chart_data = {"slice_name": "title1_changed_again"}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{self.chart.id}"
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
self.assertIn(self.original_dashboard, self.chart.dashboards)
|
|
|
|
self.assertEqual(len(self.chart.dashboards), 1)
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_update_chart_not_owned(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update not owned
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
user_alpha1 = self.create_user(
|
|
|
|
"alpha1", "password", "Alpha", email="alpha1@superset.org"
|
|
|
|
)
|
|
|
|
user_alpha2 = self.create_user(
|
|
|
|
"alpha2", "password", "Alpha", email="alpha2@superset.org"
|
|
|
|
)
|
|
|
|
chart = self.insert_chart("title", [user_alpha1.id], 1)
|
|
|
|
|
|
|
|
self.login(username="alpha2", password="password")
|
|
|
|
chart_data = {"slice_name": "title1_changed"}
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 403)
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(user_alpha1)
|
|
|
|
db.session.delete(user_alpha2)
|
|
|
|
db.session.commit()
|
|
|
|
|
2023-02-02 13:07:17 -05:00
|
|
|
def test_update_chart_linked_with_not_owned_dashboard(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test update chart which is linked to not owned dashboard
|
|
|
|
"""
|
|
|
|
user_alpha1 = self.create_user(
|
|
|
|
"alpha1", "password", "Alpha", email="alpha1@superset.org"
|
|
|
|
)
|
|
|
|
user_alpha2 = self.create_user(
|
|
|
|
"alpha2", "password", "Alpha", email="alpha2@superset.org"
|
|
|
|
)
|
|
|
|
chart = self.insert_chart("title", [user_alpha1.id], 1)
|
|
|
|
|
|
|
|
original_dashboard = Dashboard()
|
|
|
|
original_dashboard.dashboard_title = "Original Dashboard"
|
|
|
|
original_dashboard.slug = "slug"
|
|
|
|
original_dashboard.owners = [user_alpha1]
|
|
|
|
original_dashboard.slices = [chart]
|
|
|
|
original_dashboard.published = False
|
|
|
|
db.session.add(original_dashboard)
|
|
|
|
|
|
|
|
new_dashboard = Dashboard()
|
|
|
|
new_dashboard.dashboard_title = "Cloned Dashboard"
|
|
|
|
new_dashboard.slug = "new_slug"
|
|
|
|
new_dashboard.owners = [user_alpha2]
|
|
|
|
new_dashboard.slices = [chart]
|
|
|
|
new_dashboard.published = False
|
|
|
|
db.session.add(new_dashboard)
|
|
|
|
|
|
|
|
self.login(username="alpha1", password="password")
|
|
|
|
chart_data_with_invalid_dashboard = {
|
|
|
|
"slice_name": "title1_changed",
|
|
|
|
"dashboards": [original_dashboard.id, 0],
|
|
|
|
}
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1_changed",
|
|
|
|
"dashboards": [original_dashboard.id, new_dashboard.id],
|
|
|
|
}
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
|
|
|
|
|
|
|
rv = self.put_assert_metric(uri, chart_data_with_invalid_dashboard, "put")
|
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
expected_response = {"message": {"dashboards": ["Dashboards do not exist"]}}
|
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
|
|
|
rv = self.put_assert_metric(uri, chart_data, "put")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.delete(original_dashboard)
|
|
|
|
db.session.delete(new_dashboard)
|
|
|
|
db.session.delete(user_alpha1)
|
|
|
|
db.session.delete(user_alpha2)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_update_chart_validate_datasource(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update validate datasource
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
2021-01-25 18:09:03 -05:00
|
|
|
chart = self.insert_chart("title", owners=[admin.id], datasource_id=1)
|
2020-01-21 13:04:52 -05:00
|
|
|
self.login(username="admin")
|
2021-01-25 18:09:03 -05:00
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
chart_data = {"datasource_id": 1, "datasource_type": "unknown"}
|
2021-01-25 18:09:03 -05:00
|
|
|
rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put")
|
2020-05-05 09:42:18 -04:00
|
|
|
self.assertEqual(rv.status_code, 400)
|
2020-01-21 13:04:52 -05:00
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2020-07-07 08:26:54 -04:00
|
|
|
response,
|
2022-06-02 19:48:16 -04:00
|
|
|
{
|
|
|
|
"message": {
|
|
|
|
"datasource_type": [
|
|
|
|
"Must be one of: sl_table, table, dataset, query, saved_query, view."
|
|
|
|
]
|
|
|
|
}
|
|
|
|
},
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
2021-01-25 18:09:03 -05:00
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
chart_data = {"datasource_id": 0, "datasource_type": "table"}
|
2021-01-25 18:09:03 -05:00
|
|
|
rv = self.put_assert_metric(f"/api/v1/chart/{chart.id}", chart_data, "put")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
2022-06-02 19:48:16 -04:00
|
|
|
response, {"message": {"datasource_id": ["Datasource does not exist"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
)
|
2021-01-25 18:09:03 -05:00
|
|
|
|
2020-01-21 13:04:52 -05:00
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_update_chart_validate_owners(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test update validate owners
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_data = {
|
|
|
|
"slice_name": "title1",
|
|
|
|
"datasource_id": 1,
|
|
|
|
"datasource_type": "table",
|
|
|
|
"owners": [1000],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
|
|
|
rv = self.client.post(uri, json=chart_data)
|
|
|
|
self.assertEqual(rv.status_code, 422)
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
2020-03-24 06:05:11 -04:00
|
|
|
expected_response = {"message": {"owners": ["Owners are invalid"]}}
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(response, expected_response)
|
|
|
|
|
2021-01-13 17:20:05 -05:00
|
|
|
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_chart(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get chart
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
chart = self.insert_chart("title", [admin.id], 1)
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart.id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
expected_result = {
|
|
|
|
"cache_timeout": None,
|
2021-11-24 06:42:52 -05:00
|
|
|
"certified_by": None,
|
|
|
|
"certification_details": None,
|
2020-01-21 13:04:52 -05:00
|
|
|
"dashboards": [],
|
|
|
|
"description": None,
|
2020-04-07 12:09:02 -04:00
|
|
|
"owners": [
|
|
|
|
{
|
|
|
|
"id": 1,
|
|
|
|
"first_name": "admin",
|
|
|
|
"last_name": "user",
|
|
|
|
}
|
|
|
|
],
|
2020-01-21 13:04:52 -05:00
|
|
|
"params": None,
|
|
|
|
"slice_name": "title",
|
2023-04-04 10:32:24 -04:00
|
|
|
"tags": [],
|
2020-01-21 13:04:52 -05:00
|
|
|
"viz_type": None,
|
2021-07-26 15:43:25 -04:00
|
|
|
"query_context": None,
|
2022-03-28 19:32:57 -04:00
|
|
|
"is_managed_externally": False,
|
2020-01-21 13:04:52 -05:00
|
|
|
}
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2022-08-15 15:16:40 -04:00
|
|
|
self.assertIn("changed_on_delta_humanized", data["result"])
|
|
|
|
self.assertIn("id", data["result"])
|
|
|
|
self.assertIn("thumbnail_url", data["result"])
|
|
|
|
self.assertIn("url", data["result"])
|
|
|
|
for key, value in data["result"].items():
|
|
|
|
# We can't assert timestamp values or id/urls
|
|
|
|
if key not in (
|
|
|
|
"changed_on_delta_humanized",
|
|
|
|
"id",
|
|
|
|
"thumbnail_url",
|
|
|
|
"url",
|
|
|
|
):
|
|
|
|
self.assertEqual(value, expected_result[key])
|
2020-01-21 13:04:52 -05:00
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_get_chart_not_found(self):
|
|
|
|
"""
|
2020-09-28 13:18:34 -04:00
|
|
|
Chart API: Test get chart not found
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
chart_id = 1000
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart_id}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_chart_no_data_access(self):
|
|
|
|
"""
|
2020-09-28 13:18:34 -04:00
|
|
|
Chart API: Test get chart without data access
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
|
|
|
chart_no_access = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter_by(slice_name="Girl Name Cloud")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
uri = f"api/v1/chart/{chart_no_access.id}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures(
|
|
|
|
"load_energy_table_with_slice",
|
|
|
|
"load_birth_names_dashboard_with_slices",
|
|
|
|
"load_unicode_dashboard_with_slice",
|
2021-01-13 17:20:05 -05:00
|
|
|
"load_world_bank_dashboard_with_slices",
|
2021-01-11 08:57:55 -05:00
|
|
|
)
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_charts(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2021-11-05 11:05:48 -04:00
|
|
|
self.assertEqual(data["count"], 34)
|
2020-01-21 13:04:52 -05:00
|
|
|
|
2022-09-23 12:21:19 -04:00
|
|
|
@pytest.mark.usefixtures("load_energy_table_with_slice", "add_dashboard_to_chart")
|
|
|
|
def test_get_charts_dashboards(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test get charts with related dashboards
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{"col": "slice_name", "opr": "eq", "value": self.chart.slice_name}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert data["result"][0]["dashboards"] == [
|
|
|
|
{
|
|
|
|
"id": self.original_dashboard.id,
|
|
|
|
"dashboard_title": self.original_dashboard.dashboard_title,
|
|
|
|
}
|
|
|
|
]
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("load_energy_table_with_slice", "add_dashboard_to_chart")
|
|
|
|
def test_get_charts_dashboard_filter(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test get charts with dashboard filter
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{
|
|
|
|
"col": "dashboards",
|
|
|
|
"opr": "rel_m_m",
|
|
|
|
"value": self.original_dashboard.id,
|
|
|
|
}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
result = data["result"]
|
|
|
|
assert len(result) == 1
|
|
|
|
assert result[0]["slice_name"] == self.chart.slice_name
|
|
|
|
|
2020-07-15 14:09:32 -04:00
|
|
|
def test_get_charts_changed_on(self):
|
|
|
|
"""
|
|
|
|
Dashboard API: Test get charts changed on
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
chart = self.insert_chart("foo_a", [admin.id], 1, description="ZY_bar")
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
|
|
|
|
arguments = {
|
|
|
|
"order_column": "changed_on_delta_humanized",
|
|
|
|
"order_direction": "desc",
|
|
|
|
}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2022-04-01 04:47:56 -04:00
|
|
|
assert data["result"][0]["changed_on_delta_humanized"] in (
|
|
|
|
"now",
|
|
|
|
"a second ago",
|
2020-07-15 14:09:32 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
# rollback changes
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2021-01-13 17:20:05 -05:00
|
|
|
@pytest.mark.usefixtures(
|
|
|
|
"load_world_bank_dashboard_with_slices",
|
|
|
|
"load_birth_names_dashboard_with_slices",
|
|
|
|
)
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_charts_filter(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts filter
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {"filters": [{"col": "slice_name", "opr": "sw", "value": "G"}]}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 5)
|
|
|
|
|
2020-10-14 15:41:37 -04:00
|
|
|
@pytest.fixture()
|
2020-12-09 15:02:29 -05:00
|
|
|
def load_energy_charts(self):
|
2020-10-14 15:41:37 -04:00
|
|
|
with app.app_context():
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
energy_table = (
|
|
|
|
db.session.query(SqlaTable)
|
|
|
|
.filter_by(table_name="energy_usage")
|
|
|
|
.one_or_none()
|
|
|
|
)
|
|
|
|
energy_table_id = 1
|
|
|
|
if energy_table:
|
|
|
|
energy_table_id = energy_table.id
|
|
|
|
chart1 = self.insert_chart(
|
|
|
|
"foo_a", [admin.id], energy_table_id, description="ZY_bar"
|
|
|
|
)
|
|
|
|
chart2 = self.insert_chart(
|
|
|
|
"zy_foo", [admin.id], energy_table_id, description="desc1"
|
|
|
|
)
|
|
|
|
chart3 = self.insert_chart(
|
|
|
|
"foo_b", [admin.id], energy_table_id, description="desc1zy_"
|
|
|
|
)
|
|
|
|
chart4 = self.insert_chart(
|
|
|
|
"foo_c", [admin.id], energy_table_id, viz_type="viz_zy_"
|
|
|
|
)
|
|
|
|
chart5 = self.insert_chart(
|
|
|
|
"bar", [admin.id], energy_table_id, description="foo"
|
|
|
|
)
|
|
|
|
|
|
|
|
yield
|
|
|
|
# rollback changes
|
|
|
|
db.session.delete(chart1)
|
|
|
|
db.session.delete(chart2)
|
|
|
|
db.session.delete(chart3)
|
|
|
|
db.session.delete(chart4)
|
|
|
|
db.session.delete(chart5)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures("load_energy_charts")
|
2020-04-11 02:25:54 -04:00
|
|
|
def test_get_charts_custom_filter(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts custom filter
|
2020-04-11 02:25:54 -04:00
|
|
|
"""
|
|
|
|
|
|
|
|
arguments = {
|
2020-09-28 13:18:34 -04:00
|
|
|
"filters": [{"col": "slice_name", "opr": "chart_all_text", "value": "zy_"}],
|
2020-04-11 02:25:54 -04:00
|
|
|
"order_column": "slice_name",
|
|
|
|
"order_direction": "asc",
|
2020-04-29 03:36:11 -04:00
|
|
|
"keys": ["none"],
|
2020-09-28 13:18:34 -04:00
|
|
|
"columns": ["slice_name", "description", "viz_type"],
|
2020-04-11 02:25:54 -04:00
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-04-11 02:25:54 -04:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2020-09-28 13:18:34 -04:00
|
|
|
self.assertEqual(data["count"], 4)
|
2020-04-11 02:25:54 -04:00
|
|
|
|
|
|
|
expected_response = [
|
2020-09-28 13:18:34 -04:00
|
|
|
{"description": "ZY_bar", "slice_name": "foo_a", "viz_type": None},
|
|
|
|
{"description": "desc1zy_", "slice_name": "foo_b", "viz_type": None},
|
|
|
|
{"description": None, "slice_name": "foo_c", "viz_type": "viz_zy_"},
|
|
|
|
{"description": "desc1", "slice_name": "zy_foo", "viz_type": None},
|
2020-04-11 02:25:54 -04:00
|
|
|
]
|
|
|
|
for index, item in enumerate(data["result"]):
|
|
|
|
self.assertEqual(
|
|
|
|
item["description"], expected_response[index]["description"]
|
|
|
|
)
|
|
|
|
self.assertEqual(item["slice_name"], expected_response[index]["slice_name"])
|
2020-09-28 13:18:34 -04:00
|
|
|
self.assertEqual(item["viz_type"], expected_response[index]["viz_type"])
|
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures("load_energy_table_with_slice", "load_energy_charts")
|
2020-10-14 15:41:37 -04:00
|
|
|
def test_admin_gets_filtered_energy_slices(self):
|
2020-09-28 13:18:34 -04:00
|
|
|
# test filtering on datasource_name
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
2022-03-29 13:03:09 -04:00
|
|
|
{
|
|
|
|
"col": "slice_name",
|
|
|
|
"opr": "chart_all_text",
|
|
|
|
"value": "energy",
|
|
|
|
}
|
2020-09-28 13:18:34 -04:00
|
|
|
],
|
|
|
|
"keys": ["none"],
|
2022-07-14 21:10:31 -04:00
|
|
|
"columns": ["slice_name", "description", "table.table_name"],
|
2020-09-28 13:18:34 -04:00
|
|
|
}
|
2020-10-14 15:41:37 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
|
2020-09-28 13:18:34 -04:00
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2022-07-14 21:10:31 -04:00
|
|
|
data = rv.json
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert data["count"] > 0
|
|
|
|
for chart in data["result"]:
|
|
|
|
print(chart)
|
|
|
|
assert (
|
|
|
|
"energy"
|
|
|
|
in " ".join(
|
|
|
|
[
|
|
|
|
chart["slice_name"] or "",
|
|
|
|
chart["description"] or "",
|
|
|
|
chart["table"]["table_name"] or "",
|
|
|
|
]
|
|
|
|
).lower()
|
|
|
|
)
|
2020-04-11 02:25:54 -04:00
|
|
|
|
2021-11-24 06:42:52 -05:00
|
|
|
@pytest.mark.usefixtures("create_certified_charts")
|
|
|
|
def test_gets_certified_charts_filter(self):
|
|
|
|
arguments = {
|
2022-03-29 13:03:09 -04:00
|
|
|
"filters": [
|
|
|
|
{
|
|
|
|
"col": "id",
|
|
|
|
"opr": "chart_is_certified",
|
|
|
|
"value": True,
|
|
|
|
}
|
|
|
|
],
|
2021-11-24 06:42:52 -05:00
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], CHARTS_FIXTURE_COUNT)
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_charts")
|
|
|
|
def test_gets_not_certified_charts_filter(self):
|
|
|
|
arguments = {
|
2022-03-29 13:03:09 -04:00
|
|
|
"filters": [
|
|
|
|
{
|
|
|
|
"col": "id",
|
|
|
|
"opr": "chart_is_certified",
|
|
|
|
"value": False,
|
|
|
|
}
|
|
|
|
],
|
2021-11-24 06:42:52 -05:00
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 17)
|
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures("load_energy_charts")
|
2020-10-14 15:41:37 -04:00
|
|
|
def test_user_gets_none_filtered_energy_slices(self):
|
|
|
|
# test filtering on datasource_name
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
2022-03-29 13:03:09 -04:00
|
|
|
{
|
|
|
|
"col": "slice_name",
|
|
|
|
"opr": "chart_all_text",
|
|
|
|
"value": "energy",
|
|
|
|
}
|
2020-10-14 15:41:37 -04:00
|
|
|
],
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
|
2020-04-11 02:25:54 -04:00
|
|
|
self.login(username="gamma")
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-04-11 02:25:54 -04:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 0)
|
|
|
|
|
2020-10-01 18:08:40 -04:00
|
|
|
@pytest.mark.usefixtures("create_charts")
|
|
|
|
def test_get_charts_favorite_filter(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test get charts favorite filter
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
users_favorite_query = db.session.query(FavStar.obj_id).filter(
|
|
|
|
and_(FavStar.user_id == admin.id, FavStar.class_name == "slice")
|
|
|
|
)
|
|
|
|
expected_models = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter(and_(Slice.id.in_(users_favorite_query)))
|
|
|
|
.order_by(Slice.slice_name.asc())
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
|
|
|
|
arguments = {
|
2020-12-22 13:03:26 -05:00
|
|
|
"filters": [{"col": "id", "opr": "chart_is_favorite", "value": True}],
|
2020-10-01 18:08:40 -04:00
|
|
|
"order_column": "slice_name",
|
|
|
|
"order_direction": "asc",
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert len(expected_models) == data["count"]
|
|
|
|
|
|
|
|
for i, expected_model in enumerate(expected_models):
|
|
|
|
assert expected_model.slice_name == data["result"][i]["slice_name"]
|
|
|
|
|
|
|
|
# Test not favorite charts
|
|
|
|
expected_models = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter(and_(~Slice.id.in_(users_favorite_query)))
|
|
|
|
.order_by(Slice.slice_name.asc())
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
arguments["filters"][0]["value"] = False
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert len(expected_models) == data["count"]
|
|
|
|
|
2022-10-04 04:13:11 -04:00
|
|
|
@pytest.mark.usefixtures("create_charts_created_by_gamma")
|
|
|
|
def test_get_charts_created_by_me_filter(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test get charts with created by me special filter
|
|
|
|
"""
|
|
|
|
gamma_user = self.get_user("gamma")
|
|
|
|
expected_models = (
|
|
|
|
db.session.query(Slice).filter(Slice.created_by_fk == gamma_user.id).all()
|
|
|
|
)
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{"col": "created_by", "opr": "chart_created_by_me", "value": "me"}
|
|
|
|
],
|
|
|
|
"order_column": "slice_name",
|
|
|
|
"order_direction": "asc",
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
self.login(username="gamma")
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert len(expected_models) == data["count"]
|
|
|
|
for i, expected_model in enumerate(expected_models):
|
|
|
|
assert expected_model.slice_name == data["result"][i]["slice_name"]
|
|
|
|
|
2020-11-03 00:26:14 -05:00
|
|
|
@pytest.mark.usefixtures("create_charts")
|
|
|
|
def test_get_current_user_favorite_status(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test get current user favorite stars
|
|
|
|
"""
|
|
|
|
admin = self.get_user("admin")
|
|
|
|
users_favorite_ids = [
|
|
|
|
star.obj_id
|
|
|
|
for star in db.session.query(FavStar.obj_id)
|
|
|
|
.filter(
|
|
|
|
and_(
|
|
|
|
FavStar.user_id == admin.id,
|
|
|
|
FavStar.class_name == FavStarClassName.CHART,
|
|
|
|
)
|
|
|
|
)
|
|
|
|
.all()
|
|
|
|
]
|
|
|
|
|
|
|
|
assert users_favorite_ids
|
|
|
|
arguments = [s.id for s in db.session.query(Slice.id).all()]
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/favorite_status/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
assert rv.status_code == 200
|
|
|
|
for res in data["result"]:
|
|
|
|
if res["id"] in users_favorite_ids:
|
|
|
|
assert res["value"]
|
|
|
|
|
2023-03-29 16:42:23 -04:00
|
|
|
def test_add_favorite(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test add chart to favorites
|
|
|
|
"""
|
|
|
|
chart = Slice(
|
|
|
|
id=100,
|
|
|
|
datasource_id=1,
|
|
|
|
datasource_type="table",
|
|
|
|
datasource_name="tmp_perm_table",
|
|
|
|
slice_name="slice_name",
|
|
|
|
)
|
|
|
|
db.session.add(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/favorite_status/?q={prison.dumps([chart.id])}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
for res in data["result"]:
|
|
|
|
assert res["value"] is False
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/{chart.id}/favorites/"
|
|
|
|
self.client.post(uri)
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/favorite_status/?q={prison.dumps([chart.id])}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
for res in data["result"]:
|
|
|
|
assert res["value"] is True
|
|
|
|
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
def test_remove_favorite(self):
|
|
|
|
"""
|
|
|
|
Dataset API: Test remove chart from favorites
|
|
|
|
"""
|
|
|
|
chart = Slice(
|
|
|
|
id=100,
|
|
|
|
datasource_id=1,
|
|
|
|
datasource_type="table",
|
|
|
|
datasource_name="tmp_perm_table",
|
|
|
|
slice_name="slice_name",
|
|
|
|
)
|
|
|
|
db.session.add(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"api/v1/chart/{chart.id}/favorites/"
|
|
|
|
self.client.post(uri)
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/favorite_status/?q={prison.dumps([chart.id])}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
for res in data["result"]:
|
|
|
|
assert res["value"] is True
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/{chart.id}/favorites/"
|
|
|
|
self.client.delete(uri)
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/favorite_status/?q={prison.dumps([chart.id])}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
for res in data["result"]:
|
|
|
|
assert res["value"] is False
|
|
|
|
|
|
|
|
db.session.delete(chart)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-12-17 21:27:21 -05:00
|
|
|
def test_get_time_range(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test get actually time range from human readable string
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
humanize_time_range = "100 years ago : now"
|
|
|
|
uri = f"api/v1/time_range/?q={prison.dumps(humanize_time_range)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
self.assertEqual(len(data["result"]), 3)
|
|
|
|
|
2023-07-21 19:31:41 -04:00
|
|
|
def test_query_form_data(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test query form data
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
slice = db.session.query(Slice).first()
|
|
|
|
uri = f"api/v1/form_data/?slice_id={slice.id if slice else None}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
self.assertEqual(rv.content_type, "application/json")
|
|
|
|
if slice:
|
|
|
|
self.assertEqual(data["slice_id"], slice.id)
|
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures(
|
2021-01-11 08:57:55 -05:00
|
|
|
"load_unicode_dashboard_with_slice",
|
|
|
|
"load_energy_table_with_slice",
|
2021-01-13 17:20:05 -05:00
|
|
|
"load_world_bank_dashboard_with_slices",
|
2021-01-11 08:57:55 -05:00
|
|
|
"load_birth_names_dashboard_with_slices",
|
2020-12-09 15:02:29 -05:00
|
|
|
)
|
2020-01-21 13:04:52 -05:00
|
|
|
def test_get_charts_page(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts filter
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
2021-11-05 11:05:48 -04:00
|
|
|
# Assuming we have 34 sample charts
|
2020-01-21 13:04:52 -05:00
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {"page_size": 10, "page": 0}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(len(data["result"]), 10)
|
|
|
|
|
|
|
|
arguments = {"page_size": 10, "page": 3}
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
2021-11-05 11:05:48 -04:00
|
|
|
self.assertEqual(len(data["result"]), 4)
|
2020-01-21 13:04:52 -05:00
|
|
|
|
|
|
|
def test_get_charts_no_data_access(self):
|
|
|
|
"""
|
2020-04-21 14:57:42 -04:00
|
|
|
Chart API: Test get charts no data access
|
2020-01-21 13:04:52 -05:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
2021-07-21 20:03:22 -04:00
|
|
|
uri = "api/v1/chart/"
|
2020-04-21 14:57:42 -04:00
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
2020-01-21 13:04:52 -05:00
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 0)
|
2020-04-09 04:05:49 -04:00
|
|
|
|
2020-10-22 15:06:58 -04:00
|
|
|
def test_export_chart(self):
|
|
|
|
"""
|
2020-11-20 17:40:27 -05:00
|
|
|
Chart API: Test export chart
|
2020-10-22 15:06:58 -04:00
|
|
|
"""
|
|
|
|
example_chart = db.session.query(Slice).all()[0]
|
|
|
|
argument = [example_chart.id]
|
|
|
|
uri = f"api/v1/chart/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.get_assert_metric(uri, "export")
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
|
|
|
|
buf = BytesIO(rv.data)
|
|
|
|
assert is_zipfile(buf)
|
|
|
|
|
|
|
|
def test_export_chart_not_found(self):
|
|
|
|
"""
|
2020-11-20 17:40:27 -05:00
|
|
|
Chart API: Test export chart not found
|
2020-10-22 15:06:58 -04:00
|
|
|
"""
|
|
|
|
# Just one does not exist and we get 404
|
|
|
|
argument = [-1, 1]
|
|
|
|
uri = f"api/v1/chart/export/?q={prison.dumps(argument)}"
|
|
|
|
self.login(username="admin")
|
|
|
|
rv = self.get_assert_metric(uri, "export")
|
|
|
|
|
|
|
|
assert rv.status_code == 404
|
|
|
|
|
|
|
|
def test_export_chart_gamma(self):
|
|
|
|
"""
|
2020-11-20 17:40:27 -05:00
|
|
|
Chart API: Test export chart has gamma
|
2020-10-22 15:06:58 -04:00
|
|
|
"""
|
|
|
|
example_chart = db.session.query(Slice).all()[0]
|
|
|
|
argument = [example_chart.id]
|
|
|
|
uri = f"api/v1/chart/export/?q={prison.dumps(argument)}"
|
|
|
|
|
|
|
|
self.login(username="gamma")
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
|
|
|
|
assert rv.status_code == 404
|
2020-11-20 17:40:27 -05:00
|
|
|
|
|
|
|
def test_import_chart(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test import chart
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = "api/v1/chart/import/"
|
|
|
|
|
2020-12-10 17:50:10 -05:00
|
|
|
buf = self.create_chart_import()
|
2020-11-20 17:40:27 -05:00
|
|
|
form_data = {
|
2020-11-25 14:47:48 -05:00
|
|
|
"formData": (buf, "chart_export.zip"),
|
2020-11-20 17:40:27 -05:00
|
|
|
}
|
|
|
|
rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
|
|
|
|
response = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
assert rv.status_code == 200
|
|
|
|
assert response == {"message": "OK"}
|
|
|
|
|
|
|
|
database = (
|
|
|
|
db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
|
|
|
|
)
|
|
|
|
assert database.database_name == "imported_database"
|
|
|
|
|
|
|
|
assert len(database.tables) == 1
|
|
|
|
dataset = database.tables[0]
|
|
|
|
assert dataset.table_name == "imported_dataset"
|
|
|
|
assert str(dataset.uuid) == dataset_config["uuid"]
|
|
|
|
|
|
|
|
chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one()
|
|
|
|
assert chart.table == dataset
|
|
|
|
|
|
|
|
db.session.delete(chart)
|
2023-01-20 16:17:56 -05:00
|
|
|
db.session.commit()
|
2020-11-20 17:40:27 -05:00
|
|
|
db.session.delete(dataset)
|
2023-01-20 16:17:56 -05:00
|
|
|
db.session.commit()
|
2020-11-20 17:40:27 -05:00
|
|
|
db.session.delete(database)
|
|
|
|
db.session.commit()
|
|
|
|
|
2020-12-10 17:50:10 -05:00
|
|
|
    def test_import_chart_overwrite(self):
        """
        Chart API: Test import existing chart

        Imports a chart bundle three times: the first import succeeds, the
        second fails with a 422 because the chart already exists and no
        overwrite flag was passed, and the third succeeds once
        ``overwrite=true`` is supplied.
        """
        self.login(username="admin")
        uri = "api/v1/chart/import/"

        # First import: the chart does not exist yet, so this succeeds.
        buf = self.create_chart_import()
        form_data = {
            "formData": (buf, "chart_export.zip"),
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 200
        assert response == {"message": "OK"}

        # import again without overwrite flag
        # The buffer was consumed by the previous request, so a fresh
        # bundle is created for each attempt.
        buf = self.create_chart_import()
        form_data = {
            "formData": (buf, "chart_export.zip"),
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        # Expect a structured GENERIC_COMMAND_ERROR payload explaining the
        # conflict, keyed by the offending file inside the bundle.
        assert rv.status_code == 422
        assert response == {
            "errors": [
                {
                    "message": "Error importing chart",
                    "error_type": "GENERIC_COMMAND_ERROR",
                    "level": "warning",
                    "extra": {
                        "charts/imported_chart.yaml": "Chart already exists and `overwrite=true` was not passed",
                        "issue_codes": [
                            {
                                "code": 1010,
                                "message": "Issue 1010 - Superset encountered an error while running a command.",
                            }
                        ],
                    },
                }
            ]
        }

        # import with overwrite flag
        buf = self.create_chart_import()
        form_data = {
            "formData": (buf, "chart_export.zip"),
            "overwrite": "true",
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        assert rv.status_code == 200
        assert response == {"message": "OK"}

        # clean up
        database = (
            db.session.query(Database).filter_by(uuid=database_config["uuid"]).one()
        )
        dataset = database.tables[0]
        chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one()

        # Delete in dependency order: chart, then dataset, then database.
        db.session.delete(chart)
        db.session.commit()
        db.session.delete(dataset)
        db.session.commit()
        db.session.delete(database)
        db.session.commit()
|
|
|
|
|
2020-11-20 17:40:27 -05:00
|
|
|
    def test_import_chart_invalid(self):
        """
        Chart API: Test import invalid chart

        Builds a bundle whose ``metadata.yaml`` declares the wrong export
        type (dataset metadata instead of ``Slice``) and verifies the
        endpoint rejects it with a structured 422 validation error.
        """
        self.login(username="admin")
        uri = "api/v1/chart/import/"

        # Assemble an in-memory ZIP with deliberately mismatched metadata.
        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            with bundle.open("chart_export/metadata.yaml", "w") as fp:
                fp.write(yaml.safe_dump(dataset_metadata_config).encode())
            with bundle.open(
                "chart_export/databases/imported_database.yaml", "w"
            ) as fp:
                fp.write(yaml.safe_dump(database_config).encode())
            with bundle.open("chart_export/datasets/imported_dataset.yaml", "w") as fp:
                fp.write(yaml.safe_dump(dataset_config).encode())
            with bundle.open("chart_export/charts/imported_chart.yaml", "w") as fp:
                fp.write(yaml.safe_dump(chart_config).encode())
        # Rewind so the upload reads from the start of the archive.
        buf.seek(0)

        form_data = {
            "formData": (buf, "chart_export.zip"),
        }
        rv = self.client.post(uri, data=form_data, content_type="multipart/form-data")
        response = json.loads(rv.data.decode("utf-8"))

        # The schema validation failure surfaces as a GENERIC_COMMAND_ERROR
        # keyed by the offending file in the bundle.
        assert rv.status_code == 422
        assert response == {
            "errors": [
                {
                    "message": "Error importing chart",
                    "error_type": "GENERIC_COMMAND_ERROR",
                    "level": "warning",
                    "extra": {
                        "metadata.yaml": {"type": ["Must be equal to Slice."]},
                        "issue_codes": [
                            {
                                "code": 1010,
                                "message": (
                                    "Issue 1010 - Superset encountered an "
                                    "error while running a command."
                                ),
                            }
                        ],
                    },
                }
            ]
        }
|
2022-08-30 15:27:38 -04:00
|
|
|
|
|
|
|
def test_gets_created_by_user_charts_filter(self):
|
|
|
|
arguments = {
|
|
|
|
"filters": [{"col": "id", "opr": "chart_has_created_by", "value": True}],
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 8)
|
|
|
|
|
|
|
|
def test_gets_not_created_by_user_charts_filter(self):
|
|
|
|
arguments = {
|
|
|
|
"filters": [{"col": "id", "opr": "chart_has_created_by", "value": False}],
|
|
|
|
"keys": ["none"],
|
|
|
|
"columns": ["slice_name"],
|
|
|
|
}
|
|
|
|
self.login(username="admin")
|
|
|
|
|
|
|
|
uri = f"api/v1/chart/?q={prison.dumps(arguments)}"
|
|
|
|
rv = self.get_assert_metric(uri, "get_list")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data["count"], 8)
|
2023-04-03 13:29:02 -04:00
|
|
|
|
|
|
|
@pytest.mark.usefixtures("create_charts")
|
|
|
|
def test_gets_owned_created_favorited_by_me_filter(self):
|
|
|
|
"""
|
|
|
|
Chart API: Test ChartOwnedCreatedFavoredByMeFilter
|
|
|
|
"""
|
|
|
|
self.login(username="admin")
|
|
|
|
arguments = {
|
|
|
|
"filters": [
|
|
|
|
{
|
|
|
|
"col": "id",
|
|
|
|
"opr": "chart_owned_created_favored_by_me",
|
|
|
|
"value": True,
|
|
|
|
}
|
|
|
|
],
|
|
|
|
"order_column": "slice_name",
|
|
|
|
"order_direction": "asc",
|
|
|
|
"page": 0,
|
|
|
|
"page_size": 25,
|
|
|
|
}
|
|
|
|
rv = self.client.get(f"api/v1/chart/?q={prison.dumps(arguments)}")
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
assert data["result"][0]["slice_name"] == "name0"
|
|
|
|
assert data["result"][0]["datasource_id"] == 1
|
2023-06-20 07:08:29 -04:00
|
|
|
|
2023-07-19 14:12:36 -04:00
|
|
|
@parameterized.expand(
|
|
|
|
[
|
|
|
|
"Top 10 Girl Name Share", # Legacy chart
|
|
|
|
"Pivot Table v2", # Non-legacy chart
|
|
|
|
],
|
2023-06-20 07:08:29 -04:00
|
|
|
)
|
2023-07-19 14:12:36 -04:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
|
|
|
def test_warm_up_cache(self, slice_name):
|
2023-06-20 07:08:29 -04:00
|
|
|
self.login()
|
2023-07-19 14:12:36 -04:00
|
|
|
slc = self.get_slice(slice_name, db.session)
|
2023-06-20 07:08:29 -04:00
|
|
|
rv = self.client.put("/api/v1/chart/warm_up_cache", json={"chart_id": slc.id})
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
data["result"],
|
|
|
|
[{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}],
|
|
|
|
)
|
|
|
|
|
|
|
|
dashboard = self.get_dash_by_slug("births")
|
|
|
|
|
|
|
|
rv = self.client.put(
|
|
|
|
"/api/v1/chart/warm_up_cache",
|
|
|
|
json={"chart_id": slc.id, "dashboard_id": dashboard.id},
|
|
|
|
)
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
|
|
|
data["result"],
|
|
|
|
[{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}],
|
|
|
|
)
|
|
|
|
|
|
|
|
rv = self.client.put(
|
|
|
|
"/api/v1/chart/warm_up_cache",
|
|
|
|
json={
|
|
|
|
"chart_id": slc.id,
|
|
|
|
"dashboard_id": dashboard.id,
|
|
|
|
"extra_filters": json.dumps(
|
|
|
|
[{"col": "name", "op": "in", "val": ["Jennifer"]}]
|
|
|
|
),
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assertEqual(rv.status_code, 200)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
|
|
|
data["result"],
|
|
|
|
[{"chart_id": slc.id, "viz_error": None, "viz_status": "success"}],
|
|
|
|
)
|
|
|
|
|
|
|
|
def test_warm_up_cache_chart_id_required(self):
|
|
|
|
self.login()
|
|
|
|
rv = self.client.put("/api/v1/chart/warm_up_cache", json={"dashboard_id": 1})
|
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
|
|
|
data,
|
|
|
|
{"message": {"chart_id": ["Missing data for required field."]}},
|
|
|
|
)
|
|
|
|
|
|
|
|
def test_warm_up_cache_chart_not_found(self):
|
|
|
|
self.login()
|
|
|
|
rv = self.client.put("/api/v1/chart/warm_up_cache", json={"chart_id": 99999})
|
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(data, {"message": "Chart not found"})
|
|
|
|
|
|
|
|
def test_warm_up_cache_payload_validation(self):
|
|
|
|
self.login()
|
|
|
|
rv = self.client.put(
|
|
|
|
"/api/v1/chart/warm_up_cache",
|
|
|
|
json={"chart_id": "id", "dashboard_id": "id", "extra_filters": 4},
|
|
|
|
)
|
|
|
|
self.assertEqual(rv.status_code, 400)
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
self.assertEqual(
|
|
|
|
data,
|
|
|
|
{
|
|
|
|
"message": {
|
|
|
|
"chart_id": ["Not a valid integer."],
|
|
|
|
"dashboard_id": ["Not a valid integer."],
|
|
|
|
"extra_filters": ["Not a valid string."],
|
|
|
|
}
|
|
|
|
},
|
|
|
|
)
|
2023-07-19 14:12:36 -04:00
|
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
|
|
|
def test_warm_up_cache_error(self) -> None:
|
|
|
|
self.login()
|
|
|
|
slc = self.get_slice("Pivot Table v2", db.session)
|
|
|
|
|
|
|
|
with mock.patch.object(ChartDataCommand, "run") as mock_run:
|
|
|
|
mock_run.side_effect = ChartDataQueryFailedError(
|
|
|
|
_(
|
|
|
|
"Error: %(error)s",
|
|
|
|
error=_("Empty query?"),
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
assert json.loads(
|
|
|
|
self.client.put(
|
|
|
|
"/api/v1/chart/warm_up_cache",
|
|
|
|
json={"chart_id": slc.id},
|
|
|
|
).data
|
|
|
|
) == {
|
|
|
|
"result": [
|
|
|
|
{
|
|
|
|
"chart_id": slc.id,
|
|
|
|
"viz_error": "Error: Empty query?",
|
|
|
|
"viz_status": None,
|
|
|
|
},
|
|
|
|
],
|
|
|
|
}
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
|
|
|
def test_warm_up_cache_no_query_context(self) -> None:
|
|
|
|
self.login()
|
|
|
|
slc = self.get_slice("Pivot Table v2", db.session)
|
|
|
|
|
|
|
|
with mock.patch.object(Slice, "get_query_context") as mock_get_query_context:
|
|
|
|
mock_get_query_context.return_value = None
|
|
|
|
|
|
|
|
assert json.loads(
|
|
|
|
self.client.put(
|
|
|
|
f"/api/v1/chart/warm_up_cache",
|
|
|
|
json={"chart_id": slc.id},
|
|
|
|
).data
|
|
|
|
) == {
|
|
|
|
"result": [
|
|
|
|
{
|
|
|
|
"chart_id": slc.id,
|
|
|
|
"viz_error": "Chart's query context does not exist",
|
|
|
|
"viz_status": None,
|
|
|
|
},
|
|
|
|
],
|
|
|
|
}
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
|
|
|
|
def test_warm_up_cache_no_datasource(self) -> None:
|
|
|
|
self.login()
|
|
|
|
slc = self.get_slice("Top 10 Girl Name Share", db.session)
|
|
|
|
|
|
|
|
with mock.patch.object(
|
|
|
|
Slice,
|
|
|
|
"datasource",
|
|
|
|
new_callable=mock.PropertyMock,
|
|
|
|
) as mock_datasource:
|
|
|
|
mock_datasource.return_value = None
|
|
|
|
|
|
|
|
assert json.loads(
|
|
|
|
self.client.put(
|
|
|
|
f"/api/v1/chart/warm_up_cache",
|
|
|
|
json={"chart_id": slc.id},
|
|
|
|
).data
|
|
|
|
) == {
|
|
|
|
"result": [
|
|
|
|
{
|
|
|
|
"chart_id": slc.id,
|
|
|
|
"viz_error": "Chart's datasource does not exist",
|
|
|
|
"viz_status": None,
|
|
|
|
},
|
|
|
|
],
|
|
|
|
}
|