2019-01-15 18:53:27 -05:00
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one
|
|
|
|
# or more contributor license agreements. See the NOTICE file
|
|
|
|
# distributed with this work for additional information
|
|
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
|
|
# to you under the Apache License, Version 2.0 (the
|
|
|
|
# "License"); you may not use this file except in compliance
|
|
|
|
# with the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing,
|
|
|
|
# software distributed under the License is distributed on an
|
|
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
# KIND, either express or implied. See the License for the
|
|
|
|
# specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2019-11-20 10:47:06 -05:00
|
|
|
# isort:skip_file
|
2016-11-10 02:08:22 -05:00
|
|
|
"""Unit tests for Superset"""
|
2016-08-30 00:55:31 -04:00
|
|
|
import csv
|
2017-11-20 11:33:18 -05:00
|
|
|
import datetime
|
2016-02-10 12:34:09 -05:00
|
|
|
import doctest
|
2020-05-17 17:49:51 -04:00
|
|
|
import html
|
2017-11-07 23:23:40 -05:00
|
|
|
import io
|
2016-08-30 00:55:31 -04:00
|
|
|
import json
|
2017-03-10 12:11:51 -05:00
|
|
|
import logging
|
2020-08-27 16:12:24 -04:00
|
|
|
from typing import Dict, List
|
2020-05-27 12:02:03 -04:00
|
|
|
from urllib.parse import quote
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.fixtures.birth_names_dashboard import (
|
|
|
|
load_birth_names_dashboard_with_slices,
|
|
|
|
)
|
2020-05-21 16:49:53 -04:00
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
import pytest
|
2020-01-03 11:55:39 -05:00
|
|
|
import pytz
|
2016-09-11 10:39:07 -04:00
|
|
|
import random
|
2018-06-02 14:08:43 -04:00
|
|
|
import re
|
2015-09-26 18:55:33 -04:00
|
|
|
import unittest
|
2021-05-21 17:29:52 -04:00
|
|
|
from unittest import mock
|
2016-03-28 11:09:46 -04:00
|
|
|
|
2017-11-20 11:33:18 -05:00
|
|
|
import pandas as pd
|
2017-09-13 23:59:03 -04:00
|
|
|
import sqlalchemy as sqla
|
2021-03-15 18:46:51 -04:00
|
|
|
from sqlalchemy.exc import SQLAlchemyError
|
2020-09-01 12:41:25 -04:00
|
|
|
from superset.models.cache import CacheKey
|
2021-07-19 13:31:05 -04:00
|
|
|
from superset.utils.core import get_example_database
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.conftest import with_feature_flags
|
|
|
|
from tests.integration_tests.fixtures.energy_dashboard import (
|
|
|
|
load_energy_table_with_slice,
|
|
|
|
)
|
|
|
|
from tests.integration_tests.test_app import app
|
2020-06-17 16:42:13 -04:00
|
|
|
import superset.views.utils
|
2020-02-19 12:51:50 -05:00
|
|
|
from superset import (
|
|
|
|
dataframe,
|
|
|
|
db,
|
|
|
|
security_manager,
|
|
|
|
sql_lab,
|
|
|
|
)
|
2021-09-26 14:15:57 -04:00
|
|
|
from superset.common.db_query_status import QueryStatus
|
2017-11-07 23:23:40 -05:00
|
|
|
from superset.connectors.sqla.models import SqlaTable
|
2019-06-08 14:27:13 -04:00
|
|
|
from superset.db_engine_specs.base import BaseEngineSpec
|
|
|
|
from superset.db_engine_specs.mssql import MssqlEngineSpec
|
2021-03-15 18:46:51 -04:00
|
|
|
from superset.exceptions import SupersetException
|
feat(SIP-39): Async query support for charts (#11499)
* Generate JWT in Flask app
* Refactor chart data API query logic, add JWT validation and async worker
* Add redis stream implementation, refactoring
* Add chart data cache endpoint, refactor QueryContext caching
* Typing, linting, refactoring
* pytest fixes and openapi schema update
* Enforce caching be configured for async query init
* Async query processing for explore_json endpoint
* Add /api/v1/async_event endpoint
* Async frontend for dashboards [WIP]
* Chart async error message support, refactoring
* Abstract asyncEvent middleware
* Async chart loading for Explore
* Pylint fixes
* asyncEvent middleware -> TypeScript, JS linting
* Chart data API: enforce forced_cache, add tests
* Add tests for explore_json endpoints
* Add test for chart data cache enpoint (no login)
* Consolidate set_and_log_cache and add STORE_CACHE_KEYS_IN_METADATA_DB flag
* Add tests for tasks/async_queries and address PR comments
* Bypass non-JSON result formats for async queries
* Add tests for redux middleware
* Remove debug statement
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
* Skip force_cached if no queryObj
* SunburstViz: don't modify self.form_data
* Fix failing annotation test
* Resolve merge/lint issues
* Reduce polling delay
* Fix new getClientErrorObject reference
* Fix flakey unit tests
* /api/v1/async_event: increment redis stream ID, add tests
* PR feedback: refactoring, configuration
* Fixup: remove debugging
* Fix typescript errors due to redux upgrade
* Update UPDATING.md
* Fix failing py tests
* asyncEvent_spec.js -> asyncEvent_spec.ts
* Refactor flakey Python 3.7 mock assertions
* Fix another shared state issue in Py tests
* Use 'sub' claim in JWT for user_id
* Refactor async middleware config
* Fixup: restore FeatureFlag boolean type
Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
2020-12-10 23:21:56 -05:00
|
|
|
from superset.extensions import async_query_manager
|
2017-03-10 12:11:51 -05:00
|
|
|
from superset.models import core as models
|
2020-05-27 12:02:03 -04:00
|
|
|
from superset.models.annotations import Annotation, AnnotationLayer
|
2019-12-18 14:40:45 -05:00
|
|
|
from superset.models.dashboard import Dashboard
|
2019-12-17 19:17:49 -05:00
|
|
|
from superset.models.datasource_access_request import DatasourceAccessRequest
|
2019-12-18 14:40:45 -05:00
|
|
|
from superset.models.slice import Slice
|
2017-04-04 23:15:19 -04:00
|
|
|
from superset.models.sql_lab import Query
|
2020-01-03 11:55:39 -05:00
|
|
|
from superset.result_set import SupersetResultSet
|
2018-10-16 20:59:34 -04:00
|
|
|
from superset.utils import core as utils
|
2019-08-27 17:23:40 -04:00
|
|
|
from superset.views import core as views
|
2019-07-23 00:25:16 -04:00
|
|
|
from superset.views.database.views import DatabaseView
|
2019-10-18 17:44:27 -04:00
|
|
|
|
2016-11-10 02:08:22 -05:00
|
|
|
from .base_tests import SupersetTestCase
|
2021-07-01 11:03:07 -04:00
|
|
|
from tests.integration_tests.fixtures.world_bank_dashboard import (
|
|
|
|
load_world_bank_dashboard_with_slices,
|
|
|
|
)
|
2016-03-28 11:09:46 -04:00
|
|
|
|
2020-02-08 02:38:48 -05:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2015-09-26 18:55:33 -04:00
|
|
|
|
2020-06-29 18:36:06 -04:00
|
|
|
class TestCore(SupersetTestCase):
|
2015-09-26 18:55:33 -04:00
|
|
|
    def setUp(self):
        """Reset shared DB state before each test and snapshot mutable config."""
        # Clear rows that individual tests create so every test starts clean.
        db.session.query(Query).delete()
        db.session.query(DatasourceAccessRequest).delete()
        db.session.query(models.Log).delete()
        # Map table_name -> id for quick lookups in tests (e.g. "energy_usage").
        self.table_ids = {
            tbl.table_name: tbl.id for tbl in (db.session.query(SqlaTable).all())
        }
        # Saved so tearDown can restore the flag if a test mutates it.
        self.original_unsafe_db_setting = app.config["PREVENT_UNSAFE_DB_CONNECTIONS"]
|
2015-09-26 18:55:33 -04:00
|
|
|
|
2016-03-28 11:09:46 -04:00
|
|
|
    def tearDown(self):
        """Remove queries created during the test and restore the config flag."""
        db.session.query(Query).delete()
        # Undo any change a test made to the unsafe-DB-connections setting.
        app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = self.original_unsafe_db_setting
|
2015-09-22 14:36:54 -04:00
|
|
|
|
2017-02-10 04:17:49 -05:00
|
|
|
def test_login(self):
|
2019-06-25 16:34:48 -04:00
|
|
|
resp = self.get_resp("/login/", data=dict(username="admin", password="general"))
|
|
|
|
self.assertNotIn("User confirmation needed", resp)
|
2017-02-10 04:17:49 -05:00
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
resp = self.get_resp("/logout/", follow_redirects=True)
|
|
|
|
self.assertIn("User confirmation needed", resp)
|
2017-02-10 04:17:49 -05:00
|
|
|
|
|
|
|
resp = self.get_resp(
|
2019-06-25 16:34:48 -04:00
|
|
|
"/login/", data=dict(username="admin", password="wrongPassword")
|
|
|
|
)
|
|
|
|
self.assertIn("User confirmation needed", resp)
|
2017-02-10 04:17:49 -05:00
|
|
|
|
2018-11-12 13:08:20 -05:00
|
|
|
def test_dashboard_endpoint(self):
|
2020-09-11 09:28:41 -04:00
|
|
|
self.login()
|
2019-06-25 16:34:48 -04:00
|
|
|
resp = self.client.get("/superset/dashboard/-1/")
|
2018-11-12 13:08:20 -05:00
|
|
|
assert resp.status_code == 404
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_slice_endpoint(self):
        """/superset/slice/<id>/ renders the chart page; invalid ids give 404."""
        self.login(username="admin")
        slc = self.get_slice("Girls", db.session)
        resp = self.get_resp("/superset/slice/{}/".format(slc.id))
        assert "Original value" in resp
        assert "List Roles" in resp

        # Testing overrides
        # standalone mode should render the chart without the navbar chrome.
        resp = self.get_resp("/superset/slice/{}/?standalone=true".format(slc.id))
        assert '<div class="navbar' not in resp

        resp = self.client.get("/superset/slice/-1/")
        assert resp.status_code == 404
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_viz_cache_key(self):
        """Cache keys must track query-object contents and time_compare kwargs."""
        self.login(username="admin")
        slc = self.get_slice("Girls", db.session)

        viz = slc.viz
        qobj = viz.query_obj()
        cache_key = viz.cache_key(qobj)

        # Mutating the query object (emptying groupby) must change the key.
        # NOTE(review): the name is misleading — this is the key AFTER groupby
        # was emptied, not a key "with" a groupby.
        qobj["groupby"] = []
        cache_key_with_groupby = viz.cache_key(qobj)
        self.assertNotEqual(cache_key, cache_key_with_groupby)

        # Extra keyword args (time_compare) must also be part of the key.
        self.assertNotEqual(
            viz.cache_key(qobj), viz.cache_key(qobj, time_compare="12 weeks")
        )

        self.assertNotEqual(
            viz.cache_key(qobj, time_compare="28 days"),
            viz.cache_key(qobj, time_compare="12 weeks"),
        )

        # inner_from_dttm is expected NOT to affect the key: the next assert
        # requires the key to be unchanged after setting it.
        qobj["inner_from_dttm"] = datetime.datetime(1901, 1, 1)

        self.assertEqual(cache_key_with_groupby, viz.cache_key(qobj))
|
2018-01-28 12:46:13 -05:00
|
|
|
|
2020-02-20 05:15:22 -05:00
|
|
|
    def test_get_superset_tables_not_allowed(self):
        """The tables endpoint returns 404 for a user lacking access (gamma)."""
        example_db = utils.get_example_database()
        schema_name = self.default_schema_backend_map[example_db.backend]
        self.login(username="gamma")
        uri = f"superset/tables/{example_db.id}/{schema_name}/undefined/"
        rv = self.client.get(uri)
        self.assertEqual(rv.status_code, 404)
|
|
|
|
|
|
|
|
    def test_get_superset_tables_substr(self):
        """Substring search on the tables endpoint returns matching tables."""
        example_db = utils.get_example_database()
        if example_db.backend in {"presto", "hive"}:
            # TODO: change table to the real table that is in examples.
            return
        self.login(username="admin")
        schema_name = self.default_schema_backend_map[example_db.backend]
        # "ab_role" is the substring being searched for.
        uri = f"superset/tables/{example_db.id}/{schema_name}/ab_role/"
        rv = self.client.get(uri)
        response = json.loads(rv.data.decode("utf-8"))
        self.assertEqual(rv.status_code, 200)

        # Exactly one match is expected: the ab_role table itself.
        expected_response = {
            "options": [
                {
                    "label": "ab_role",
                    "schema": schema_name,
                    "title": "ab_role",
                    "type": "table",
                    "value": "ab_role",
                    "extra": None,
                }
            ],
            "tableLength": 1,
        }
        self.assertEqual(response, expected_response)
|
2020-02-20 05:15:22 -05:00
|
|
|
|
|
|
|
def test_get_superset_tables_not_found(self):
|
|
|
|
self.login(username="admin")
|
|
|
|
uri = f"superset/tables/invalid/public/undefined/"
|
|
|
|
rv = self.client.get(uri)
|
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
|
2020-05-27 12:02:03 -04:00
|
|
|
    def test_annotation_json_endpoint(self):
        """annotation_json returns annotations of a layer within a time range."""
        # Set up an annotation layer and annotation
        layer = AnnotationLayer(name="foo", descr="bar")
        db.session.add(layer)
        db.session.commit()

        annotation = Annotation(
            layer_id=layer.id,
            short_descr="my_annotation",
            start_dttm=datetime.datetime(2020, 5, 20, 18, 21, 51),
            end_dttm=datetime.datetime(2020, 5, 20, 18, 31, 51),
        )

        db.session.add(annotation)
        db.session.commit()

        self.login()
        resp_annotations = json.loads(
            self.get_resp("annotationlayermodelview/api/read")
        )
        # the UI needs id and name to function
        self.assertIn("id", resp_annotations["result"][0])
        self.assertIn("name", resp_annotations["result"][0])

        # The wide time range must include the annotation created above.
        response = self.get_resp(
            f"/superset/annotation_json/{layer.id}?form_data="
            + quote(json.dumps({"time_range": "100 years ago : now"}))
        )
        assert "my_annotation" in response

        # Rollback changes
        db.session.delete(annotation)
        db.session.delete(layer)
        db.session.commit()
|
2020-05-27 12:02:03 -04:00
|
|
|
|
2016-09-22 12:53:14 -04:00
|
|
|
def test_admin_only_permissions(self):
|
|
|
|
def assert_admin_permission_in(role_name, assert_func):
|
2018-03-27 19:46:02 -04:00
|
|
|
role = security_manager.find_role(role_name)
|
2016-09-22 12:53:14 -04:00
|
|
|
permissions = [p.permission.name for p in role.permissions]
|
2019-06-25 16:34:48 -04:00
|
|
|
assert_func("can_sync_druid_source", permissions)
|
|
|
|
assert_func("can_approve", permissions)
|
2016-09-22 12:53:14 -04:00
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
assert_admin_permission_in("Admin", self.assertIn)
|
|
|
|
assert_admin_permission_in("Alpha", self.assertNotIn)
|
|
|
|
assert_admin_permission_in("Gamma", self.assertNotIn)
|
2016-09-22 12:53:14 -04:00
|
|
|
|
|
|
|
def test_admin_only_menu_views(self):
|
|
|
|
def assert_admin_view_menus_in(role_name, assert_func):
|
2018-03-27 19:46:02 -04:00
|
|
|
role = security_manager.find_role(role_name)
|
2016-09-22 12:53:14 -04:00
|
|
|
view_menus = [p.view_menu.name for p in role.permissions]
|
2019-06-25 16:34:48 -04:00
|
|
|
assert_func("ResetPasswordView", view_menus)
|
|
|
|
assert_func("RoleModelView", view_menus)
|
|
|
|
assert_func("Security", view_menus)
|
|
|
|
assert_func("SQL Lab", view_menus)
|
2016-09-22 12:53:14 -04:00
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
assert_admin_view_menus_in("Admin", self.assertIn)
|
|
|
|
assert_admin_view_menus_in("Alpha", self.assertNotIn)
|
|
|
|
assert_admin_view_menus_in("Gamma", self.assertNotIn)
|
2016-09-22 12:53:14 -04:00
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures("load_energy_table_with_slice")
|
2016-04-20 18:08:10 -04:00
|
|
|
def test_save_slice(self):
|
2019-06-25 16:34:48 -04:00
|
|
|
self.login(username="admin")
|
2019-11-20 10:47:06 -05:00
|
|
|
slice_name = f"Energy Sankey"
|
2020-08-06 18:33:48 -04:00
|
|
|
slice_id = self.get_slice(slice_name, db.session).id
|
feat: [explore] don't save filters inherited from a dashboard (#9340)
* feat: [explore] don't save filters inherited from a dashboard
When navigating to explore from a dashboard context, the current
dashboard filter(s) are passed along to explore so that the context is
kept. So say you're filtering on "country=Romania", in your dashboard
and pivot to explore, that filter is still there and keep on exploring.
Now a common issue is that you'll want to make some tweak to your chart
that are unrelated to the filter, say toggling the legend off for
instance, and then save it. Now you back to your dashboard and even
though you started with an "all countries" dashboard, with a global
filter on country, now that one chart is stuck on "Romania". Typically
you notice this when filtering on something else, say "Italy" and then
that one chart now has two mutually exclusive filters, and show "No data".
Now, the fix is to flag the filter as "extra" (that's the not-so-good internal
name we use for these inherited filters) and make it clear that that
specific filter is special and won't be saved when saving the chart.
* fix build
2020-03-24 02:05:00 -04:00
|
|
|
copy_name_prefix = "Test Sankey"
|
|
|
|
copy_name = f"{copy_name_prefix}[save]{random.random()}"
|
2019-06-25 16:34:48 -04:00
|
|
|
tbl_id = self.table_ids.get("energy_usage")
|
feat: [explore] don't save filters inherited from a dashboard (#9340)
* feat: [explore] don't save filters inherited from a dashboard
When navigating to explore from a dashboard context, the current
dashboard filter(s) are passed along to explore so that the context is
kept. So say you're filtering on "country=Romania", in your dashboard
and pivot to explore, that filter is still there and keep on exploring.
Now a common issue is that you'll want to make some tweak to your chart
that are unrelated to the filter, say toggling the legend off for
instance, and then save it. Now you back to your dashboard and even
though you started with an "all countries" dashboard, with a global
filter on country, now that one chart is stuck on "Romania". Typically
you notice this when filtering on something else, say "Italy" and then
that one chart now has two mutually exclusive filters, and show "No data".
Now, the fix is to flag the filter as "extra" (that's the not-so-good internal
name we use for these inherited filters) and make it clear that that
specific filter is special and won't be saved when saving the chart.
* fix build
2020-03-24 02:05:00 -04:00
|
|
|
new_slice_name = f"{copy_name_prefix}[overwrite]{random.random()}"
|
2016-04-20 18:08:10 -04:00
|
|
|
|
2017-02-16 20:28:35 -05:00
|
|
|
url = (
|
2019-06-25 16:34:48 -04:00
|
|
|
"/superset/explore/table/{}/?slice_name={}&"
|
|
|
|
"action={}&datasource_name=energy_usage"
|
|
|
|
)
|
2017-02-16 20:28:35 -05:00
|
|
|
|
|
|
|
form_data = {
|
feat: [explore] don't save filters inherited from a dashboard (#9340)
* feat: [explore] don't save filters inherited from a dashboard
When navigating to explore from a dashboard context, the current
dashboard filter(s) are passed along to explore so that the context is
kept. So say you're filtering on "country=Romania", in your dashboard
and pivot to explore, that filter is still there and keep on exploring.
Now a common issue is that you'll want to make some tweak to your chart
that are unrelated to the filter, say toggling the legend off for
instance, and then save it. Now you back to your dashboard and even
though you started with an "all countries" dashboard, with a global
filter on country, now that one chart is stuck on "Romania". Typically
you notice this when filtering on something else, say "Italy" and then
that one chart now has two mutually exclusive filters, and show "No data".
Now, the fix is to flag the filter as "extra" (that's the not-so-good internal
name we use for these inherited filters) and make it clear that that
specific filter is special and won't be saved when saving the chart.
* fix build
2020-03-24 02:05:00 -04:00
|
|
|
"adhoc_filters": [],
|
2019-06-25 16:34:48 -04:00
|
|
|
"viz_type": "sankey",
|
feat: [explore] don't save filters inherited from a dashboard (#9340)
* feat: [explore] don't save filters inherited from a dashboard
When navigating to explore from a dashboard context, the current
dashboard filter(s) are passed along to explore so that the context is
kept. So say you're filtering on "country=Romania", in your dashboard
and pivot to explore, that filter is still there and keep on exploring.
Now a common issue is that you'll want to make some tweak to your chart
that are unrelated to the filter, say toggling the legend off for
instance, and then save it. Now you back to your dashboard and even
though you started with an "all countries" dashboard, with a global
filter on country, now that one chart is stuck on "Romania". Typically
you notice this when filtering on something else, say "Italy" and then
that one chart now has two mutually exclusive filters, and show "No data".
Now, the fix is to flag the filter as "extra" (that's the not-so-good internal
name we use for these inherited filters) and make it clear that that
specific filter is special and won't be saved when saving the chart.
* fix build
2020-03-24 02:05:00 -04:00
|
|
|
"groupby": ["target"],
|
2019-06-25 16:34:48 -04:00
|
|
|
"metric": "sum__value",
|
|
|
|
"row_limit": 5000,
|
|
|
|
"slice_id": slice_id,
|
2020-02-03 13:37:29 -05:00
|
|
|
"time_range_endpoints": ["inclusive", "exclusive"],
|
2017-02-16 20:28:35 -05:00
|
|
|
}
|
|
|
|
# Changing name and save as a new slice
|
2019-11-20 10:47:06 -05:00
|
|
|
resp = self.client.post(
|
2019-06-25 16:34:48 -04:00
|
|
|
url.format(tbl_id, copy_name, "saveas"),
|
2019-11-20 10:47:06 -05:00
|
|
|
data={"form_data": json.dumps(form_data)},
|
2017-02-16 20:28:35 -05:00
|
|
|
)
|
2019-11-20 10:47:06 -05:00
|
|
|
db.session.expunge_all()
|
|
|
|
new_slice_id = resp.json["form_data"]["slice_id"]
|
2019-12-18 14:40:45 -05:00
|
|
|
slc = db.session.query(Slice).filter_by(id=new_slice_id).one()
|
2019-11-20 10:47:06 -05:00
|
|
|
|
|
|
|
self.assertEqual(slc.slice_name, copy_name)
|
|
|
|
form_data.pop("slice_id") # We don't save the slice id when saving as
|
|
|
|
self.assertEqual(slc.viz.form_data, form_data)
|
2017-02-16 20:28:35 -05:00
|
|
|
|
|
|
|
form_data = {
|
feat: [explore] don't save filters inherited from a dashboard (#9340)
* feat: [explore] don't save filters inherited from a dashboard
When navigating to explore from a dashboard context, the current
dashboard filter(s) are passed along to explore so that the context is
kept. So say you're filtering on "country=Romania", in your dashboard
and pivot to explore, that filter is still there and keep on exploring.
Now a common issue is that you'll want to make some tweak to your chart
that are unrelated to the filter, say toggling the legend off for
instance, and then save it. Now you back to your dashboard and even
though you started with an "all countries" dashboard, with a global
filter on country, now that one chart is stuck on "Romania". Typically
you notice this when filtering on something else, say "Italy" and then
that one chart now has two mutually exclusive filters, and show "No data".
Now, the fix is to flag the filter as "extra" (that's the not-so-good internal
name we use for these inherited filters) and make it clear that that
specific filter is special and won't be saved when saving the chart.
* fix build
2020-03-24 02:05:00 -04:00
|
|
|
"adhoc_filters": [],
|
2019-06-25 16:34:48 -04:00
|
|
|
"viz_type": "sankey",
|
feat: [explore] don't save filters inherited from a dashboard (#9340)
* feat: [explore] don't save filters inherited from a dashboard
When navigating to explore from a dashboard context, the current
dashboard filter(s) are passed along to explore so that the context is
kept. So say you're filtering on "country=Romania", in your dashboard
and pivot to explore, that filter is still there and keep on exploring.
Now a common issue is that you'll want to make some tweak to your chart
that are unrelated to the filter, say toggling the legend off for
instance, and then save it. Now you back to your dashboard and even
though you started with an "all countries" dashboard, with a global
filter on country, now that one chart is stuck on "Romania". Typically
you notice this when filtering on something else, say "Italy" and then
that one chart now has two mutually exclusive filters, and show "No data".
Now, the fix is to flag the filter as "extra" (that's the not-so-good internal
name we use for these inherited filters) and make it clear that that
specific filter is special and won't be saved when saving the chart.
* fix build
2020-03-24 02:05:00 -04:00
|
|
|
"groupby": ["source"],
|
2019-06-25 16:34:48 -04:00
|
|
|
"metric": "sum__value",
|
|
|
|
"row_limit": 5000,
|
|
|
|
"slice_id": new_slice_id,
|
|
|
|
"time_range": "now",
|
2020-02-03 13:37:29 -05:00
|
|
|
"time_range_endpoints": ["inclusive", "exclusive"],
|
2017-02-16 20:28:35 -05:00
|
|
|
}
|
|
|
|
# Setting the name back to its original name by overwriting new slice
|
2019-11-20 10:47:06 -05:00
|
|
|
self.client.post(
|
2019-06-25 16:34:48 -04:00
|
|
|
url.format(tbl_id, new_slice_name, "overwrite"),
|
2019-11-20 10:47:06 -05:00
|
|
|
data={"form_data": json.dumps(form_data)},
|
2017-02-16 20:28:35 -05:00
|
|
|
)
|
2019-11-20 10:47:06 -05:00
|
|
|
db.session.expunge_all()
|
2019-12-18 14:40:45 -05:00
|
|
|
slc = db.session.query(Slice).filter_by(id=new_slice_id).one()
|
2019-11-20 10:47:06 -05:00
|
|
|
self.assertEqual(slc.slice_name, new_slice_name)
|
|
|
|
self.assertEqual(slc.viz.form_data, form_data)
|
|
|
|
|
|
|
|
# Cleanup
|
feat: [explore] don't save filters inherited from a dashboard (#9340)
* feat: [explore] don't save filters inherited from a dashboard
When navigating to explore from a dashboard context, the current
dashboard filter(s) are passed along to explore so that the context is
kept. So say you're filtering on "country=Romania", in your dashboard
and pivot to explore, that filter is still there and keep on exploring.
Now a common issue is that you'll want to make some tweak to your chart
that are unrelated to the filter, say toggling the legend off for
instance, and then save it. Now you back to your dashboard and even
though you started with an "all countries" dashboard, with a global
filter on country, now that one chart is stuck on "Romania". Typically
you notice this when filtering on something else, say "Italy" and then
that one chart now has two mutually exclusive filters, and show "No data".
Now, the fix is to flag the filter as "extra" (that's the not-so-good internal
name we use for these inherited filters) and make it clear that that
specific filter is special and won't be saved when saving the chart.
* fix build
2020-03-24 02:05:00 -04:00
|
|
|
slices = (
|
|
|
|
db.session.query(Slice)
|
|
|
|
.filter(Slice.slice_name.like(copy_name_prefix + "%"))
|
|
|
|
.all()
|
|
|
|
)
|
|
|
|
for slc in slices:
|
|
|
|
db.session.delete(slc)
|
2019-11-20 10:47:06 -05:00
|
|
|
db.session.commit()
|
2016-11-17 14:58:33 -05:00
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures("load_energy_table_with_slice")
|
2016-12-16 17:23:48 -05:00
|
|
|
def test_filter_endpoint(self):
|
2019-06-25 16:34:48 -04:00
|
|
|
self.login(username="admin")
|
|
|
|
slice_name = "Energy Sankey"
|
2020-08-06 18:33:48 -04:00
|
|
|
slice_id = self.get_slice(slice_name, db.session).id
|
2016-12-16 17:23:48 -05:00
|
|
|
db.session.commit()
|
2019-06-25 16:34:48 -04:00
|
|
|
tbl_id = self.table_ids.get("energy_usage")
|
2017-03-10 12:11:51 -05:00
|
|
|
table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id)
|
2016-12-16 17:23:48 -05:00
|
|
|
table.filter_select_enabled = True
|
|
|
|
url = (
|
2019-06-25 16:34:48 -04:00
|
|
|
"/superset/filter/table/{}/target/?viz_type=sankey&groupby=source"
|
|
|
|
"&metric=sum__value&flt_col_0=source&flt_op_0=in&flt_eq_0=&"
|
|
|
|
"slice_id={}&datasource_name=energy_usage&"
|
|
|
|
"datasource_id=1&datasource_type=table"
|
|
|
|
)
|
2016-12-16 17:23:48 -05:00
|
|
|
|
|
|
|
# Changing name
|
|
|
|
resp = self.get_resp(url.format(tbl_id, slice_id))
|
|
|
|
assert len(resp) > 0
|
2020-12-09 15:02:29 -05:00
|
|
|
assert "energy_target0" in resp
|
2016-12-16 17:23:48 -05:00
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_slice_data(self):
    """A slice's serialized ``data`` payload must carry the attributes
    the frontend relies on (``changed_on``, ``modified``, ``owners``)."""
    self.login(username="admin")
    girls_slice = self.get_slice(
        slice_name="Girls", session=db.session, expunge_from_session=False
    )
    payload_keys = girls_slice.data.keys()
    # Each of these keys is required by the UI; fail fast if any is missing.
    for required_key in ("changed_on", "modified", "owners"):
        assert required_key in payload_keys
|
[wip] dashboard builder v2 (#4528)
* [dashboard builder] Add dir structure for dashboard/v2, simplified Header, split pane, Draggable side panel
[grid] add <DashboardGrid />, <ResizableContainer />, and initial grid components.
[grid] gridComponents/ directory, add fixtures/ directory and test layout, add <Column />
[grid] working grid with gutters
[grid] design tweaks and polish, add <Tabs />
[header] add gradient header logo and favicon
[dnd] begin adding dnd functionality
[dnd] add util/isValidChild.js
[react-beautiful-dnd] iterate on dnd until blocked
[dnd] refactor to use react-dnd
[react-dnd] refactor to use composable <DashboardComponent /> structure
[dnd] factor out DashboardComponent, let components render dropInidcator and set draggableRef, add draggable tabs
[dnd] refactor to use redux, add DashboardComponent and DashboardGrid containers
[dragdroppable] rename horizontal/vertical => row/column
[builder] refactor into HoverMenu, add WithPopoverMenu
[builder] add editable header and disableDragDrop prop for Dragdroppable's
[builder] make tabs editable
[builder] add generic popover dropdown and header row style editability
[builder] add hover rowStyle dropdown, make row styles editable
[builder] add some new component icons, add popover with delete to charts
[builder] add preview icons, add popover menu to rows.
[builder] add IconButton and RowStyleDropdown
[resizable] use ResizableContainer instead of DimensionProvider, fix resize and delete bugs
[builder] fix bug with spacer
[builder] clean up, header.size => header.headerSize
[builder] support more drag/drop combinations by wrapping some components in rows upon drop. fix within list drop index. refactor some utils.
[builder][tabs] fix broken add tab button
[dashboard builder] don't pass dashboard layout to all dashboard components, improve drop indicator logic, fix delete component pure component bug
[dnd] refactor drop position logic
* fix rebase error, clean up css organization and use @less vars
* [dashboard-builder] add top-level tabs + undo-redo (#4626)
* [top-level-tabs] initial working version of top-level tabs
* [top-level-tabs] simplify redux and disable ability to displace top-level tabs with other tabs
* [top-level-tabs] improve tab drag and drop css
* [undo-redo] add redux undo redo
* [dnd] clean up dropResult shape, add new component source id + type, use css for drop indicator instead of styles and fix tab indicators.
* [top-level-tabs] add 'Collapse tab content' to delete tabs button
* [dnd] add depth validation to drag and drop logic
* [dashboard-builder] add resize action, enforce minimum width of columns, column children inherit column size when necessary, meta.rowStyle => meta.background, add background to columns
* [dashboard-builder] make sure getChildWidth returns a number
* [dashboard builder] static layout + toasts (#4763)
* [dashboard-builder] remove spacer component
* [dashboard-builder] better transparent indicator, better grid gutter logic, no dragging top-level tabs, headers are multiples of grid unit, fix row height granularity, update redux state key dashboard => dashboardLayout
* [dashboard-builder] don't blast column child dimensions on resize
* [dashboard-builder] ResizableContainer min size can't be smaller than size, fix row style, role=none on WithPopoverMenu container
* [edit mode] add edit mode to redux and propagate to all <DashboardComponent />s
* [toasts] add Toast component, ToastPresenter container and component, and toast redux actions + reducers
* [dashboard-builder] add info toast when dropResult overflows parent
* [dashboard builder] git mv to src/ post-rebase
* Dashboard builder rebased + linted (#4849)
* define dashboard redux state
* update dashboard state reducer
* dashboard layout converter + grid render
* builder pane + slice adder
* Dashboard header + slice header controls
* fix linting
* 2nd code review comments
* [dashboard builder] improve perf (#4855)
* address major perf + css issues
[dashboard builder] fix dashboard filters and some css
[dashboard builder] use VIZ_TYPES, move stricter .eslintrc to dashboard/, more css fixes
[builder] delete GridCell and GridLayout, remove some unused css. fix broken tabs.
* [builder] fix errors post-rebase
* [builder] add support for custom DragDroppable drag layer and add AddSliceDragPreview
* [AddSliceDragPreview] fix type check
* [dashboard builder] add prettier and update all files
* [dashboard builder] merge v2/ directory int dashboard/
* [dashboard builder] move component/*Container => containers/*
* add sticky tabs + sidepane, better tabs perf, better container hierarchy, better chart header (#4893)
* dashboard header, slice header UI improvement
* add slider and sticky
* dashboard header, slice header UI improvement
* make builder pane floating
* [dashboard builder] add sticky top-level tabs, refactor for performant tabs
* [dashboard builder] visually distinct containers, icons for undo-redo, fix some isValidChild bugs
* [dashboard builder] better undo redo <> save changes state, notify upon reaching undo limit
* [dashboard builder] hook up edit + create component actions to saved-state pop.
* [dashboard builder] visual refinement, refactor Dashboard header content and updates into layout for undo-redo, refactor save dashboard modal to use toasts instead of notify.
* [dashboard builder] refactor chart name update logic to use layout for undo redo, save slice name changes on dashboard save
* add slider and sticky
* [dashboard builder] fix layout converter slice_id + chartId type casting, don't change grid size upon edit (perf)
* [dashboard builder] don't set version key in getInitialState
* [dashboard builder] make top level tabs addition/removal undoable, fix double sticky tabs + side panel.
* [dashboard builder] fix sticky tabs offset bug
* [dashboard builder] fix drag preview width, css polish, fix rebase issue
* [dashboard builder] fix side pane labels and hove z-index
* Markdown for dashboard (#4962)
* fix dashboard server-side unit tests (#5009)
* Dashboard save button (#4979)
* save button
* fix slices list height
* save custom css
* merge save-dash changes from dashboard v1
https://github.com/apache/incubator-superset/pull/4900
https://github.com/apache/incubator-superset/pull/5051
* [dashboard v2] check for default_filters before json_loads-ing them (#5064)
[dashboard v2] check for default_filters before json-loads-ing them
* [dashboard v2] fix bugs from rebase
* [dashboard v2] tests! (#5066)
* [dashboard v2][tests] add tests for newComponentFactory, isValidChild, dropOverflowsParent, and dnd-reorder
* [dashboard v2][tests] add tests for componentIsResizable, findParentId, getChartIdsFromLayout, newEntitiesFromDrop, and getDropPosition
* [dashboard v2][tests] add mockStore, mockState, and tests for DragDroppable, DashboardBuilder, DashboardGrid, ToastPresenter, and Toast
* [dashboard builder][tests] separate files for state tree fixtures, add ChartHolder, Chart, Divider, Header, Row tests and WithDragDropContext helper
* [dashboard v2][tests] fix dragdrop context with util/getDragDropManager, add test for menu/* and resizable/*, and new components
* [dashboard v2][tests] fix and re-write Dashboard tests, add getFormDataWithExtraFilters_spec
* [dashboard v2][tests] add reducer tests, fix lint error
* [dashboard-v2][tests] add actions/dashboardLayout_spec
* [dashboard v2] fix some prop bugs, open side pane on edit, fix slice name bug
* [dashboard v2] fix slice name save bug
* [dashboard v2] fix lint errors
* [dashboard v2] fix filters bug and add test
* [dashboard v2] fix getFormDataWithExtraFilters_spec
* [dashboard v2] logging updates (#5087)
* [dashboard v2] initial logging refactor
* [dashboard v2] clean up logger
* [logger] update explore with new log events, add refresh dashboard + refresh dashboard chart actions
* [logging] add logger_spec.js, fix reducers/dashboardState_spec + gridComponents/Chart_spec
* [dashboard v2][logging] refactor for bulk logging in python
* [logging] tweak python, fix and remove dup start_offset entries
* [dashboard v2][logging] add dashboard_first_load event
* [dashboard v2][logging] add slice_ids to dashboard pane load event
* [tests] fix npm test script
* Fix: update slices list when add/remove multiple slices (#5138)
* [dashboard v2] add v1 switch (#5126)
* [dashboard] copy all dashboard v1 into working v1 switch
* [dashboard] add functional v1 <> v2 switch with messaging
* [dashboard] add v2 logging to v1 dashboard, add read-v2-changes link, add client logging to track v1 <> v2 switches
* [dashboard] Remove default values for feedback url + v2 auto convert date
* [dashboard v2] fix misc UI/UX issues
* [dashboard v2] fix Markdown persistance issues and css, fix copy dash title, don't enforce shallow hovering with drop indicator
* [dashboard v2] improve non-shallow drop target UX, fix Markdown drop indicator, clarify slice adder filter/sort
* [dashboard v2] delete empty rows on drag or delete events that leave them without children, add test
* [dashboard v2] improve v1<>v2 switch modals, add convert to v2 badge in v1, fix unsaved changes issue in preview mode, don't auto convert column child widths for now
* [dashboard v2][dnd] add drop position cache to fix non-shallow drops
* [dashboard] fix test script with glob instead of recurse, fix tests, add temp fix for tab nesting, ignore v1 lint errors
* [dashboard] v2 badge style tweaks, add back v1 _set_dash_metadata for v1 editing
* [dashboard] fix python linting and tests
* [dashboard] lint tests
* add slice from explore view (#5141)
* Fix dashboard position row data (#5131)
* add slice_name to markdown
(cherry picked from commit 14b01f1)
* set min grid width be 1 column
* remove empty column
* check total columns count <= 12
* scan position data and fix rows
* fix dashboard url with default_filters
* [dashboard v2] better grid drop ux, fix tab bugs 🐛 (#5151)
* [dashboard v2] add empty droptarget to dashboard grid for better ux and update test
* [dashboard] reset tab index upon top-level tab deletion, fix findparentid bug
* [dashboard] update v1<>v2 modal link for tracking
* Fix: Should pass slice_can_edit flag down (#5159)
* [dash builder fix] combine markdown and slice name, slice picker height (#5165)
* combine markdown code and markdown slice name
* allow dynamic height for slice picker cell
* add word break for long datasource name
* [fix] new dashboard state (#5213)
* [dashboard v2] ui + ux fixes (#5208)
* [dashboard v2] use <Loading /> throughout, small loading gif, improve row/column visual hierarchy, add cached data pop
* [dashboard v2] lots of polish
* [dashboard v2] remove markdown padding on edit, more opaque slice drag preview, unsavedChanges=true upon moving a component, fix initial load logging.
* [dashboard v2] gray loading.gif, sticky header, undo/redo keyboard shortcuts, fix move component saved changes update, v0 double scrollbar fix
* [dashboard v2] move UndoRedoKeylisteners into Header, render only in edit mode, show visual feedback for keyboard shortcut, hide hover menu in top-level tabs
* [dashboard v2] fix grid + sidepane height issues
* [dashboard v2] add auto-resize functionality, update tests. cache findParentId results.
* [dashboard v2][tests] add getDetailedComponentWidth_spec.js
* [dashboard v2] fix lint
* [fix] layout converter fix (#5218)
* [fix] layout converter fix
* add changed_on into initial sliceEntity data
* add unit tests for SliceAdder component
* remove old fixtures file
* [dashboard v2] remove webpack-cli, fresh yarn.lock post-rebase
* [dashboard v2] lint javascript
* [dashboard v2] fix python tests
* [Fix] import/export dash in V2 (#5273)
* [dashboard v2] add markdown tests (#5275)
* [dashboard v2] add Markdown tests
* [dashboard v2][mocks] fix markdown mock
2018-06-25 12:17:22 -04:00
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures("load_energy_table_with_slice")
def test_slices(self):
    """Every slice's explore URL must respond with HTTP 200.

    Iterates all Slice rows and hits each slice's URL as admin.
    """
    # Testing by hitting the two supported end points for all slices
    self.login(username="admin")
    # Build (name, method, url) tuples directly; the `Slc = Slice` alias and
    # incremental `urls += [...]` of the original added nothing.
    urls = [
        (slc.slice_name, "explore", slc.slice_url)
        for slc in db.session.query(Slice).all()
    ]
    for name, method, url in urls:
        # A single logger call replaces the duplicated logger.info + print.
        logger.info(f"[{name}]/[{method}]: {url}")
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
|
2015-09-26 18:55:33 -04:00
|
|
|
|
2017-06-13 12:44:26 -04:00
|
|
|
def test_tablemodelview_list(self):
    """The table list view renders and links each table to its explore page."""
    self.login(username="admin")

    listing = self.get_resp("/tablemodelview/list/")

    # assert that a table is listed
    table = db.session.query(SqlaTable).first()
    assert table.name in listing
    assert "/superset/explore/table/{}".format(table.id) in listing
|
2017-02-10 04:17:49 -05:00
|
|
|
|
2017-06-13 12:44:26 -04:00
|
|
|
def test_add_slice(self):
    """The chart creation page is reachable for an admin user."""
    self.login(username="admin")
    # assert that /chart/add responds with 200
    response = self.client.get("/chart/add")
    self.assertEqual(response.status_code, 200)
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_get_user_slices_for_owners(self):
    """The user_slices endpoint tracks slice ownership changes.

    Verifies the endpoint returns nothing for a non-owner, the slice once
    ownership is granted, and nothing again once ownership is revoked.
    """
    self.login(username="alpha")
    user = security_manager.find_user("alpha")
    slice_name = "Girls"

    def fetch_user_slices():
        # Helper: GET the user's slices and return the parsed JSON payload.
        # Replaces three byte-identical request/parse stanzas.
        resp = self.client.get(f"/superset/user_slices/{user.id}/")
        return json.loads(resp.data)

    # ensure user is not owner of any slices
    self.assertEqual(fetch_user_slices(), [])

    # make user owner of slice and verify that endpoint returns said slice
    slc = self.get_slice(
        slice_name=slice_name, session=db.session, expunge_from_session=False
    )
    slc.owners = [user]
    db.session.merge(slc)
    db.session.commit()
    data = fetch_user_slices()
    self.assertEqual(len(data), 1)
    self.assertEqual(data[0]["title"], slice_name)

    # remove ownership and ensure user no longer gets slice
    slc = self.get_slice(
        slice_name=slice_name, session=db.session, expunge_from_session=False
    )
    slc.owners = []
    db.session.merge(slc)
    db.session.commit()
    self.assertEqual(fetch_user_slices(), [])
|
|
|
|
|
2018-02-06 15:38:07 -05:00
|
|
|
def test_get_user_slices(self):
    """The sliceasync read API filtered by creator answers HTTP 200."""
    self.login(username="admin")
    user_id = security_manager.find_user("admin").id
    response = self.client.get(f"/sliceasync/api/read?_flt_0_created_by={user_id}")
    self.assertEqual(response.status_code, 200)
|
|
|
|
|
2020-12-09 15:02:29 -05:00
|
|
|
@pytest.mark.usefixtures("load_energy_table_with_slice")
def test_slices_V2(self):
    """Hit every slice URL while logged in with the explore-v2-beta role."""
    # Add explore-v2-beta role to admin user
    # Test all slice urls as user with with explore-v2-beta role
    security_manager.add_role("explore-v2-beta")

    security_manager.add_user(
        "explore_beta",
        "explore_beta",
        " user",
        "explore_beta@airbnb.com",
        security_manager.find_role("explore-v2-beta"),
        password="general",
    )
    self.login(username="explore_beta", password="general")

    slice_urls = [
        (slc.slice_name, "slice_url", slc.slice_url)
        for slc in db.session.query(Slice).all()
    ]
    for name, method, url in slice_urls:
        print(f"[{name}]/[{method}]: {url}")
        self.client.get(url)
|
2016-12-12 19:42:38 -05:00
|
|
|
|
2016-02-10 12:34:09 -05:00
|
|
|
def test_doctests(self):
    """Run doctests embedded in core modules and fail on any error.

    Raises AssertionError (a test *failure*, not an error) and names the
    offending module, instead of the original opaque `Exception("Failed a
    doctest")`.
    """
    modules = [utils, models, sql_lab]
    for mod in modules:
        failed, tests = doctest.testmod(mod)
        if failed:
            raise AssertionError(
                f"Failed {failed} of {tests} doctest(s) in module {mod.__name__}"
            )
|
2016-02-10 12:34:09 -05:00
|
|
|
|
2016-03-28 11:09:46 -04:00
|
|
|
def test_misc(self):
    """Each health-check style endpoint answers with the literal "OK"."""
    for endpoint in ("/health", "/healthcheck", "/ping"):
        assert self.get_resp(endpoint) == "OK"
|
2016-03-27 17:23:33 -04:00
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
def test_testconn(self, username="admin"):
    """/superset/testconn accepts both the masked and the decrypted URI.

    The two request/assert stanzas of the original were byte-identical
    except for the uri; they are collapsed into one local helper.
    """
    # need to temporarily allow sqlite dbs, teardown will undo this
    app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
    self.login(username=username)
    database = utils.get_example_database()

    def assert_testconn_ok(uri):
        # Helper: POST the given uri and expect a JSON 200 response.
        data = json.dumps(
            {"uri": uri, "name": "examples", "impersonate_user": False}
        )
        response = self.client.post(
            "/superset/testconn", data=data, content_type="application/json"
        )
        assert response.status_code == 200
        assert response.headers["Content-Type"] == "application/json"

    # validate that the endpoint works with the password-masked sqlalchemy uri
    assert_testconn_ok(database.safe_sqlalchemy_uri())

    # validate that the endpoint works with the decrypted sqlalchemy uri
    assert_testconn_ok(database.sqlalchemy_uri_decrypted)
|
2016-09-17 15:32:41 -04:00
|
|
|
|
2020-02-05 00:01:43 -05:00
|
|
|
def test_testconn_failed_conn(self, username="admin"):
    """testconn returns HTTP 400 plus a driver error for unloadable drivers.

    The two near-identical request/assert stanzas of the original are
    collapsed into one local helper parameterized by uri and driver name.
    """
    self.login(username=username)

    def assert_driver_error(uri, driver):
        # Helper: POST an unloadable uri and expect the driver error payload.
        data = json.dumps(
            {"uri": uri, "name": "examples", "impersonate_user": False}
        )
        response = self.client.post(
            "/superset/testconn", data=data, content_type="application/json"
        )
        assert response.status_code == 400
        assert response.headers["Content-Type"] == "application/json"
        response_body = json.loads(response.data.decode("utf-8"))
        expected_body = {"error": f"Could not load database driver: {driver}"}
        assert response_body == expected_body, "%s != %s" % (
            response_body,
            expected_body,
        )

    assert_driver_error("broken://url", "broken")
    assert_driver_error("mssql+pymssql://url", "mssql+pymssql")
|
|
|
|
|
2020-03-02 16:13:11 -05:00
|
|
|
def test_testconn_unsafe_uri(self, username="admin"):
    """testconn rejects sqlite URIs when unsafe connections are disallowed."""
    self.login(username=username)
    app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True

    payload = json.dumps(
        {
            "uri": "sqlite:///home/superset/unsafe.db",
            "name": "unsafe",
            "impersonate_user": False,
        }
    )
    response = self.client.post(
        "/superset/testconn",
        data=payload,
        content_type="application/json",
    )
    self.assertEqual(400, response.status_code)

    body = json.loads(response.data.decode("utf-8"))
    expected = {
        "error": "SQLiteDialect_pysqlite cannot be used as a data source for security reasons."
    }
    self.assertEqual(expected, body)
|
|
|
|
|
2017-09-13 23:59:03 -04:00
|
|
|
def test_custom_password_store(self):
    """A custom password store overrides the password in the decrypted URI.

    The original left ``models.custom_password_store`` patched if an
    assertion failed, leaking the patch into later tests; the reset now
    lives in a ``finally`` block.
    """
    database = utils.get_example_database()
    conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)

    def custom_password_store(uri):
        return "password_store_test"

    models.custom_password_store = custom_password_store
    try:
        conn = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
        if conn_pre.password:
            assert conn.password == "password_store_test"
            assert conn.password != conn_pre.password
    finally:
        # Disable for password store for later tests — unconditionally.
        models.custom_password_store = None
|
2017-09-13 23:59:03 -04:00
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
def test_databaseview_edit(self, username="admin"):
    """Posting a password-masked URI must not clobber the decrypted URI.

    The original performed its post-test cleanup after the assertion, so a
    failure left ``impersonate_user``/``allow_dml``/``allow_run_async``
    modified for later tests; cleanup now runs in ``finally``.
    """
    # validate that sending a password-masked uri does not over-write the decrypted
    # uri
    self.login(username=username)
    database = utils.get_example_database()
    sqlalchemy_uri_decrypted = database.sqlalchemy_uri_decrypted
    url = "databaseview/edit/{}".format(database.id)
    data = {k: database.__getattribute__(k) for k in DatabaseView.add_columns}
    data["sqlalchemy_uri"] = database.safe_sqlalchemy_uri()
    self.client.post(url, data=data)

    database = utils.get_example_database()
    try:
        self.assertEqual(sqlalchemy_uri_decrypted, database.sqlalchemy_uri_decrypted)
    finally:
        # Need to clean up after ourselves
        database.impersonate_user = False
        database.allow_dml = False
        database.allow_run_async = False
        db.session.commit()
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures(
    "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices"
)
def test_warm_up_cache(self):
    """warm_up_cache succeeds for a slice, a table, and a dashboard context."""
    self.login()
    slc = self.get_slice("Girls", db.session)
    # Expected payload for every slice-level warm-up below.
    expected = [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}]

    data = self.get_json_resp("/superset/warm_up_cache?slice_id={}".format(slc.id))
    self.assertEqual(data, expected)

    table_data = self.get_json_resp(
        "/superset/warm_up_cache?table_name=energy_usage&db_name=main"
    )
    assert len(table_data) > 0

    dashboard = self.get_dash_by_slug("births")

    dash_data = self.get_json_resp(
        f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}"
    )
    assert dash_data == expected

    filtered_data = self.get_json_resp(
        f"/superset/warm_up_cache?dashboard_id={dashboard.id}&slice_id={slc.id}&extra_filters="
        + quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}]))
    )
    assert filtered_data == expected
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_cache_logging(self):
    """Warming a slice's cache records a CacheKey row for its datasource.

    The original restored ``STORE_CACHE_KEYS_IN_METADATA_DB`` only after the
    assertion, so a failure left the flag flipped for later tests; the
    restore now runs in ``finally``.
    """
    self.login("admin")
    store_cache_keys = app.config["STORE_CACHE_KEYS_IN_METADATA_DB"]
    app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = True
    try:
        girls_slice = self.get_slice("Girls", db.session)
        self.get_json_resp(
            "/superset/warm_up_cache?slice_id={}".format(girls_slice.id)
        )
        ck = db.session.query(CacheKey).order_by(CacheKey.id.desc()).first()
        assert ck.datasource_uid == f"{girls_slice.table.id}__table"
    finally:
        app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = store_cache_keys
|
2020-09-01 12:41:25 -04:00
|
|
|
|
2016-04-20 18:08:10 -04:00
|
|
|
def test_shortner(self):
    """POSTing a long explore URL to /r/shortner/ yields a short /r/<id> URL."""
    self.login(username="admin")
    long_url = (
        "//superset/explore/table/1/?viz_type=sankey&groupby=source&"
        "groupby=target&metric=sum__value&row_limit=5000&where=&having=&"
        "flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id=78&slice_name="
        "Energy+Sankey&collapsed_fieldsets=&action=&datasource_name="
        "energy_usage&datasource_id=1&datasource_type=table&"
        "previous_viz_type=sankey"
    )
    response = self.client.post("/r/shortner/", data=dict(data=long_url))
    assert re.search(r"\/r\/[0-9]+", response.data.decode("utf-8"))
|
2016-04-20 18:08:10 -04:00
|
|
|
|
2021-03-04 15:46:45 -05:00
|
|
|
def test_shortner_invalid(self):
    """Malformed URLs are rejected by the shortner with HTTP 400."""
    self.login(username="admin")
    for bad_url in (
        "hhttp://invalid.com",
        "hhttps://invalid.com",
        "www.invalid.com",
    ):
        response = self.client.post("/r/shortner/", data=dict(data=bad_url))
        assert response.status_code == 400
|
|
|
|
|
|
|
|
def test_redirect_invalid(self):
    """Redirecting a stored invalid URL falls back to the site root.

    The original deleted its fixture Url row only after the assertion, so a
    failure leaked the row into later tests; the delete now runs in
    ``finally``.
    """
    model_url = models.Url(url="hhttp://invalid.com")
    db.session.add(model_url)
    db.session.commit()
    try:
        self.login(username="admin")
        response = self.client.get(f"/r/{model_url.id}")
        assert response.headers["Location"] == "http://localhost/"
    finally:
        db.session.delete(model_url)
        db.session.commit()
|
|
|
|
|
2021-05-21 17:29:52 -04:00
|
|
|
@with_feature_flags(KV_STORE=False)
def test_kv_disabled(self):
    """With KV_STORE disabled, both kv endpoints answer HTTP 404."""
    self.login(username="admin")

    get_response = self.client.get("/kv/10001/")
    self.assertEqual(404, get_response.status_code)

    payload = json.dumps({"data": "this is a test"})
    post_response = self.client.post("/kv/store/", data=dict(data=payload))
    self.assertEqual(post_response.status_code, 404)
|
|
|
|
|
|
|
|
@with_feature_flags(KV_STORE=True)
def test_kv_enabled(self):
    """With KV_STORE enabled, values round-trip through the kv endpoints."""
    self.login(username="admin")

    # An id that was never stored still 404s.
    missing = self.client.get("/kv/10001/")
    self.assertEqual(404, missing.status_code)

    value = json.dumps({"data": "this is a test"})
    stored = self.client.post("/kv/store/", data=dict(data=value))
    self.assertEqual(stored.status_code, 200)
    kv = db.session.query(models.KeyValue).first()
    self.assertEqual(json.loads(value), json.loads(kv.value))

    fetched = self.client.get("/kv/{}/".format(kv.id))
    self.assertEqual(fetched.status_code, 200)
    self.assertEqual(json.loads(value), json.loads(fetched.data.decode("utf-8")))
|
2017-01-27 13:20:24 -05:00
|
|
|
|
2016-04-26 19:44:51 -04:00
|
|
|
def test_gamma(self):
    """A gamma user can see both the chart and the dashboard list pages."""
    self.login(username="gamma")
    for label, endpoint in (
        ("Charts", "/chart/list/"),
        ("Dashboards", "/dashboard/list/"),
    ):
        assert label in self.get_resp(endpoint)
|
2016-04-26 19:44:51 -04:00
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_csv_endpoint(self):
    """Run a query, fetch its result as CSV, and compare to the expected rows."""
    self.login()
    client_id = "{}".format(random.getrandbits(64))[:10]
    # Grab a real name from the table first so the test is data-independent.
    get_name_sql = """
        SELECT name
        FROM birth_names
        LIMIT 1
    """
    resp = self.run_sql(get_name_sql, client_id, raise_on_error=True)
    name = resp["data"][0]["name"]
    sql = f"""
        SELECT name
        FROM birth_names
        WHERE name = '{name}'
        LIMIT 1
    """
    client_id = "{}".format(random.getrandbits(64))[:10]
    self.run_sql(sql, client_id, raise_on_error=True)

    resp = self.get_resp("/superset/csv/{}".format(client_id))
    data = csv.reader(io.StringIO(resp))
    expected_data = csv.reader(io.StringIO(f"name\n{name}\n"))

    # NOTE(review): the query and CSV fetch are executed a second time with a
    # fresh client_id, and only this second pair of readers is asserted below;
    # the first pair is discarded — presumably to exercise a repeat/cached
    # path. Confirm the intent before simplifying.
    client_id = "{}".format(random.getrandbits(64))[:10]
    self.run_sql(sql, client_id, raise_on_error=True)

    resp = self.get_resp("/superset/csv/{}".format(client_id))
    data = csv.reader(io.StringIO(resp))
    expected_data = csv.reader(io.StringIO(f"name\n{name}\n"))

    self.assertEqual(list(expected_data), list(data))
    self.logout()
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_extra_table_metadata(self):
    """extra_table_metadata returns parseable JSON for the birth_names table."""
    self.login()
    database = utils.get_example_database()
    # presto/hive examples live in "default"; other backends use "superset".
    schema = "default" if database.backend in {"presto", "hive"} else "superset"
    self.get_json_resp(
        f"/superset/extra_table_metadata/{database.id}/birth_names/{schema}/"
    )
|
2016-10-19 12:17:08 -04:00
|
|
|
|
2016-10-26 14:09:27 -04:00
|
|
|
def test_templated_sql_json(self):
    """Jinja templating in SQL is evaluated before execution.

    On presto the feature is not supported yet; the original silently
    ``return``-ed (reported as a pass) — use ``skipTest`` so the skip shows
    up in test results.
    """
    if utils.get_example_database().backend == "presto":
        # TODO: make it work for presto
        self.skipTest("templated SQL is not supported on presto yet")
    self.login()
    sql = "SELECT '{{ 1+1 }}' as test"
    data = self.run_sql(sql, "fdaklj3ws")
    self.assertEqual(data["data"][0]["test"], "2")
|
2020-04-07 16:00:42 -04:00
|
|
|
|
2021-07-01 11:03:07 -04:00
|
|
|
@mock.patch(
    "tests.integration_tests.superset_test_custom_template_processors.datetime"
)
@mock.patch("superset.views.core.get_sql_results")
def test_custom_templated_sql_json(self, sql_lab_mock, mock_dt) -> None:
    """Test sqllab receives macros expanded query."""
    # Freeze "now" so the $DATE() macro expands deterministically to 1970-01-01.
    mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1))
    self.login()
    sql = "SELECT '$DATE()' as test"
    # Canned result returned by the mocked get_sql_results backend; the test
    # asserts on the *query text* passed to the mock, not on this payload.
    resp = {
        "status": QueryStatus.SUCCESS,
        "query": {"rows": 1},
        "data": [{"test": "'1970-01-01'"}],
    }
    sql_lab_mock.return_value = resp

    dbobj = self.create_fake_db_for_macros()
    json_payload = dict(database_id=dbobj.id, sql=sql)
    self.get_json_resp(
        "/superset/sql_json/", raise_on_error=False, json_=json_payload
    )
    assert sql_lab_mock.called
    # call_args[0][1] is the second positional argument of get_sql_results:
    # the macro must already be expanded when it reaches the SQL Lab layer.
    self.assertEqual(sql_lab_mock.call_args[0][1], "SELECT '1970-01-01' as test")

    self.delete_fake_db_for_macros()
|
2020-04-07 16:00:42 -04:00
|
|
|
|
2016-11-08 18:55:49 -05:00
|
|
|
def test_fetch_datasource_metadata(self):
    """fetch_datasource_metadata exposes the expected top-level keys."""
    self.login(username="admin")
    url = "/superset/fetch_datasource_metadata?datasourceKey=1__table"
    resp = self.get_json_resp(url)
    expected_keys = (
        "name",
        "type",
        "order_by_choices",
        "granularity_sqla",
        "time_grain_sqla",
        "id",
    )
    for key in expected_keys:
        self.assertIn(key, resp.keys())
|
2016-11-01 02:52:37 -04:00
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_user_profile(self, username="admin"):
    """The profile page and its JSON endpoints respond without error messages.

    The original repeated the same fetch/``assertNotIn("message", ...)``
    stanza seven times; the endpoints are now checked in one loop.
    """
    self.login(username=username)
    slc = self.get_slice("Girls", db.session)

    # Setting some faves
    resp = self.get_json_resp(f"/superset/favstar/Slice/{slc.id}/select/")
    self.assertEqual(resp["count"], 1)

    dash = db.session.query(Dashboard).filter_by(slug="births").first()
    resp = self.get_json_resp(f"/superset/favstar/Dashboard/{dash.id}/select/")
    self.assertEqual(resp["count"], 1)

    userid = security_manager.find_user("admin").id
    resp = self.get_resp(f"/superset/profile/{username}/")
    self.assertIn('"app"', resp)

    # Each profile-related JSON endpoint must not report an error message.
    profile_endpoints = (
        f"/superset/recent_activity/{userid}/",
        f"/superset/created_slices/{userid}/",
        f"/superset/created_dashboards/{userid}/",
        f"/superset/fave_slices/{userid}/",
        f"/superset/fave_dashboards/{userid}/",
        f"/superset/user_slices/{userid}/",
        f"/superset/fave_dashboards_by_username/{username}/",
    )
    for url in profile_endpoints:
        data = self.get_json_resp(url)
        self.assertNotIn("message", data)
|
2016-11-20 00:23:44 -05:00
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_slice_id_is_always_logged_correctly_on_web_request(self):
        """A web request to /superset/explore writes a Log row tagged with the slice id."""
        # superset/explore case
        self.login("admin")
        slc = db.session.query(Slice).filter_by(slice_name="Girls").one()
        # Query the action log filtered by this slice's id *before* the request;
        # lazy evaluation means count() below reflects rows written by get_resp.
        qry = db.session.query(models.Log).filter_by(slice_id=slc.id)
        self.get_resp(slc.slice_url, {"form_data": json.dumps(slc.form_data)})
        # Exactly one log entry should have been recorded for this slice.
        self.assertEqual(1, qry.count())
|
|
|
|
|
2020-05-21 16:49:53 -04:00
|
|
|
def create_sample_csvfile(self, filename: str, content: List[str]) -> None:
|
|
|
|
with open(filename, "w+") as test_file:
|
|
|
|
for l in content:
|
|
|
|
test_file.write(f"{l}\n")
|
2019-06-25 16:34:48 -04:00
|
|
|
|
2020-07-29 15:50:39 -04:00
|
|
|
def create_sample_excelfile(self, filename: str, content: Dict[str, str]) -> None:
|
|
|
|
pd.DataFrame(content).to_excel(filename)
|
|
|
|
|
2020-05-21 16:49:53 -04:00
|
|
|
    def enable_csv_upload(self, database: models.Database) -> None:
        """Enables csv upload in the given database."""
        database.allow_file_upload = True
        # Persist the flag so subsequent view requests observe it.
        db.session.commit()
        # The database list view should now advertise the CSV upload action.
        add_datasource_page = self.get_resp("/databaseview/list/")
        self.assertIn("Upload a CSV", add_datasource_page)

        # ...and the CSV upload form itself should be reachable.
        form_get = self.get_resp("/csvtodatabaseview/form")
        self.assertIn("CSV to Database configuration", form_get)
|
2019-11-07 13:03:42 -05:00
|
|
|
|
2017-11-20 11:33:18 -05:00
|
|
|
    def test_dataframe_timezone(self):
        """Timezone-aware datetimes survive the result-set -> DataFrame -> JSON path."""
        # Fixed +01:00 offset exercises non-UTC handling.
        tz = pytz.FixedOffset(60)
        data = [
            (datetime.datetime(2017, 11, 18, 21, 53, 0, 219225, tzinfo=tz),),
            (datetime.datetime(2017, 11, 18, 22, 6, 30, tzinfo=tz),),
        ]
        results = SupersetResultSet(list(data), [["data"]], BaseEngineSpec)
        df = results.to_pandas_df()
        # `data` is rebound: now a list of record dicts rather than raw tuples.
        data = dataframe.df_to_records(df)
        json_str = json.dumps(data, default=utils.pessimistic_json_iso_dttm_ser)
        # Records keep their tz-aware Timestamp values (offset preserved)...
        self.assertDictEqual(
            data[0], {"data": pd.Timestamp("2017-11-18 21:53:00.219225+0100", tz=tz)}
        )
        self.assertDictEqual(
            data[1], {"data": pd.Timestamp("2017-11-18 22:06:30+0100", tz=tz)}
        )
        # ...and serialize to ISO-8601 strings carrying the +01:00 offset.
        self.assertEqual(
            json_str,
            '[{"data": "2017-11-18T21:53:00.219225+01:00"}, {"data": "2017-11-18T22:06:30+01:00"}]',
        )
|
|
|
|
|
2019-01-13 12:30:05 -05:00
|
|
|
    def test_mssql_engine_spec_pymssql(self):
        """MSSQL engine spec handles tuple rows as returned by the pymssql driver."""
        # Test for case when tuple is returned (pymssql)
        data = [
            (1, 1, datetime.datetime(2017, 10, 19, 23, 39, 16, 660000)),
            (2, 2, datetime.datetime(2018, 10, 19, 23, 39, 16, 660000)),
        ]
        results = SupersetResultSet(
            list(data), [["col1"], ["col2"], ["col3"]], MssqlEngineSpec
        )
        df = results.to_pandas_df()
        # `data` is rebound to the list of record dicts derived from the DataFrame.
        data = dataframe.df_to_records(df)
        self.assertEqual(len(data), 2)
        # Datetime column comes back as a pandas Timestamp with microseconds intact.
        self.assertEqual(
            data[0],
            {"col1": 1, "col2": 1, "col3": pd.Timestamp("2017-10-19 23:39:16.660000")},
        )
|
2019-01-13 12:30:05 -05:00
|
|
|
|
2018-02-18 19:30:11 -05:00
|
|
|
    def test_comments_in_sqlatable_query(self):
        """SQL comments are stripped from a virtual table's query, but comment-like
        text inside string literals is preserved."""
        # Literals deliberately *look* like comments; they must survive.
        clean_query = "SELECT '/* val 1 */' as c1, '-- val 2' as c2 FROM tbl"
        commented_query = "/* comment 1 */" + clean_query + "-- comment 2"
        table = SqlaTable(
            table_name="test_comments_in_sqlatable_query_table",
            sql=commented_query,
            database=get_example_database(),
        )
        rendered_query = str(table.get_from_clause())
        # Only the real block/line comments should have been removed.
        self.assertEqual(clean_query, rendered_query)
|
|
|
|
|
2019-07-01 14:55:25 -04:00
|
|
|
    def test_slice_payload_no_datasource(self):
        """explore_json without a datasource returns a 'dataset no longer exists' error."""
        self.login(username="admin")
        # raise_on_error=False: we want the error payload, not an exception.
        data = self.get_json_resp("/superset/explore_json/", raise_on_error=False)

        self.assertEqual(
            data["errors"][0]["message"],
            "The dataset associated with this chart no longer exists",
        )
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_explore_json(self):
        """POST to explore_json with a dist_bar form returns chart data synchronously."""
        tbl_id = self.table_ids.get("birth_names")
        form_data = {
            "datasource": f"{tbl_id}__table",
            "viz_type": "dist_bar",
            "time_range_endpoints": ["inclusive", "exclusive"],
            "granularity_sqla": "ds",
            "time_range": "No filter",
            "metrics": ["count"],
            "adhoc_filters": [],
            "groupby": ["gender"],
            "row_limit": 100,
        }
        self.login(username="admin")
        rv = self.client.post(
            "/superset/explore_json/", data={"form_data": json.dumps(form_data)},
        )
        data = json.loads(rv.data.decode("utf-8"))

        self.assertEqual(rv.status_code, 200)
        # Grouping birth_names by gender yields exactly two rows (boy/girl).
        self.assertEqual(data["rowcount"], 2)
|
|
|
|
|
2021-01-22 05:38:33 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_explore_json_dist_bar_order(self):
        """dist_bar series honor order_desc: chart values match a hand-written
        equivalent SQL query ordered by count(name) descending."""
        tbl_id = self.table_ids.get("birth_names")
        form_data = {
            "datasource": f"{tbl_id}__table",
            "viz_type": "dist_bar",
            "url_params": {},
            "time_range_endpoints": ["inclusive", "exclusive"],
            "granularity_sqla": "ds",
            # 100-year window ending 2021-01-22, mirrored by the WHERE clause below.
            "time_range": 'DATEADD(DATETIME("2021-01-22T00:00:00"), -100, year) : 2021-01-22T00:00:00',
            # Two SIMPLE adhoc metrics: COUNT(name) and COUNT(ds).
            "metrics": [
                {
                    "expressionType": "SIMPLE",
                    "column": {
                        "id": 334,
                        "column_name": "name",
                        "verbose_name": "null",
                        "description": "null",
                        "expression": "",
                        "filterable": True,
                        "groupby": True,
                        "is_dttm": False,
                        "type": "VARCHAR(255)",
                        "python_date_format": "null",
                    },
                    "aggregate": "COUNT",
                    "sqlExpression": "null",
                    "isNew": False,
                    "hasCustomLabel": False,
                    "label": "COUNT(name)",
                    "optionName": "metric_xdzsijn42f9_khi4h3v3vci",
                },
                {
                    "expressionType": "SIMPLE",
                    "column": {
                        "id": 332,
                        "column_name": "ds",
                        "verbose_name": "null",
                        "description": "null",
                        "expression": "",
                        "filterable": True,
                        "groupby": True,
                        "is_dttm": True,
                        "type": "TIMESTAMP WITHOUT TIME ZONE",
                        "python_date_format": "null",
                    },
                    "aggregate": "COUNT",
                    "sqlExpression": "null",
                    "isNew": False,
                    "hasCustomLabel": False,
                    "label": "COUNT(ds)",
                    "optionName": "metric_80g1qb9b6o7_ci5vquydcbe",
                },
            ],
            "order_desc": True,
            "adhoc_filters": [],
            "groupby": ["name"],
            "columns": [],
            "row_limit": 10,
            "color_scheme": "supersetColors",
            "label_colors": {},
            "show_legend": True,
            "y_axis_format": "SMART_NUMBER",
            "bottom_margin": "auto",
            "x_ticks_layout": "auto",
        }

        self.login(username="admin")
        rv = self.client.post(
            "/superset/explore_json/", data={"form_data": json.dumps(form_data)},
        )
        data = json.loads(rv.data.decode("utf-8"))

        # Reference query: same aggregation/ordering written directly in SQL.
        resp = self.run_sql(
            """
            SELECT count(name) AS count_name, count(ds) AS count_ds
            FROM birth_names
            WHERE ds >= '1921-01-22 00:00:00.000000' AND ds < '2021-01-22 00:00:00.000000'
            GROUP BY name
            ORDER BY count_name DESC
            LIMIT 10;
            """,
            client_id="client_id_1",
            user_name="admin",
        )
        count_ds = []
        count_name = []
        # Pull the two metric series out of the chart payload by label.
        for series in data["data"]:
            if series["key"] == "COUNT(ds)":
                count_ds = series["values"]
            if series["key"] == "COUNT(name)":
                count_name = series["values"]
        # Chart values must line up, row for row, with the SQL reference result.
        for expected, actual_ds, actual_name in zip(resp["data"], count_ds, count_name):
            assert expected["count_name"] == actual_name["y"]
            assert expected["count_ds"] == actual_ds["y"]
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        GLOBAL_ASYNC_QUERIES=True,
    )
    def test_explore_json_async(self):
        """With GLOBAL_ASYNC_QUERIES enabled, explore_json returns 202 plus async job metadata."""
        tbl_id = self.table_ids.get("birth_names")
        form_data = {
            "datasource": f"{tbl_id}__table",
            "viz_type": "dist_bar",
            "time_range_endpoints": ["inclusive", "exclusive"],
            "granularity_sqla": "ds",
            "time_range": "No filter",
            "metrics": ["count"],
            "adhoc_filters": [],
            "groupby": ["gender"],
            "row_limit": 100,
        }
        # Re-init so the manager picks up the patched feature flag.
        async_query_manager.init_app(app)
        self.login(username="admin")
        rv = self.client.post(
            "/superset/explore_json/", data={"form_data": json.dumps(form_data)},
        )
        data = json.loads(rv.data.decode("utf-8"))
        keys = list(data.keys())

        # 202 Accepted: work is deferred to the async worker.
        self.assertEqual(rv.status_code, 202)
        self.assertCountEqual(
            keys, ["channel_id", "job_id", "user_id", "status", "errors", "result_url"]
        )
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        GLOBAL_ASYNC_QUERIES=True,
    )
    def test_explore_json_async_results_format(self):
        """Non-JSON result formats (?results=true) bypass async handling and return 200."""
        tbl_id = self.table_ids.get("birth_names")
        form_data = {
            "datasource": f"{tbl_id}__table",
            "viz_type": "dist_bar",
            "time_range_endpoints": ["inclusive", "exclusive"],
            "granularity_sqla": "ds",
            "time_range": "No filter",
            "metrics": ["count"],
            "adhoc_filters": [],
            "groupby": ["gender"],
            "row_limit": 100,
        }
        # Re-init so the manager picks up the patched feature flag.
        async_query_manager.init_app(app)
        self.login(username="admin")
        rv = self.client.post(
            "/superset/explore_json/?results=true",
            data={"form_data": json.dumps(form_data)},
        )
        # Synchronous 200, not the async 202 path.
        self.assertEqual(rv.status_code, 200)
|
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    @mock.patch(
        "superset.utils.cache_manager.CacheManager.cache",
        new_callable=mock.PropertyMock,
    )
    @mock.patch("superset.viz.BaseViz.force_cached", new_callable=mock.PropertyMock)
    def test_explore_json_data(self, mock_force_cached, mock_cache):
        """Cached explore_json data endpoint serves chart data for a valid cache key."""
        tbl_id = self.table_ids.get("birth_names")
        form_data = dict(
            {
                "form_data": {
                    "datasource": f"{tbl_id}__table",
                    "viz_type": "dist_bar",
                    "time_range_endpoints": ["inclusive", "exclusive"],
                    "granularity_sqla": "ds",
                    "time_range": "No filter",
                    "metrics": ["count"],
                    "adhoc_filters": [],
                    "groupby": ["gender"],
                    "row_limit": 100,
                }
            }
        )

        # Minimal cache stand-in: any key resolves to the form_data above.
        class MockCache:
            def get(self, key):
                return form_data

            def set(self):
                return None

        mock_cache.return_value = MockCache()
        mock_force_cached.return_value = False

        self.login(username="admin")
        rv = self.client.get("/superset/explore_json/data/valid-cache-key")
        data = json.loads(rv.data.decode("utf-8"))

        self.assertEqual(rv.status_code, 200)
        # Grouping birth_names by gender yields exactly two rows.
        self.assertEqual(data["rowcount"], 2)
|
|
|
|
|
|
|
|
    @mock.patch(
        "superset.utils.cache_manager.CacheManager.cache",
        new_callable=mock.PropertyMock,
    )
    def test_explore_json_data_no_login(self, mock_cache):
        """The cached explore_json data endpoint requires authentication (401 anonymous)."""
        tbl_id = self.table_ids.get("birth_names")
        form_data = dict(
            {
                "form_data": {
                    "datasource": f"{tbl_id}__table",
                    "viz_type": "dist_bar",
                    "time_range_endpoints": ["inclusive", "exclusive"],
                    "granularity_sqla": "ds",
                    "time_range": "No filter",
                    "metrics": ["count"],
                    "adhoc_filters": [],
                    "groupby": ["gender"],
                    "row_limit": 100,
                }
            }
        )

        # Minimal cache stand-in: any key resolves to the form_data above.
        class MockCache:
            def get(self, key):
                return form_data

            def set(self):
                return None

        mock_cache.return_value = MockCache()

        # Deliberately no login before the request.
        rv = self.client.get("/superset/explore_json/data/valid-cache-key")
        self.assertEqual(rv.status_code, 401)
|
|
|
|
|
|
|
|
def test_explore_json_data_invalid_cache_key(self):
|
|
|
|
self.login(username="admin")
|
|
|
|
cache_key = "invalid-cache-key"
|
|
|
|
rv = self.client.get(f"/superset/explore_json/data/{cache_key}")
|
|
|
|
data = json.loads(rv.data.decode("utf-8"))
|
|
|
|
|
|
|
|
self.assertEqual(rv.status_code, 404)
|
|
|
|
self.assertEqual(data["error"], "Cached data not found")
|
|
|
|
|
2020-06-11 14:29:43 -04:00
|
|
|
    @mock.patch(
        "superset.security.SupersetSecurityManager.get_schemas_accessible_by_user"
    )
    @mock.patch("superset.security.SupersetSecurityManager.can_access_database")
    @mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources")
    def test_schemas_access_for_csv_upload_endpoint(
        self,
        mock_can_access_all_datasources,
        mock_can_access_database,
        mock_schemas_accessible,
    ):
        """Upload-schema endpoint falls back to the per-user accessible-schema list
        when broad database/datasource access is denied."""
        self.login(username="admin")
        dbobj = self.create_fake_db()
        # Deny the coarse-grained checks so only the schema-level list applies.
        mock_can_access_all_datasources.return_value = False
        mock_can_access_database.return_value = False
        mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"]
        data = self.get_json_resp(
            url="/superset/schemas_access_for_file_upload?db_id={db_id}".format(
                db_id=dbobj.id
            )
        )
        assert data == ["this_schema_is_allowed_too"]
        # Clean up the temporary database record.
        self.delete_fake_db()
|
2018-09-20 14:21:11 -04:00
|
|
|
|
2021-01-11 08:57:55 -05:00
|
|
|
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_select_star(self):
        """select_star for an accessible table renders SQL containing its columns."""
        self.login(username="admin")
        examples_db = utils.get_example_database()
        resp = self.get_resp(f"/superset/select_star/{examples_db.id}/birth_names")
        # The generated SELECT should list birth_names columns such as `gender`.
        self.assertIn("gender", resp)
|
2018-10-08 13:32:40 -04:00
|
|
|
|
2020-02-14 09:30:49 -05:00
|
|
|
def test_get_select_star_not_allowed(self):
|
|
|
|
"""
|
2020-11-24 15:13:52 -05:00
|
|
|
Database API: Test get select star not allowed
|
2020-02-14 09:30:49 -05:00
|
|
|
"""
|
|
|
|
self.login(username="gamma")
|
|
|
|
example_db = utils.get_example_database()
|
|
|
|
resp = self.client.get(f"/superset/select_star/{example_db.id}/birth_names")
|
2021-06-30 19:32:59 -04:00
|
|
|
self.assertEqual(resp.status_code, 403)
|
2020-02-14 09:30:49 -05:00
|
|
|
|
2019-10-25 13:22:16 -04:00
|
|
|
    @mock.patch("superset.views.core.results_backend_use_msgpack", False)
    @mock.patch("superset.views.core.results_backend")
    def test_display_limit(self, mock_results_backend):
        """/superset/results honors the ?rows= display limit and flags truncation."""
        self.login()

        data = [{"col_0": i} for i in range(100)]
        payload = {
            "status": QueryStatus.SUCCESS,
            "query": {"rows": 100},
            "data": data,
        }
        # limit results to 1
        expected_key = {"status": "success", "query": {"rows": 100}, "data": data}
        limited_data = data[:1]
        expected_limited = {
            "status": "success",
            "query": {"rows": 100},
            "data": limited_data,
            "displayLimitReached": True,
        }

        # Stand-in Query row returned by the patched db session below.
        query_mock = mock.Mock()
        query_mock.sql = "SELECT *"
        query_mock.database = 1
        query_mock.schema = "superset"

        # do not apply msgpack serialization
        use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"]
        app.config["RESULTS_BACKEND_USE_MSGPACK"] = False
        serialized_payload = sql_lab._serialize_payload(payload, False)
        compressed = utils.zlib_compress(serialized_payload)
        mock_results_backend.get.return_value = compressed

        with mock.patch("superset.views.core.db") as mock_superset_db:
            mock_superset_db.session.query().filter_by().one_or_none.return_value = (
                query_mock
            )
            # get all results
            result_key = json.loads(self.get_resp("/superset/results/key/"))
            result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1"))

            self.assertEqual(result_key, expected_key)
            self.assertEqual(result_limited, expected_limited)

        # Restore the config flag mutated above.
        app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack
|
|
|
|
|
2019-08-27 17:23:40 -04:00
|
|
|
    def test_results_default_deserialization(self):
        """Legacy (JSON) payload round-trips through serialize/deserialize unchanged,
        without touching the query object."""
        use_new_deserialization = False
        data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
        cursor_descr = (
            ("a", "string"),
            ("b", "int"),
            ("c", "float"),
            ("d", "datetime"),
        )
        db_engine_spec = BaseEngineSpec()
        results = SupersetResultSet(data, cursor_descr, db_engine_spec)
        query = {
            "database_id": 1,
            "sql": "SELECT * FROM birth_names LIMIT 100",
            "status": QueryStatus.PENDING,
        }
        (
            serialized_data,
            selected_columns,
            all_columns,
            expanded_columns,
        ) = sql_lab._serialize_and_expand_data(
            results, db_engine_spec, use_new_deserialization
        )
        payload = {
            "query_id": 1,
            "status": QueryStatus.SUCCESS,
            "state": QueryStatus.SUCCESS,
            "data": serialized_data,
            "columns": all_columns,
            "selected_columns": selected_columns,
            "expanded_columns": expanded_columns,
            "query": query,
        }

        serialized_payload = sql_lab._serialize_payload(
            payload, use_new_deserialization
        )
        # Legacy path serializes to a JSON string (vs bytes for msgpack).
        self.assertIsInstance(serialized_payload, str)

        query_mock = mock.Mock()
        deserialized_payload = superset.views.utils._deserialize_results_payload(
            serialized_payload, query_mock, use_new_deserialization
        )

        self.assertDictEqual(deserialized_payload, payload)
        # Legacy deserialization never needs the query object.
        query_mock.assert_not_called()
|
|
|
|
|
|
|
|
    def test_results_msgpack_deserialization(self):
        """Msgpack payload round-trips through serialize/deserialize and invokes the
        engine spec's expand_data exactly once during deserialization."""
        use_new_deserialization = True
        data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
        cursor_descr = (
            ("a", "string"),
            ("b", "int"),
            ("c", "float"),
            ("d", "datetime"),
        )
        db_engine_spec = BaseEngineSpec()
        results = SupersetResultSet(data, cursor_descr, db_engine_spec)
        query = {
            "database_id": 1,
            "sql": "SELECT * FROM birth_names LIMIT 100",
            "status": QueryStatus.PENDING,
        }
        (
            serialized_data,
            selected_columns,
            all_columns,
            expanded_columns,
        ) = sql_lab._serialize_and_expand_data(
            results, db_engine_spec, use_new_deserialization
        )
        payload = {
            "query_id": 1,
            "status": QueryStatus.SUCCESS,
            "state": QueryStatus.SUCCESS,
            "data": serialized_data,
            "columns": all_columns,
            "selected_columns": selected_columns,
            "expanded_columns": expanded_columns,
            "query": query,
        }

        serialized_payload = sql_lab._serialize_payload(
            payload, use_new_deserialization
        )
        # Msgpack path serializes to bytes (vs str for the legacy JSON path).
        self.assertIsInstance(serialized_payload, bytes)

        # Wrap expand_data so the call can be asserted while keeping real behavior.
        with mock.patch.object(
            db_engine_spec, "expand_data", wraps=db_engine_spec.expand_data
        ) as expand_data:
            query_mock = mock.Mock()
            query_mock.database.db_engine_spec.expand_data = expand_data

            deserialized_payload = superset.views.utils._deserialize_results_payload(
                serialized_payload, query_mock, use_new_deserialization
            )
            # Expected data is what the DataFrame records look like post round-trip.
            df = results.to_pandas_df()
            payload["data"] = dataframe.df_to_records(df)

        self.assertDictEqual(deserialized_payload, payload)
        expand_data.assert_called_once()
|
|
|
|
|
2019-11-20 10:47:06 -05:00
|
|
|
    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        {"FOO": lambda x: 1},
        clear=True,
    )
    @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
    def test_feature_flag_serialization(self):
        """
        Functions in feature flags don't break bootstrap data serialization.
        """
        self.login()

        # Expected bootstrap fragment: the lambda-valued flag serialized with the
        # pessimistic serializer, then escaped the way the templates escape it.
        encoded = json.dumps(
            {"FOO": lambda x: 1, "super": "set"},
            default=utils.pessimistic_json_iso_dttm_ser,
        )
        # NOTE(review): quote replacements reconstructed as HTML entities
        # (&#39; / &#34;) — the extracted source was entity-decoded here.
        html_string = (
            html.escape(encoded, quote=False)
            .replace("'", "&#39;")
            .replace('"', "&#34;")
        )
        dash_id = db.session.query(Dashboard.id).first()[0]
        tbl_id = self.table_ids.get("wb_health_population")
        # Pages whose bootstrap data embeds the feature-flag dict.
        urls = [
            "/superset/sqllab",
            "/superset/welcome",
            f"/superset/dashboard/{dash_id}/",
            "/superset/profile/admin/",
            f"/superset/explore/table/{tbl_id}",
        ]
        for url in urls:
            data = self.get_resp(url)
            self.assertTrue(html_string in data)
|
2019-11-12 14:10:36 -05:00
|
|
|
|
2019-12-09 19:12:40 -05:00
|
|
|
    @mock.patch.dict(
        "superset.extensions.feature_flag_manager._feature_flags",
        {"SQLLAB_BACKEND_PERSISTENCE": True},
        clear=True,
    )
    def test_sqllab_backend_persistence_payload(self):
        """SQL Lab bootstrap payload includes only queries attached to a tab."""
        username = "admin"
        self.login(username)
        user_id = security_manager.find_user(username).id

        # create a tab
        data = {
            "queryEditor": json.dumps(
                {
                    "title": "Untitled Query 1",
                    "dbId": 1,
                    "schema": None,
                    "autorun": False,
                    "sql": "SELECT ...",
                    "queryLimit": 1000,
                }
            )
        }
        resp = self.get_json_resp("/tabstateview/", data=data)
        tab_state_id = resp["id"]

        # run a query in the created tab
        self.run_sql(
            "SELECT name FROM birth_names",
            "client_id_1",
            user_name=username,
            raise_on_error=True,
            sql_editor_id=str(tab_state_id),
        )
        # run an orphan query (no tab)
        self.run_sql(
            "SELECT name FROM birth_names",
            "client_id_2",
            user_name=username,
            raise_on_error=True,
        )

        # we should have only 1 query returned, since the second one is not
        # associated with any tabs
        payload = views.Superset._get_sqllab_tabs(user_id=user_id)
        self.assertEqual(len(payload["queries"]), 1)
|
|
|
|
|
2020-06-26 12:54:55 -04:00
|
|
|
def test_virtual_table_explore_visibility(self):
|
|
|
|
# test that default visibility it set to True
|
|
|
|
database = utils.get_example_database()
|
|
|
|
self.assertEqual(database.allows_virtual_table_explore, True)
|
|
|
|
|
|
|
|
# test that visibility is disabled when extra is set to False
|
|
|
|
extra = database.get_extra()
|
|
|
|
extra["allows_virtual_table_explore"] = False
|
|
|
|
database.extra = json.dumps(extra)
|
|
|
|
self.assertEqual(database.allows_virtual_table_explore, False)
|
|
|
|
|
|
|
|
# test that visibility is enabled when extra is set to True
|
|
|
|
extra = database.get_extra()
|
|
|
|
extra["allows_virtual_table_explore"] = True
|
|
|
|
database.extra = json.dumps(extra)
|
|
|
|
self.assertEqual(database.allows_virtual_table_explore, True)
|
|
|
|
|
|
|
|
# test that visibility is not broken with bad values
|
|
|
|
extra = database.get_extra()
|
|
|
|
extra["allows_virtual_table_explore"] = "trash value"
|
|
|
|
database.extra = json.dumps(extra)
|
|
|
|
self.assertEqual(database.allows_virtual_table_explore, True)
|
|
|
|
|
2020-06-29 14:13:54 -04:00
|
|
|
def test_explore_database_id(self):
|
|
|
|
database = utils.get_example_database()
|
|
|
|
explore_database = utils.get_example_database()
|
|
|
|
|
|
|
|
# test that explore_database_id is the regular database
|
|
|
|
# id if none is set in the extra
|
|
|
|
self.assertEqual(database.explore_database_id, database.id)
|
|
|
|
|
|
|
|
# test that explore_database_id is correct if the extra is set
|
|
|
|
extra = database.get_extra()
|
|
|
|
extra["explore_database_id"] = explore_database.id
|
|
|
|
database.extra = json.dumps(extra)
|
|
|
|
self.assertEqual(database.explore_database_id, explore_database.id)
|
|
|
|
|
2020-08-14 13:58:24 -04:00
|
|
|
def test_get_column_names_from_metric(self):
|
|
|
|
simple_metric = {
|
|
|
|
"expressionType": utils.AdhocMetricExpressionType.SIMPLE.value,
|
|
|
|
"column": {"column_name": "my_col"},
|
|
|
|
"aggregate": "SUM",
|
|
|
|
"label": "My Simple Label",
|
|
|
|
}
|
|
|
|
assert utils.get_column_name_from_metric(simple_metric) == "my_col"
|
|
|
|
|
|
|
|
sql_metric = {
|
|
|
|
"expressionType": utils.AdhocMetricExpressionType.SQL.value,
|
|
|
|
"sqlExpression": "SUM(my_label)",
|
|
|
|
"label": "My SQL Label",
|
|
|
|
}
|
|
|
|
assert utils.get_column_name_from_metric(sql_metric) is None
|
|
|
|
assert utils.get_column_names_from_metrics([simple_metric, sql_metric]) == [
|
|
|
|
"my_col"
|
|
|
|
]
|
|
|
|
|
2021-03-15 18:46:51 -04:00
|
|
|
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
|
|
|
|
@mock.patch("superset.models.core.DB_CONNECTION_MUTATOR")
|
|
|
|
def test_explore_injected_exceptions(self, mock_db_connection_mutator):
|
|
|
|
"""
|
|
|
|
Handle injected exceptions from the db mutator
|
|
|
|
"""
|
|
|
|
# Assert we can handle a custom exception at the mutator level
|
|
|
|
exception = SupersetException("Error message")
|
|
|
|
mock_db_connection_mutator.side_effect = exception
|
|
|
|
slice = db.session.query(Slice).first()
|
|
|
|
url = f"/superset/explore/?form_data=%7B%22slice_id%22%3A%20{slice.id}%7D"
|
|
|
|
|
|
|
|
self.login()
|
|
|
|
data = self.get_resp(url)
|
|
|
|
self.assertIn("Error message", data)
|
|
|
|
|
|
|
|
# Assert we can handle a driver exception at the mutator level
|
|
|
|
exception = SQLAlchemyError("Error message")
|
|
|
|
mock_db_connection_mutator.side_effect = exception
|
|
|
|
slice = db.session.query(Slice).first()
|
|
|
|
url = f"/superset/explore/?form_data=%7B%22slice_id%22%3A%20{slice.id}%7D"
|
|
|
|
|
|
|
|
self.login()
|
|
|
|
data = self.get_resp(url)
|
|
|
|
self.assertIn("Error message", data)
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
|
|
|
|
@mock.patch("superset.models.core.DB_CONNECTION_MUTATOR")
|
|
|
|
def test_dashboard_injected_exceptions(self, mock_db_connection_mutator):
|
|
|
|
"""
|
|
|
|
Handle injected exceptions from the db mutator
|
|
|
|
"""
|
|
|
|
|
|
|
|
# Assert we can handle a custom excetion at the mutator level
|
|
|
|
exception = SupersetException("Error message")
|
|
|
|
mock_db_connection_mutator.side_effect = exception
|
|
|
|
dash = db.session.query(Dashboard).first()
|
|
|
|
url = f"/superset/dashboard/{dash.id}/"
|
|
|
|
|
|
|
|
self.login()
|
|
|
|
data = self.get_resp(url)
|
|
|
|
self.assertIn("Error message", data)
|
|
|
|
|
|
|
|
# Assert we can handle a driver exception at the mutator level
|
|
|
|
exception = SQLAlchemyError("Error message")
|
|
|
|
mock_db_connection_mutator.side_effect = exception
|
|
|
|
dash = db.session.query(Dashboard).first()
|
|
|
|
url = f"/superset/dashboard/{dash.id}/"
|
|
|
|
|
|
|
|
self.login()
|
|
|
|
data = self.get_resp(url)
|
|
|
|
self.assertIn("Error message", data)
|
|
|
|
|
2021-11-19 12:58:12 -05:00
|
|
|
@mock.patch("superset.sql_lab.cancel_query")
|
|
|
|
@mock.patch("superset.views.core.db.session")
|
|
|
|
def test_stop_query_not_implemented(
|
|
|
|
self, mock_superset_db_session, mock_sql_lab_cancel_query
|
|
|
|
):
|
|
|
|
"""
|
|
|
|
Handles stop query when the DB engine spec does not
|
|
|
|
have a cancel query method.
|
|
|
|
"""
|
|
|
|
form_data = {"client_id": "foo"}
|
|
|
|
query_mock = mock.Mock()
|
|
|
|
query_mock.client_id = "foo"
|
|
|
|
query_mock.status = QueryStatus.RUNNING
|
|
|
|
self.login(username="admin")
|
|
|
|
mock_superset_db_session.query().filter_by().one().return_value = query_mock
|
|
|
|
mock_sql_lab_cancel_query.return_value = False
|
|
|
|
rv = self.client.post(
|
|
|
|
"/superset/stop_query/", data={"form_data": json.dumps(form_data)},
|
|
|
|
)
|
|
|
|
|
|
|
|
assert rv.status_code == 422
|
|
|
|
|
2017-09-27 20:51:04 -04:00
|
|
|
|
2019-06-25 16:34:48 -04:00
|
|
|
if __name__ == "__main__":
|
2015-09-26 18:55:33 -04:00
|
|
|
unittest.main()
|