mirror of
https://github.com/apache/superset.git
synced 2024-09-17 11:09:47 -04:00
4d329071a1
* Generate JWT in Flask app * Refactor chart data API query logic, add JWT validation and async worker * Add redis stream implementation, refactoring * Add chart data cache endpoint, refactor QueryContext caching * Typing, linting, refactoring * pytest fixes and openapi schema update * Enforce caching be configured for async query init * Async query processing for explore_json endpoint * Add /api/v1/async_event endpoint * Async frontend for dashboards [WIP] * Chart async error message support, refactoring * Abstract asyncEvent middleware * Async chart loading for Explore * Pylint fixes * asyncEvent middleware -> TypeScript, JS linting * Chart data API: enforce forced_cache, add tests * Add tests for explore_json endpoints * Add test for chart data cache enpoint (no login) * Consolidate set_and_log_cache and add STORE_CACHE_KEYS_IN_METADATA_DB flag * Add tests for tasks/async_queries and address PR comments * Bypass non-JSON result formats for async queries * Add tests for redux middleware * Remove debug statement Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com> * Skip force_cached if no queryObj * SunburstViz: don't modify self.form_data * Fix failing annotation test * Resolve merge/lint issues * Reduce polling delay * Fix new getClientErrorObject reference * Fix flakey unit tests * /api/v1/async_event: increment redis stream ID, add tests * PR feedback: refactoring, configuration * Fixup: remove debugging * Fix typescript errors due to redux upgrade * Update UPDATING.md * Fix failing py tests * asyncEvent_spec.js -> asyncEvent_spec.ts * Refactor flakey Python 3.7 mock assertions * Fix another shared state issue in Py tests * Use 'sub' claim in JWT for user_id * Refactor async middleware config * Fixup: restore FeatureFlag boolean type Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com>
98 lines · 3.8 KiB · Python
# Licensed to the Apache Software Foundation (ASF) under one
|
|
# or more contributor license agreements. See the NOTICE file
|
|
# distributed with this work for additional information
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
# to you under the Apache License, Version 2.0 (the
|
|
# "License"); you may not use this file except in compliance
|
|
# with the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing,
|
|
# software distributed under the License is distributed on an
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
# KIND, either express or implied. See the License for the
|
|
# specific language governing permissions and limitations
|
|
# under the License.
|
|
"""Unit tests for Superset with caching"""
|
|
import json
|
|
|
|
from superset import app, db
|
|
from superset.extensions import cache_manager
|
|
from superset.utils.core import QueryStatus
|
|
|
|
from .base_tests import SupersetTestCase
|
|
|
|
|
|
class TestCache(SupersetTestCase):
    """Tests for chart-data caching behavior of the ``explore_json`` endpoint.

    Each test overrides ``DATA_CACHE_CONFIG`` (and, where relevant,
    ``CACHE_DEFAULT_TIMEOUT``) on the live Flask app, re-initializes the
    cache manager, and restores the original configuration afterwards.
    """

    def setUp(self):
        # Authenticate and start from empty caches so results from one
        # test cannot leak into another.
        self.login(username="admin")
        cache_manager.cache.clear()
        cache_manager.data_cache.clear()

    def tearDown(self):
        # Leave both caches empty for whatever runs next.
        cache_manager.cache.clear()
        cache_manager.data_cache.clear()

    def test_no_data_cache(self):
        """With a null data cache, repeated requests are never served from cache."""
        data_cache_config = app.config["DATA_CACHE_CONFIG"]
        app.config["DATA_CACHE_CONFIG"] = {"CACHE_TYPE": "null"}
        cache_manager.init_app(app)

        try:
            slc = self.get_slice("Girls", db.session)
            json_endpoint = "/superset/explore_json/{}/{}/".format(
                slc.datasource_type, slc.datasource_id
            )
            resp = self.get_json_resp(
                json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
            )
            # Second, identical request: with CACHE_TYPE "null" it must also
            # miss the cache.
            resp_from_cache = self.get_json_resp(
                json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
            )
            self.assertFalse(resp["is_cached"])
            self.assertFalse(resp_from_cache["is_cached"])
        finally:
            # Restore DATA_CACHE_CONFIG *and* re-init the cache manager so the
            # null cache does not leak into other tests. (Previously only the
            # config was restored, leaving cache_manager on the null cache.)
            app.config["DATA_CACHE_CONFIG"] = data_cache_config
            cache_manager.init_app(app)

    def test_slice_data_cache(self):
        """Chart data lands in ``data_cache`` (with its own timeout), not ``cache``."""
        # Override cache config: global default timeout 100, data cache
        # timeout 10, so the two are distinguishable in assertions below.
        data_cache_config = app.config["DATA_CACHE_CONFIG"]
        cache_default_timeout = app.config["CACHE_DEFAULT_TIMEOUT"]
        app.config["CACHE_DEFAULT_TIMEOUT"] = 100
        app.config["DATA_CACHE_CONFIG"] = {
            "CACHE_TYPE": "simple",
            "CACHE_DEFAULT_TIMEOUT": 10,
            "CACHE_KEY_PREFIX": "superset_data_cache",
        }
        cache_manager.init_app(app)

        try:
            slc = self.get_slice("Boys", db.session)
            json_endpoint = "/superset/explore_json/{}/{}/".format(
                slc.datasource_type, slc.datasource_id
            )
            resp = self.get_json_resp(
                json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
            )
            resp_from_cache = self.get_json_resp(
                json_endpoint, {"form_data": json.dumps(slc.viz.form_data)}
            )
            self.assertFalse(resp["is_cached"])
            self.assertTrue(resp_from_cache["is_cached"])
            # The slice has no explicit cache_timeout, so it should fall back
            # to DATA_CACHE_CONFIG's CACHE_DEFAULT_TIMEOUT (10), not the
            # global CACHE_DEFAULT_TIMEOUT (100).
            self.assertEqual(resp_from_cache["cache_timeout"], 10)
            self.assertEqual(resp_from_cache["status"], QueryStatus.SUCCESS)
            self.assertEqual(resp["data"], resp_from_cache["data"])
            self.assertEqual(resp["query"], resp_from_cache["query"])
            # Entry should exist in `data_cache` ...
            self.assertEqual(
                cache_manager.data_cache.get(resp_from_cache["cache_key"])["query"],
                resp_from_cache["query"],
            )
            # ... and should not exist in the generic `cache`.
            self.assertIsNone(cache_manager.cache.get(resp_from_cache["cache_key"]))
        finally:
            # Reset cache config even if an assertion above failed.
            app.config["DATA_CACHE_CONFIG"] = data_cache_config
            app.config["CACHE_DEFAULT_TIMEOUT"] = cache_default_timeout
            cache_manager.init_app(app)