mirror of
https://github.com/apache/superset.git
synced 2024-09-17 11:09:47 -04:00
4ab89dbcf7
* Sparkline dates aren't formatting in Time Series Table (#6976) * Exclude venv for python linter to ignore * Fix NaN error * Fix the white background shown in SQL editor on drag (#7021) This PR sets the background-color css property on `.ace_scroller` instead of `.ace_content` to prevent the white background shown during resizing of the SQL editor before drag ends. * Show tooltip with time frame (#6979) * Fix time filter control (#6978) * Enhancement of query context and object. (#6962) * added more functionalities for query context and object. * fixed cache logic * added default value for groupby * updated comments and removed print (cherry picked from commitd5b9795f87
) * [fix] /superset/slice/id url is too long (#6989) (cherry picked from commit6a4d507ab6
) * [WIP] fix user specified JSON metadata not updating dashboard on refresh (#7027) (cherry picked from commitcc58f0e661
) * feat: add ability to change font size in big number (#7003) * Add ability to change font sizes in Big Number * rename big number to header * Add comment to clarify font size values * Allow LIMIT to be specified in parameters (#7052) * [fix] Cursor jumping when editing chart and dashboard titles (#7038) (cherry picked from commitfc1770f7b7
) * Changing time table viz to pass formatTime a date (#7020) (cherry picked from commit7f3c145b1f
) * [db-engine-spec] Aligning Hive/Presto partition logic (#7007) (cherry picked from commit05be866117
) * [fix] explore chart from dashboard missed slice title (#7046) (cherry picked from commita6d48d4052
) * fix inaccurate data calculation with data rolling and contribution (#7035) (cherry picked from commit 0782e831cd
) * Adding warning message for sqllab save query (#7028) (cherry picked from commitead3d48133
) * [datasource] Ensuring consistent behavior of datasource editing/saving. (#7037) * Update datasource.py * Update datasource.py (cherry picked from commitc771625f10
) * [csv-upload] Fixing message encoding (#6971) (cherry picked from commit48431ab5b9
) * [sql-parse] Fixing LIMIT exceptions (#6963) (cherry picked from commit3e076cb60b
) * Adding custom control overrides (#6956) * Adding extraOverrides to line chart * Updating extraOverrides to fit with more cases * Moving extraOverrides to index.js * Removing webpack-merge in package.json * Fixing metrics control clearing metric (cherry picked from commite6194051f4
) * [sqlparse] Fixing table name extraction for ill-defined query (#7029) (cherry picked from commit07c340cf82
) * [missing values] Removing replacing missing values (#4905) (cherry picked from commit61add606ca
) * [SQL Lab] Improved query and results tabs rendering reliability (#7082) closes #7080 (cherry picked from commit9b58e9f492
) * Fix filter_box migration PR #6523 (#7066) * Fix filter_box migration PR #6523 * Fix druid-related bug (cherry picked from commitb210742ad2
) * SQL editor layout makeover (#7102) This PR includes the following layout and css tweaks: - Using flex to layout the north and south sub panes of query pane so resizing works properly in both Chrome and Firefox - Removal of necessary wrapper divs and tweaking of css in sql lab so we can scroll to the bottom of both the table list and the results pane - Make sql lab's content not overflow vertically and layout the query result area to eliminate double scroll bars - css tweaks on the basic.html page so the loading animation appears in the center of the page across the board (cherry picked from commit71f1bbd2ec
) * [forms] Fix handling of NULLs (cherry picked from commite83a07d3df
) * handle null column_name in sqla and druid models (cherry picked from commit2ff721ae07
) * Use metric name instead of metric in filter box (#7106) (cherry picked from commit003364e74e
) * Bump python lib croniter to an existing version (#7132) Package maintainers should really never delete packages, but it appears this happened with croniter and resulted in breaking our builds. This PR bumps to a more recent existing version of the library (cherry picked from commit215ed392a1
) * Revert PR #6933 (#7162) * Celery worker for warming up cache * Remove testing changes * Add documentation * Fix lint * WIP dashboard filters * Use new cache so it works with dashboards * Add more unit tests, fix old ones * Fix flake8 and docs * Sparkline dates aren't formatting in Time Series Table (#6976) * Exclude venv for python linter to ignore * Fix NaN error * Changing time table viz to pass formatTime a date (#7020) (cherry picked from commit7f3c145b1f
) * SQL editor layout makeover (#7102) This PR includes the following layout and css tweaks: - Using flex to layout the north and south sub panes of query pane so resizing works properly in both Chrome and Firefox - Removal of necessary wrapper divs and tweaking of css in sql lab so we can scroll to the bottom of both the table list and the results pane - Make sql lab's content not overflow vertically and layout the query result area to eliminate double scroll bars - css tweaks on the basic.html page so the loading animation appears in the center of the page across the board (cherry picked from commit71f1bbd2ec
) * Celery worker for warming up cache * Remove testing changes * Add documentation * Fix lint * WIP dashboard filters * Use new cache so it works with dashboards * Add more unit tests, fix old ones * Fix flake8 and docs * Fix bad merge and pylint
237 lines
7.7 KiB
Python
237 lines
7.7 KiB
Python
# Licensed to the Apache Software Foundation (ASF) under one
|
|
# or more contributor license agreements. See the NOTICE file
|
|
# distributed with this work for additional information
|
|
# regarding copyright ownership. The ASF licenses this file
|
|
# to you under the Apache License, Version 2.0 (the
|
|
# "License"); you may not use this file except in compliance
|
|
# with the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing,
|
|
# software distributed under the License is distributed on an
|
|
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
# KIND, either express or implied. See the License for the
|
|
# specific language governing permissions and limitations
|
|
# under the License.
|
|
"""Unit tests for Superset cache warmup"""
|
|
import json
|
|
from unittest.mock import MagicMock
|
|
|
|
from superset import db
|
|
from superset.models.core import Log
|
|
from superset.models.tags import (
|
|
get_tag,
|
|
ObjectTypes,
|
|
TaggedObject,
|
|
TagTypes,
|
|
)
|
|
from superset.tasks.cache import (
|
|
DashboardTagsStrategy,
|
|
DummyStrategy,
|
|
get_form_data,
|
|
TopNDashboardsStrategy,
|
|
)
|
|
from .base_tests import SupersetTestCase
|
|
|
|
|
|
# Base URL of the explore_json endpoint that the warm-up strategies are
# expected to generate; assumes the test webserver listens on
# 0.0.0.0:8081 -- TODO confirm against the test config (WEBDRIVER_BASEURL).
TEST_URL = 'http://0.0.0.0:8081/superset/explore_json'
|
|
|
|
|
|
class CacheWarmUpTests(SupersetTestCase):
    """Unit tests for Superset cache warm-up.

    Covers two areas of ``superset.tasks.cache``:

    * ``get_form_data`` -- merging a dashboard's ``default_filters``
      (honoring ``filter_immune_slices`` and
      ``filter_immune_slice_fields``) into a chart's form data; and
    * the URL-generating strategies ``DummyStrategy``,
      ``TopNDashboardsStrategy`` and ``DashboardTagsStrategy``.

    NOTE: the redundant ``__init__`` that only delegated to ``super()``
    (pylint ``useless-super-delegation``) was removed; the inherited
    constructor is used unchanged.
    """

    def test_get_form_data_chart_only(self):
        """Without a dashboard, form data contains only the slice id."""
        chart_id = 1
        result = get_form_data(chart_id, None)
        expected = {'slice_id': chart_id}
        self.assertEqual(result, expected)

    def test_get_form_data_no_dashboard_metadata(self):
        """A dashboard with ``json_metadata=None`` adds no extra filters."""
        chart_id = 1
        dashboard = MagicMock()
        dashboard.json_metadata = None
        result = get_form_data(chart_id, dashboard)
        expected = {'slice_id': chart_id}
        self.assertEqual(result, expected)

    def test_get_form_data_immune_slice(self):
        """A chart listed in ``filter_immune_slices`` ignores all defaults."""
        chart_id = 1
        filter_box_id = 2
        dashboard = MagicMock()
        dashboard.json_metadata = json.dumps({
            'filter_immune_slices': [chart_id],
            'default_filters': json.dumps({
                str(filter_box_id): {'name': ['Alice', 'Bob']},
            }),
        })
        result = get_form_data(chart_id, dashboard)
        expected = {'slice_id': chart_id}
        self.assertEqual(result, expected)

    def test_get_form_data_no_default_filters(self):
        """Metadata without ``default_filters`` yields no extra filters."""
        chart_id = 1
        dashboard = MagicMock()
        dashboard.json_metadata = json.dumps({})
        result = get_form_data(chart_id, dashboard)
        expected = {'slice_id': chart_id}
        self.assertEqual(result, expected)

    def test_get_form_data_immune_fields(self):
        """Fields named in ``filter_immune_slice_fields`` are excluded,
        while the remaining default filters still apply."""
        chart_id = 1
        filter_box_id = 2
        dashboard = MagicMock()
        dashboard.json_metadata = json.dumps({
            'default_filters': json.dumps({
                str(filter_box_id): {
                    'name': ['Alice', 'Bob'],
                    '__time_range': '100 years ago : today',
                },
            }),
            'filter_immune_slice_fields': {chart_id: ['__time_range']},
        })
        result = get_form_data(chart_id, dashboard)
        expected = {
            'slice_id': chart_id,
            'extra_filters': [
                {
                    'col': 'name',
                    'op': 'in',
                    'val': ['Alice', 'Bob'],
                },
            ],
        }
        self.assertEqual(result, expected)

    def test_get_form_data_no_extra_filters(self):
        """When every default filter field is immune, no ``extra_filters``
        key is emitted at all."""
        chart_id = 1
        filter_box_id = 2
        dashboard = MagicMock()
        dashboard.json_metadata = json.dumps({
            'default_filters': json.dumps({
                str(filter_box_id): {
                    '__time_range': '100 years ago : today',
                },
            }),
            'filter_immune_slice_fields': {chart_id: ['__time_range']},
        })
        result = get_form_data(chart_id, dashboard)
        expected = {'slice_id': chart_id}
        self.assertEqual(result, expected)

    def test_get_form_data(self):
        """All default filters (including ``__time_range``) are converted
        into ``extra_filters`` entries when nothing is immune."""
        chart_id = 1
        filter_box_id = 2
        dashboard = MagicMock()
        dashboard.json_metadata = json.dumps({
            'default_filters': json.dumps({
                str(filter_box_id): {
                    'name': ['Alice', 'Bob'],
                    '__time_range': '100 years ago : today',
                },
            }),
        })
        result = get_form_data(chart_id, dashboard)
        expected = {
            'slice_id': chart_id,
            'extra_filters': [
                {
                    'col': 'name',
                    'op': 'in',
                    'val': ['Alice', 'Bob'],
                },
                {
                    'col': '__time_range',
                    'op': 'in',
                    'val': '100 years ago : today',
                },
            ],
        }
        self.assertEqual(result, expected)

    def test_dummy_strategy(self):
        """``DummyStrategy`` returns a warm-up URL for every chart in the
        test fixtures."""
        strategy = DummyStrategy()
        result = sorted(strategy.get_urls())
        # URL-encoded form data, e.g. {'slice_id': 1}
        expected = [
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+1%7D',
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+17%7D',
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+18%7D',
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+19%7D',
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+30%7D',
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+31%7D',
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+8%7D',
        ]
        self.assertEqual(result, expected)

    def test_top_n_dashboards_strategy(self):
        """``TopNDashboardsStrategy(1)`` warms only the charts of the single
        most-visited dashboard."""
        # Clear the action log, then make dashboard 3 the top-visited one.
        db.session.query(Log).delete()
        self.login(username='admin')
        for _ in range(10):
            self.client.get('/superset/dashboard/3/')

        strategy = TopNDashboardsStrategy(1)
        result = sorted(strategy.get_urls())
        expected = [
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+31%7D',
        ]
        self.assertEqual(result, expected)

    def test_dashboard_tags(self):
        """``DashboardTagsStrategy`` collects chart URLs from dashboards
        tagged with the given tags, plus directly-tagged charts."""
        strategy = DashboardTagsStrategy(['tag1'])

        # No objects tagged yet -> no URLs.
        result = sorted(strategy.get_urls())
        expected = []
        self.assertEqual(result, expected)

        # tag dashboard 3 with `tag1`
        tag1 = get_tag('tag1', db.session, TagTypes.custom)
        object_id = 3
        tagged_object = TaggedObject(
            tag_id=tag1.id,
            object_id=object_id,
            object_type=ObjectTypes.dashboard,
        )
        db.session.add(tagged_object)
        db.session.commit()

        result = sorted(strategy.get_urls())
        expected = [
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+31%7D',
        ]
        self.assertEqual(result, expected)

        # A different, unused tag still yields nothing.
        strategy = DashboardTagsStrategy(['tag2'])

        result = sorted(strategy.get_urls())
        expected = []
        self.assertEqual(result, expected)

        # tag chart 30 with `tag2`
        tag2 = get_tag('tag2', db.session, TagTypes.custom)
        object_id = 30
        tagged_object = TaggedObject(
            tag_id=tag2.id,
            object_id=object_id,
            object_type=ObjectTypes.chart,
        )
        db.session.add(tagged_object)
        db.session.commit()

        result = sorted(strategy.get_urls())
        expected = [
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+30%7D',
        ]
        self.assertEqual(result, expected)

        # Both tags together: union of the dashboard's and the chart's URLs.
        strategy = DashboardTagsStrategy(['tag1', 'tag2'])

        result = sorted(strategy.get_urls())
        expected = [
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+30%7D',
            f'{TEST_URL}/?form_data=%7B%27slice_id%27%3A+31%7D',
        ]
        self.assertEqual(result, expected)