fix: respect chart cache timeout setting (#21637)

This commit is contained in:
Mayur 2022-09-29 19:56:19 +05:30 committed by GitHub
parent ab7cfec975
commit 5ea9249059
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 109 additions and 7 deletions

View File

@ -27,6 +27,7 @@ from superset.common.query_context_processor import (
QueryContextProcessor,
)
from superset.common.query_object import QueryObject
from superset.models.slice import Slice
if TYPE_CHECKING:
from superset.connectors.base.models import BaseDatasource
@ -46,6 +47,7 @@ class QueryContext:
enforce_numerical_metrics: ClassVar[bool] = True
datasource: BaseDatasource
slice_: Optional[Slice] = None
queries: List[QueryObject]
form_data: Optional[Dict[str, Any]]
result_type: ChartDataResultType
@ -64,6 +66,7 @@ class QueryContext:
*,
datasource: BaseDatasource,
queries: List[QueryObject],
slice_: Optional[Slice],
form_data: Optional[Dict[str, Any]],
result_type: ChartDataResultType,
result_format: ChartDataResultFormat,
@ -72,6 +75,7 @@ class QueryContext:
cache_values: Dict[str, Any],
) -> None:
self.datasource = datasource
self.slice_ = slice_
self.result_type = result_type
self.result_format = result_format
self.queries = queries
@ -98,6 +102,8 @@ class QueryContext:
def get_cache_timeout(self) -> Optional[int]:
if self.custom_cache_timeout is not None:
return self.custom_cache_timeout
if self.slice_ and self.slice_.cache_timeout is not None:
return self.slice_.cache_timeout
if self.datasource.cache_timeout is not None:
return self.datasource.cache_timeout
if hasattr(self.datasource, "database"):

View File

@ -19,10 +19,12 @@ from __future__ import annotations
from typing import Any, Dict, List, Optional, TYPE_CHECKING
from superset import app, db
from superset.charts.dao import ChartDAO
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.common.query_context import QueryContext
from superset.common.query_object_factory import QueryObjectFactory
from superset.datasource.dao import DatasourceDAO
from superset.models.slice import Slice
from superset.utils.core import DatasourceDict, DatasourceType
if TYPE_CHECKING:
@ -55,6 +57,11 @@ class QueryContextFactory: # pylint: disable=too-few-public-methods
datasource_model_instance = None
if datasource:
datasource_model_instance = self._convert_to_model(datasource)
slice_ = None
if form_data and form_data.get("slice_id") is not None:
slice_ = self._get_slice(form_data.get("slice_id"))
result_type = result_type or ChartDataResultType.FULL
result_format = result_format or ChartDataResultFormat.JSON
queries_ = [
@ -72,6 +79,7 @@ class QueryContextFactory: # pylint: disable=too-few-public-methods
return QueryContext(
datasource=datasource_model_instance,
queries=queries_,
slice_=slice_,
form_data=form_data,
result_type=result_type,
result_format=result_format,
@ -88,3 +96,6 @@ class QueryContextFactory: # pylint: disable=too-few-public-methods
datasource_type=DatasourceType(datasource["type"]),
datasource_id=int(datasource["id"]),
)
def _get_slice(self, slice_id: Any) -> Optional[Slice]:
    """Fetch the chart (slice) with the given id via the DAO.

    Returns None when no chart with that id exists.
    """
    chart = ChartDAO.find_by_id(slice_id)
    return chart

View File

@ -21,7 +21,7 @@ import unittest
import copy
from datetime import datetime
from io import BytesIO
from typing import Any, Dict, Optional
from typing import Any, Dict, Optional, List
from unittest import mock
from zipfile import ZipFile
@ -38,8 +38,12 @@ from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_data,
)
from tests.integration_tests.test_app import app
from tests.integration_tests.fixtures.energy_dashboard import (
load_energy_table_with_slice,
load_energy_table_data,
)
import pytest
from superset.models.slice import Slice
from superset.charts.data.commands.get_data_command import ChartDataCommand
from superset.connectors.sqla.models import TableColumn, SqlaTable
@ -976,3 +980,80 @@ def test_data_cache_default_timeout(
):
rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
assert rv.json["result"][0]["cache_timeout"] == 3456
def test_chart_cache_timeout(
    test_client,
    login_as_admin,
    physical_query_context,
    load_energy_table_with_slice: List[Slice],
):
    """A chart-level cache_timeout must take precedence over the datasource's."""
    # should override datasource cache timeout
    slice_with_cache_timeout = load_energy_table_with_slice[0]
    slice_with_cache_timeout.cache_timeout = 20
    db.session.merge(slice_with_cache_timeout)
    # Give the underlying datasource a different, larger timeout so the
    # assertion below can only pass if the slice's value actually wins.
    datasource: SqlaTable = (
        db.session.query(SqlaTable)
        .filter(SqlaTable.id == physical_query_context["datasource"]["id"])
        .first()
    )
    datasource.cache_timeout = 1254
    db.session.merge(datasource)
    db.session.commit()
    # Attach the slice id to form_data so the query context can look the chart up.
    physical_query_context["form_data"] = {"slice_id": slice_with_cache_timeout.id}
    rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
    assert rv.json["result"][0]["cache_timeout"] == 20
# Patch the app-level default timeout so a fall-through to the default
# (1010) would make the assertion below fail loudly.
@mock.patch(
    "superset.common.query_context_processor.config",
    {
        **app.config,
        "DATA_CACHE_CONFIG": {
            **app.config["DATA_CACHE_CONFIG"],
            "CACHE_DEFAULT_TIMEOUT": 1010,
        },
    },
)
def test_chart_cache_timeout_not_present(
    test_client, login_as_admin, physical_query_context
):
    """With no slice_id in form_data, the datasource's cache_timeout is used."""
    # should use datasource cache, if it's present
    datasource: SqlaTable = (
        db.session.query(SqlaTable)
        .filter(SqlaTable.id == physical_query_context["datasource"]["id"])
        .first()
    )
    datasource.cache_timeout = 1980
    db.session.merge(datasource)
    db.session.commit()
    rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
    assert rv.json["result"][0]["cache_timeout"] == 1980
@mock.patch(
    "superset.common.query_context_processor.config",
    {
        **app.config,
        "DATA_CACHE_CONFIG": {
            **app.config["DATA_CACHE_CONFIG"],
            "CACHE_DEFAULT_TIMEOUT": 1010,
        },
    },
)
def test_chart_cache_timeout_chart_not_found(
    test_client, login_as_admin, physical_query_context
):
    """A slice_id that matches no chart falls back to the default timeout."""
    # slice id 0 never exists, so the lookup yields no chart
    physical_query_context["form_data"] = {"slice_id": 0}
    response = test_client.post(CHART_DATA_URI, json=physical_query_context)
    result = response.json["result"][0]
    assert result["cache_timeout"] == 1010

View File

@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
import random
from typing import Dict, Set
from typing import Dict, List, Set
import pandas as pd
import pytest
@ -59,8 +59,8 @@ def load_energy_table_data():
@pytest.fixture()
def load_energy_table_with_slice(load_energy_table_data):
    """Create the energy table and its charts; yield the created slices.

    Tears the fixtures down again after the test finishes.
    """
    with app.app_context():
        created_slices = _create_energy_table()
        yield created_slices
        _cleanup()
@ -69,7 +69,7 @@ def _get_dataframe():
return pd.DataFrame.from_dict(data)
def _create_energy_table():
def _create_energy_table() -> List[Slice]:
table = create_table_metadata(
table_name=ENERGY_USAGE_TBL_NAME,
database=get_example_database(),
@ -86,13 +86,17 @@ def _create_energy_table():
db.session.commit()
table.fetch_metadata()
slices = []
for slice_data in _get_energy_slices():
_create_and_commit_energy_slice(
slice = _create_and_commit_energy_slice(
table,
slice_data["slice_title"],
slice_data["viz_type"],
slice_data["params"],
)
slices.append(slice)
return slices
def _create_and_commit_energy_slice(