fix: Ensure verbose mapping exists for SQL Lab Query model (#23597)

This commit is contained in:
John Bodley 2023-04-07 06:34:00 +12:00 committed by GitHub
parent e2e0ad5ef5
commit 83df609782
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 17 additions and 11 deletions

View File

@@ -18,7 +18,7 @@ from __future__ import annotations
import json
import logging
from typing import Any, Dict, Optional, TYPE_CHECKING
from typing import Any, Dict, Optional, TYPE_CHECKING, Union
import simplejson
from flask import current_app, make_response, request, Response
@@ -44,6 +44,7 @@ from superset.connectors.base.models import BaseDatasource
from superset.dao.exceptions import DatasourceNotFound
from superset.exceptions import QueryObjectValidationError
from superset.extensions import event_logger
from superset.models.sql_lab import Query
from superset.utils.async_query_manager import AsyncQueryTokenException
from superset.utils.core import create_zip, get_user_id, json_int_dttm_ser
from superset.views.base import CsvResponse, generate_download_headers, XlsxResponse
@@ -342,7 +343,7 @@ class ChartDataRestApi(ChartRestApi):
self,
result: Dict[Any, Any],
form_data: Optional[Dict[str, Any]] = None,
datasource: Optional[BaseDatasource] = None,
datasource: Optional[Union[BaseDatasource, Query]] = None,
) -> Response:
result_type = result["query_context"].result_type
result_format = result["query_context"].result_format
@@ -405,7 +406,7 @@ class ChartDataRestApi(ChartRestApi):
command: ChartDataCommand,
force_cached: bool = False,
form_data: Optional[Dict[str, Any]] = None,
datasource: Optional[BaseDatasource] = None,
datasource: Optional[Union[BaseDatasource, Query]] = None,
) -> Response:
try:
result = command.run(force_cached=force_cached)

View File

@@ -27,7 +27,7 @@ for these chart types.
"""
from io import StringIO
from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING
from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
import pandas as pd
from flask_babel import gettext as __
@@ -42,6 +42,7 @@ from superset.utils.core import (
if TYPE_CHECKING:
from superset.connectors.base.models import BaseDatasource
from superset.models.sql_lab import Query
def get_column_key(label: Tuple[str, ...], metrics: List[str]) -> Tuple[Any, ...]:
@@ -223,7 +224,7 @@ pivot_v2_aggfunc_map = {
def pivot_table_v2(
df: pd.DataFrame,
form_data: Dict[str, Any],
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union["BaseDatasource", "Query"]] = None,
) -> pd.DataFrame:
"""
Pivot table v2.
@@ -249,7 +250,7 @@ def pivot_table_v2(
def pivot_table(
df: pd.DataFrame,
form_data: Dict[str, Any],
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union["BaseDatasource", "Query"]] = None,
) -> pd.DataFrame:
"""
Pivot table (v1).
@@ -285,7 +286,9 @@ def pivot_table(
def table(
df: pd.DataFrame,
form_data: Dict[str, Any],
datasource: Optional["BaseDatasource"] = None, # pylint: disable=unused-argument
datasource: Optional[ # pylint: disable=unused-argument
Union["BaseDatasource", "Query"]
] = None,
) -> pd.DataFrame:
"""
Table.
@@ -314,7 +317,7 @@ post_processors = {
def apply_post_process(
result: Dict[Any, Any],
form_data: Optional[Dict[str, Any]] = None,
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union["BaseDatasource", "Query"]] = None,
) -> Dict[Any, Any]:
form_data = form_data or {}

View File

@@ -246,6 +246,7 @@ class Query(
"database": {"id": self.database_id, "backend": self.database.backend},
"order_by_choices": order_by_choices,
"schema": self.schema,
"verbose_map": {},
}
def raise_for_access(self) -> None:

View File

@@ -126,6 +126,7 @@ except ImportError:
if TYPE_CHECKING:
from superset.connectors.base.models import BaseColumn, BaseDatasource
from superset.models.sql_lab import Query
logging.getLogger("MARKDOWN").setLevel(logging.INFO)
logger = logging.getLogger(__name__)
@@ -1711,7 +1712,7 @@ def get_column_names_from_metrics(metrics: List[Metric]) -> List[str]:
def extract_dataframe_dtypes(
df: pd.DataFrame,
datasource: Optional["BaseDatasource"] = None,
datasource: Optional[Union[BaseDatasource, Query]] = None,
) -> List[GenericDataType]:
"""Serialize pandas/numpy dtypes to generic types"""
@@ -1731,13 +1732,13 @@ def extract_dataframe_dtypes(
if datasource:
for column in datasource.columns:
if isinstance(column, dict):
columns_by_name[column.get("column_name")] = column
columns_by_name[column.get("column_name")] = column # type: ignore
else:
columns_by_name[column.column_name] = column
generic_types: List[GenericDataType] = []
for column in df.columns:
column_object = columns_by_name.get(column)
column_object = columns_by_name.get(column) # type: ignore
series = df[column]
inferred_type = infer_dtype(series)
if isinstance(column_object, dict):