chore: remove SIP-38 feature flag (#12894)

Author: Ville Brofeldt
Date:   2021-02-03 09:41:44 +02:00, committed by GitHub
Commit: 2f6d1ff4cd (parent fd2d87340b)
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)

7 changed files with 16 additions and 2888 deletions
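The flag gated the unreleased SIP-38 viz re-architecture; this commit removes every check of it, makes the legacy `groupby` handling unconditional, and (per the suppressed diff at the end) evidently deletes the re-architected module itself. For reference, the gating construct deleted throughout the hunks below followed this shape (condensed from the diffs, not new code):

from superset import is_feature_enabled

if is_feature_enabled("SIP_38_VIZ_REARCHITECTURE"):
    from superset import viz_sip38 as viz  # re-architected module, deleted here
else:
    from superset import viz  # type: ignore  # legacy module, now the only path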

superset/common/query_object.py

@@ -24,7 +24,7 @@ import simplejson as json
 from flask_babel import gettext as _
 from pandas import DataFrame
-from superset import app, is_feature_enabled
+from superset import app
 from superset.exceptions import QueryObjectValidationError
 from superset.typing import Metric
 from superset.utils import pandas_postprocessing
@@ -114,7 +114,6 @@ class QueryObject:
         columns = columns or []
         groupby = groupby or []
         extras = extras or {}
-        is_sip_38 = is_feature_enabled("SIP_38_VIZ_REARCHITECTURE")
         self.annotation_layers = [
             layer
             for layer in annotation_layers
@@ -169,16 +168,7 @@ class QueryObject:
         self.extras["time_range_endpoints"] = get_time_range_endpoints(form_data={})
         self.columns = columns
-        if is_sip_38:
-            if groupby:
-                logger.warning(
-                    "The field `groupby` is deprecated. Viz plugins should "
-                    "pass all selectables via the `columns` field"
-                )
-                self.columns += groupby
-        else:
-            self.groupby = groupby or []
+        self.groupby = groupby or []
         self.orderby = orderby or []

         # rename deprecated fields
@@ -254,6 +244,7 @@ class QueryObject:
     def to_dict(self) -> Dict[str, Any]:
         query_object_dict = {
             "granularity": self.granularity,
+            "groupby": self.groupby,
             "from_dttm": self.from_dttm,
             "to_dttm": self.to_dttm,
             "is_timeseries": self.is_timeseries,
@@ -268,9 +259,6 @@ class QueryObject:
             "columns": self.columns,
             "orderby": self.orderby,
         }
-        if not is_feature_enabled("SIP_38_VIZ_REARCHITECTURE"):
-            query_object_dict["groupby"] = self.groupby
         return query_object_dict

     def cache_key(self, **extra: Any) -> str:
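The deleted branch in QueryObject.__init__ folded `groupby` into `columns` when the flag was on. A self-contained sketch of the pre-removal behavior, condensed from the hunk above (the helper name and return shape are illustrative, not Superset's API):

import logging
from typing import List, Optional, Tuple

logger = logging.getLogger(__name__)


def resolve_selectables(
    columns: Optional[List[str]],
    groupby: Optional[List[str]],
    is_sip_38: bool,
) -> Tuple[List[str], List[str]]:
    """Return (columns, groupby) as the deleted __init__ branch computed them."""
    columns = list(columns or [])
    groupby = list(groupby or [])
    if is_sip_38:
        if groupby:
            logger.warning(
                "The field `groupby` is deprecated. Viz plugins should "
                "pass all selectables via the `columns` field"
            )
            columns += groupby  # SIP-38: all selectables travel in `columns`
        return columns, []
    return columns, groupby  # legacy path, now the only behavior


# e.g. resolve_selectables(["name"], ["gender"], is_sip_38=True)
# -> (["name", "gender"], [])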

superset/config.py

@@ -315,7 +315,6 @@ DEFAULT_FEATURE_FLAGS: Dict[str, bool] = {
     "DASHBOARD_CACHE": False,
     "REMOVE_SLICE_LEVEL_LABEL_COLORS": False,
     "SHARE_QUERIES_VIA_KV_STORE": False,
-    "SIP_38_VIZ_REARCHITECTURE": False,
     "TAGGING_SYSTEM": False,
     "SQLLAB_BACKEND_PERSISTENCE": False,
     "LISTVIEWS_DEFAULT_CARD_VIEW": False,

superset/connectors/druid/models.py

@@ -87,7 +87,6 @@ try:
 except ImportError:
     pass

-IS_SIP_38 = is_feature_enabled("SIP_38_VIZ_REARCHITECTURE")
 DRUID_TZ = conf.get("DRUID_TZ")
 POST_AGG_TYPE = "postagg"
 metadata = Model.metadata  # pylint: disable=no-member
@@ -1174,8 +1173,7 @@ class DruidDatasource(Model, BaseDatasource):
         )

         # the dimensions list with dimensionSpecs expanded
-        columns_ = columns if IS_SIP_38 else groupby
-        dimensions = self.get_dimensions(columns_, columns_dict) if columns_ else []
+        dimensions = self.get_dimensions(groupby, columns_dict) if groupby else []
         extras = extras or {}

         qry = dict(
@@ -1209,9 +1207,7 @@ class DruidDatasource(Model, BaseDatasource):
         order_direction = "descending" if order_desc else "ascending"

-        if (IS_SIP_38 and not metrics and columns and "__time" not in columns) or (
-            not IS_SIP_38 and columns
-        ):
+        if columns:
             columns.append("__time")
             del qry["post_aggregations"]
             del qry["aggregations"]
@@ -1221,20 +1217,11 @@ class DruidDatasource(Model, BaseDatasource):
             qry["granularity"] = "all"
             qry["limit"] = row_limit
             client.scan(**qry)
-        elif (IS_SIP_38 and columns) or (
-            not IS_SIP_38 and not groupby and not having_filters
-        ):
+        elif not groupby and not having_filters:
             logger.info("Running timeseries query for no groupby values")
             del qry["dimensions"]
             client.timeseries(**qry)
-        elif (
-            not having_filters
-            and order_desc
-            and (
-                (IS_SIP_38 and columns and len(columns) == 1)
-                or (not IS_SIP_38 and groupby and len(groupby) == 1)
-            )
-        ):
+        elif not having_filters and order_desc and (groupby and len(groupby) == 1):
             dim = list(qry["dimensions"])[0]
             logger.info("Running two-phase topn query for dimension [{}]".format(dim))
             pre_qry = deepcopy(qry)
@@ -1286,7 +1273,7 @@ class DruidDatasource(Model, BaseDatasource):
             qry["metric"] = list(qry["aggregations"].keys())[0]
             client.topn(**qry)
             logger.info("Phase 2 Complete")
-        elif having_filters or ((IS_SIP_38 and columns) or (not IS_SIP_38 and groupby)):
+        elif having_filters or groupby:
             # If grouping on multiple fields or using a having filter
             # we have to force a groupby query
             logger.info("Running groupby query for dimensions [{}]".format(dimensions))
@@ -1397,9 +1384,7 @@ class DruidDatasource(Model, BaseDatasource):
             df=df, query=query_str, duration=datetime.now() - qry_start_dttm
         )

-        df = self.homogenize_types(
-            df, query_obj.get("columns" if IS_SIP_38 else "groupby", [])
-        )
+        df = self.homogenize_types(df, query_obj.get("groupby", []))
         df.columns = [
             DTTM_ALIAS if c in ("timestamp", "__time") else c for c in df.columns
         ]
@@ -1415,8 +1400,7 @@ class DruidDatasource(Model, BaseDatasource):
         if DTTM_ALIAS in df.columns:
             cols += [DTTM_ALIAS]
-        if not IS_SIP_38:
-            cols += query_obj.get("groupby") or []
+        cols += query_obj.get("groupby") or []
         cols += query_obj.get("columns") or []
         cols += query_obj.get("metrics") or []

superset/connectors/sqla/models.py

@@ -910,7 +910,6 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-attributes
             "filter": filter,
             "columns": [col.column_name for col in self.columns],
         }
-        is_sip_38 = is_feature_enabled("SIP_38_VIZ_REARCHITECTURE")
         template_kwargs.update(self.template_params_dict)
         extra_cache_keys: List[Any] = []
         template_kwargs["extra_cache_keys"] = extra_cache_keys
@@ -939,11 +938,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-attributes
                     "and is required by this type of chart"
                 )
             )
-        if (
-            not metrics
-            and not columns
-            and (is_sip_38 or (not is_sip_38 and not groupby))
-        ):
+        if not metrics and not columns and not groupby:
             raise QueryObjectValidationError(_("Empty query?"))

         metrics_exprs: List[ColumnElement] = []
@@ -975,7 +970,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-attributes
         if metrics or groupby:
             # dedup columns while preserving order
-            columns = columns if is_sip_38 else (groupby or columns)
+            columns = groupby or columns
             select_exprs = []
             for selected in columns:
                 # if groupby field/expr equals granularity field/expr
@@ -1172,7 +1167,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-attributes
             is_timeseries  # pylint: disable=too-many-boolean-expressions
             and timeseries_limit
             and not time_groupby_inline
-            and ((is_sip_38 and columns) or (not is_sip_38 and groupby))
+            and groupby
         ):
             if self.database.db_engine_spec.allows_joins:
                 # some sql dialects require for order by expressions
@@ -1235,6 +1230,7 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-attributes
                 "row_limit": timeseries_limit,
                 "metrics": metrics,
                 "granularity": granularity,
+                "groupby": groupby,
                 "from_dttm": inner_from_dttm or from_dttm,
                 "to_dttm": inner_to_dttm or to_dttm,
                 "filter": filter,
@@ -1243,8 +1239,6 @@ class SqlaTable(  # pylint: disable=too-many-public-methods,too-many-instance-attributes
                 "columns": columns,
                 "order_desc": True,
             }
-            if not is_sip_38:
-                prequery_obj["groupby"] = groupby
             result = self.query(prequery_obj)
             prequeries.append(result.query)
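The SQLA connector loses its flag branches the same way: the empty-query guard and the groupby-over-columns precedence become unconditional. A condensed, standalone sketch of the post-change logic (Superset itself raises QueryObjectValidationError inside SqlaTable.get_sqla_query; ValueError stands in here to keep the sketch dependency-free):

from typing import List


def select_columns(
    metrics: List[str], columns: List[str], groupby: List[str]
) -> List[str]:
    """Empty-query guard plus groupby precedence, condensed from the hunks above."""
    if not metrics and not columns and not groupby:
        raise ValueError("Empty query?")
    if metrics or groupby:
        return groupby or columns  # groupby now always wins when present
    return columns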

superset/models/slice.py

@@ -35,11 +35,7 @@ from superset.models.tags import ChartUpdater
 from superset.tasks.thumbnails import cache_chart_thumbnail
 from superset.utils import core as utils
 from superset.utils.urls import get_url_path
-
-if is_feature_enabled("SIP_38_VIZ_REARCHITECTURE"):
-    from superset.viz_sip38 import BaseViz, viz_types
-else:
-    from superset.viz import BaseViz, viz_types  # type: ignore
+from superset.viz import BaseViz, viz_types  # type: ignore

 if TYPE_CHECKING:
     from superset.connectors.base.models import BaseDatasource

superset/views/utils.py

@@ -31,7 +31,7 @@ from flask_babel import _
 from sqlalchemy.orm.exc import NoResultFound

 import superset.models.core as models
-from superset import app, dataframe, db, is_feature_enabled, result_set
+from superset import app, dataframe, db, result_set, viz
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
 from superset.exceptions import (
@@ -54,11 +54,6 @@ from superset.viz import BaseViz
 logger = logging.getLogger(__name__)
 stats_logger = app.config["STATS_LOGGER"]

-if is_feature_enabled("SIP_38_VIZ_REARCHITECTURE"):
-    from superset import viz_sip38 as viz
-else:
-    from superset import viz  # type: ignore
-
 REJECTED_FORM_DATA_KEYS: List[str] = []
 if not app.config["ENABLE_JAVASCRIPT_CONTROLS"]:

superset/viz_sip38.py (deleted)

File diff suppressed because it is too large