feat: apply Time Grain to X-Axis column (#21163)

Yongjie Zhao 2022-09-07 16:24:15 +08:00 committed by GitHub
parent 875e9f8a04
commit ce3d38d2e7
24 changed files with 705 additions and 29 deletions

View File

@ -27,6 +27,8 @@ assists people when migrating to a new version.
- [20606](https://github.com/apache/superset/pull/20606): When a user clicks on a chart title or the "Edit chart" button in the Dashboard page, Explore opens in the same tab. Clicking while holding cmd/ctrl opens Explore in a new tab. To bring back the old behaviour (always opening Explore in a new tab), flip the feature flag `DASHBOARD_EDIT_CHART_IN_NEW_TAB` to `True`.
- [20799](https://github.com/apache/superset/pull/20799): The Presto and Trino engines will now display a tracking URL for running queries in SQL Lab. If for some reason you don't want to show the tracking URL (for example, when your data warehouse hasn't enabled access to the Presto or Trino UI), update `TRACKING_URL_TRANSFORMER` in `config.py` to return `None`.
- [21002](https://github.com/apache/superset/pull/21002): Support Python 3.10 and bump pandas 1.4 and pyarrow 6.
- [21163](https://github.com/apache/superset/pull/21163): When the `GENERIC_CHART_AXES` feature flag is set to `True`, the Time Grain control will move below the X-Axis control.
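For reference, enabling the flag is a one-line config change. A minimal sketch, assuming the standard `superset_config.py` override file (the flag is disabled by default in this release):

```python
# superset_config.py -- sketch only; GENERIC_CHART_AXES is off by default.
FEATURE_FLAGS = {
    "GENERIC_CHART_AXES": True,  # moves Time Grain below the X-Axis control
}
```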
### Breaking Changes

View File

@ -30,6 +30,11 @@ export const echartsTimeSeriesQuery: ControlPanelSectionConfig = {
expanded: true,
controlSetRows: [
[isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES) ? 'x_axis' : null],
[
isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)
? 'time_grain_sqla'
: null,
],
['metrics'],
['groupby'],
[

View File

@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
import { t } from '@superset-ui/core';
import { FeatureFlag, isFeatureEnabled, t } from '@superset-ui/core';
import { ControlPanelSectionConfig } from '../types';
// A few standard controls sections that are used internally.
@ -38,6 +38,19 @@ export const legacyTimeseriesTime: ControlPanelSectionConfig = {
],
};
export const genericTime: ControlPanelSectionConfig = {
...baseTimeSection,
controlSetRows: [
['granularity_sqla'],
[
isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)
? null
: 'time_grain_sqla',
],
['time_range'],
],
};
export const legacyRegularTime: ControlPanelSectionConfig = {
...baseTimeSection,
controlSetRows: [['granularity_sqla'], ['time_range']],

View File

@ -47,6 +47,8 @@ import {
ComparisionType,
QueryResponse,
QueryColumn,
isAdhocColumn,
isPhysicalColumn,
} from '@superset-ui/core';
import {
@ -323,6 +325,21 @@ const time_grain_sqla: SharedControlConfig<'SelectControl'> = {
mapStateToProps: ({ datasource }) => ({
choices: (datasource as Dataset)?.time_grain_sqla || null,
}),
visibility: ({ controls }) => {
if (!isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)) {
return true;
}
const xAxis = controls?.x_axis;
const xAxisValue = xAxis?.value;
if (xAxisValue === undefined || isAdhocColumn(xAxisValue)) {
return true;
}
if (isPhysicalColumn(xAxisValue)) {
return !!xAxis?.options?.[xAxisValue]?.is_dttm;
}
return false;
},
};
const time_range: SharedControlConfig<'DateFilterControl'> = {

View File

@ -23,6 +23,8 @@ import { QueryFieldAliases, QueryFormData } from './types/QueryFormData';
import { QueryContext, QueryObject } from './types/Query';
import { SetDataMaskHook } from '../chart';
import { JsonObject } from '../connection';
import { isFeatureEnabled, FeatureFlag } from '../utils';
import { normalizeTimeColumn } from './normalizeTimeColumn';
const WRAP_IN_ARRAY = (baseQueryObject: QueryObject) => [baseQueryObject];
@ -45,13 +47,16 @@ export default function buildQueryContext(
typeof options === 'function'
? { buildQuery: options, queryFields: {} }
: options || {};
const queries = buildQuery(buildQueryObject(formData, queryFields));
let queries = buildQuery(buildQueryObject(formData, queryFields));
queries.forEach(query => {
if (Array.isArray(query.post_processing)) {
// eslint-disable-next-line no-param-reassign
query.post_processing = query.post_processing.filter(Boolean);
}
});
if (isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)) {
queries = queries.map(query => normalizeTimeColumn(formData, query));
}
return {
datasource: new DatasourceKey(formData.datasource).toObject(),
force: formData.force || false,

View File

@ -28,6 +28,7 @@ export { default as getColumnLabel } from './getColumnLabel';
export { default as getMetricLabel } from './getMetricLabel';
export { default as DatasourceKey } from './DatasourceKey';
export { default as normalizeOrderBy } from './normalizeOrderBy';
export { normalizeTimeColumn } from './normalizeTimeColumn';
export * from './types/AnnotationLayer';
export * from './types/QueryFormData';

View File

@ -0,0 +1,83 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import omit from 'lodash/omit';
import {
AdhocColumn,
isAdhocColumn,
isPhysicalColumn,
QueryFormColumn,
QueryFormData,
QueryObject,
} from './types';
import { FeatureFlag, isFeatureEnabled } from '../utils';
export function normalizeTimeColumn(
formData: QueryFormData,
queryObject: QueryObject,
): QueryObject {
if (!(isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES) && formData.x_axis)) {
return queryObject;
}
const { columns: _columns, extras: _extras } = queryObject;
const mutatedColumns: QueryFormColumn[] = [...(_columns || [])];
const axisIdx = _columns?.findIndex(
col =>
(isPhysicalColumn(col) &&
isPhysicalColumn(formData.x_axis) &&
col === formData.x_axis) ||
(isAdhocColumn(col) &&
isAdhocColumn(formData.x_axis) &&
col.sqlExpression === formData.x_axis.sqlExpression),
);
if (
axisIdx !== undefined &&
axisIdx > -1 &&
formData.x_axis &&
Array.isArray(_columns)
) {
if (isAdhocColumn(_columns[axisIdx])) {
mutatedColumns[axisIdx] = {
timeGrain: _extras?.time_grain_sqla,
columnType: 'BASE_AXIS',
...(_columns[axisIdx] as AdhocColumn),
};
} else {
mutatedColumns[axisIdx] = {
timeGrain: _extras?.time_grain_sqla,
columnType: 'BASE_AXIS',
sqlExpression: formData.x_axis,
label: formData.x_axis,
expressionType: 'SQL',
};
}
const newQueryObject = omit(queryObject, [
'extras.time_grain_sqla',
'is_timeseries',
]);
newQueryObject.columns = mutatedColumns;
return newQueryObject;
}
// fallback, return original queryObject
return queryObject;
}

View File

@ -27,6 +27,8 @@ export interface AdhocColumn {
optionName?: string;
sqlExpression: string;
expressionType: 'SQL';
columnType?: 'BASE_AXIS' | 'SERIES';
timeGrain?: string;
}
/**

View File

@ -17,6 +17,7 @@
* under the License.
*/
import { buildQueryContext } from '@superset-ui/core';
import * as queryModule from '../../src/query/normalizeTimeColumn';
describe('buildQueryContext', () => {
it('should build datasource for table sources and apply defaults', () => {
@ -122,4 +123,50 @@ describe('buildQueryContext', () => {
},
]);
});
it('should call normalizeTimeColumn if GENERIC_CHART_AXES is enabled', () => {
// @ts-ignore
const spy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: true,
},
}));
const spyNormalizeTimeColumn = jest.spyOn(
queryModule,
'normalizeTimeColumn',
);
buildQueryContext(
{
datasource: '5__table',
viz_type: 'table',
},
() => [{}],
);
expect(spyNormalizeTimeColumn).toBeCalled();
spy.mockRestore();
spyNormalizeTimeColumn.mockRestore();
});
it("shouldn't call normalizeTimeColumn if GENERIC_CHART_AXES is disabled", () => {
// @ts-ignore
const spy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: false,
},
}));
const spyNormalizeTimeColumn = jest.spyOn(
queryModule,
'normalizeTimeColumn',
);
buildQueryContext(
{
datasource: '5__table',
viz_type: 'table',
},
() => [{}],
);
expect(spyNormalizeTimeColumn).not.toBeCalled();
spy.mockRestore();
spyNormalizeTimeColumn.mockRestore();
});
});

View File

@ -0,0 +1,247 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {
normalizeTimeColumn,
QueryObject,
SqlaFormData,
} from '@superset-ui/core';
describe('disabled GENERIC_CHART_AXES', () => {
let windowSpy: any;
beforeAll(() => {
// @ts-ignore
windowSpy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: false,
},
}));
});
afterAll(() => {
windowSpy.mockRestore();
});
it('should return the original QueryObject if GENERIC_CHART_AXES is disabled', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
columns: ['col1'],
metrics: ['count(*)'],
x_axis: 'time_column',
};
const query: QueryObject = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
},
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
columns: ['col1'],
metrics: ['count(*)'],
is_timeseries: true,
};
expect(normalizeTimeColumn(formData, query)).toEqual(query);
});
});
describe('enabled GENERIC_CHART_AXES', () => {
let windowSpy: any;
beforeAll(() => {
// @ts-ignore
windowSpy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({
featureFlags: {
GENERIC_CHART_AXES: true,
},
}));
});
afterAll(() => {
windowSpy.mockRestore();
});
it('should return original QueryObject if x_axis is empty', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
columns: ['col1'],
metrics: ['count(*)'],
};
const query: QueryObject = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
},
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
columns: ['col1'],
metrics: ['count(*)'],
is_timeseries: true,
};
expect(normalizeTimeColumn(formData, query)).toEqual(query);
});
it('should support different columns for x-axis and granularity', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
x_axis: 'time_column_in_x_axis',
columns: ['col1'],
metrics: ['count(*)'],
};
const query: QueryObject = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
where: '',
having: '',
},
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
columns: ['time_column_in_x_axis', 'col1'],
metrics: ['count(*)'],
is_timeseries: true,
};
expect(normalizeTimeColumn(formData, query)).toEqual({
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
extras: { where: '', having: '' },
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
columns: [
{
timeGrain: 'P1Y',
columnType: 'BASE_AXIS',
sqlExpression: 'time_column_in_x_axis',
label: 'time_column_in_x_axis',
expressionType: 'SQL',
},
'col1',
],
metrics: ['count(*)'],
});
});
it('should support custom SQL in x-axis', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
x_axis: {
expressionType: 'SQL',
label: 'Order Date + 1 year',
sqlExpression: '"Order Date" + interval \'1 year\'',
},
columns: ['col1'],
metrics: ['count(*)'],
};
const query: QueryObject = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
where: '',
having: '',
},
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
columns: [
{
expressionType: 'SQL',
label: 'Order Date + 1 year',
sqlExpression: '"Order Date" + interval \'1 year\'',
},
'col1',
],
metrics: ['count(*)'],
is_timeseries: true,
};
expect(normalizeTimeColumn(formData, query)).toEqual({
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
extras: { where: '', having: '' },
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
columns: [
{
timeGrain: 'P1Y',
columnType: 'BASE_AXIS',
expressionType: 'SQL',
label: 'Order Date + 1 year',
sqlExpression: `"Order Date" + interval '1 year'`,
},
'col1',
],
metrics: ['count(*)'],
});
});
it('should fall back when the query object has no columns', () => {
const formData: SqlaFormData = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
time_grain_sqla: 'P1Y',
time_range: '1 year ago : 2013',
x_axis: {
expressionType: 'SQL',
label: 'Order Date + 1 year',
sqlExpression: '"Order Date" + interval \'1 year\'',
},
columns: ['col1'],
metrics: ['count(*)'],
};
const query: QueryObject = {
datasource: '5__table',
viz_type: 'table',
granularity: 'time_column',
extras: {
time_grain_sqla: 'P1Y',
where: '',
having: '',
},
time_range: '1 year ago : 2013',
orderby: [['count(*)', true]],
metrics: ['count(*)'],
is_timeseries: true,
};
expect(normalizeTimeColumn(formData, query)).toEqual(query);
});
});

View File

@ -291,12 +291,12 @@ function createAdvancedAnalyticsSection(
const config: ControlPanelConfig = {
controlPanelSections: [
sections.legacyTimeseriesTime,
sections.genericTime,
isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)
? {
label: t('Shared query fields'),
expanded: true,
controlSetRows: [['x_axis']],
controlSetRows: [['x_axis'], ['time_grain_sqla']],
}
: null,
createQuerySection(t('Query A'), ''),

View File

@ -52,7 +52,7 @@ const {
} = DEFAULT_FORM_DATA;
const config: ControlPanelConfig = {
controlPanelSections: [
sections.legacyTimeseriesTime,
sections.genericTime,
sections.echartsTimeSeriesQuery,
sections.advancedAnalyticsControls,
sections.annotationsAndLayersControls,

View File

@ -259,7 +259,7 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] {
const config: ControlPanelConfig = {
controlPanelSections: [
sections.legacyTimeseriesTime,
sections.genericTime,
sections.echartsTimeSeriesQuery,
sections.advancedAnalyticsControls,
sections.annotationsAndLayersControls,

View File

@ -51,7 +51,7 @@ const {
} = DEFAULT_FORM_DATA;
const config: ControlPanelConfig = {
controlPanelSections: [
sections.legacyTimeseriesTime,
sections.genericTime,
sections.echartsTimeSeriesQuery,
sections.advancedAnalyticsControls,
sections.annotationsAndLayersControls,

View File

@ -47,7 +47,7 @@ const {
} = DEFAULT_FORM_DATA;
const config: ControlPanelConfig = {
controlPanelSections: [
sections.legacyTimeseriesTime,
sections.genericTime,
sections.echartsTimeSeriesQuery,
sections.advancedAnalyticsControls,
sections.annotationsAndLayersControls,

View File

@ -47,7 +47,7 @@ const {
} = DEFAULT_FORM_DATA;
const config: ControlPanelConfig = {
controlPanelSections: [
sections.legacyTimeseriesTime,
sections.genericTime,
sections.echartsTimeSeriesQuery,
sections.advancedAnalyticsControls,
sections.annotationsAndLayersControls,

View File

@ -50,7 +50,7 @@ const {
} = DEFAULT_FORM_DATA;
const config: ControlPanelConfig = {
controlPanelSections: [
sections.legacyTimeseriesTime,
sections.genericTime,
sections.echartsTimeSeriesQuery,
sections.advancedAnalyticsControls,
sections.annotationsAndLayersControls,

View File

@ -83,6 +83,7 @@ from superset.common.db_query_status import QueryStatus
from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
from superset.connectors.sqla.utils import (
find_cached_objects_in_session,
get_columns_description,
get_physical_table_metadata,
get_virtual_table_metadata,
validate_adhoc_subquery,
@ -1124,7 +1125,29 @@ class SqlaTable(Model, BaseDatasource): # pylint: disable=too-many-public-metho
schema=self.schema,
template_processor=template_processor,
)
sqla_column = literal_column(expression)
col_in_metadata = self.get_column(expression)
if col_in_metadata:
sqla_column = col_in_metadata.get_sqla_col()
is_dttm = col_in_metadata.is_temporal
else:
sqla_column = literal_column(expression)
# probe adhoc column type
tbl, _ = self.get_from_clause(template_processor)
qry = sa.select([sqla_column]).limit(1).select_from(tbl)
sql = self.database.compile_sqla_query(qry)
col_desc = get_columns_description(self.database, sql)
is_dttm = col_desc[0]["is_dttm"]
if (
is_dttm
and col.get("columnType") == "BASE_AXIS"
and (time_grain := col.get("timeGrain"))
):
sqla_column = self.db_engine_spec.get_timestamp_expr(
sqla_column,
None,
time_grain,
)
return self.make_sqla_column_compatible(sqla_column, label)
def make_sqla_column_compatible(
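In short, when an adhoc column is marked `BASE_AXIS` and carries a `timeGrain`, the engine spec wraps it in a time-truncation expression. A hedged sketch of what that call yields, assuming a hypothetical PostgreSQL-backed `SqlaTable` named `tbl` (the exact SQL varies by engine):

```python
from sqlalchemy import column

# `tbl` is a hypothetical SqlaTable bound to a PostgreSQL database.
# Same three-argument call as in the diff above: column, datetime format, grain.
axis_expr = tbl.db_engine_spec.get_timestamp_expr(column("col6"), None, "P1Y")
# On PostgreSQL this compiles to roughly:
#   DATE_TRUNC('year', col6)
# i.e. the X-Axis column itself is truncated to the requested grain in SQL.
```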

View File

@ -14,6 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import logging
from contextlib import closing
from typing import (
@ -102,7 +104,7 @@ def get_physical_table_metadata(
return cols
def get_virtual_table_metadata(dataset: "SqlaTable") -> List[ResultSetColumnType]:
def get_virtual_table_metadata(dataset: SqlaTable) -> List[ResultSetColumnType]:
"""Use SQLparser to get virtual dataset metadata"""
if not dataset.sql:
raise SupersetGenericDBErrorException(
@ -137,7 +139,7 @@ def get_virtual_table_metadata(dataset: "SqlaTable") -> List[ResultSetColumnType
try:
with closing(engine.raw_connection()) as conn:
cursor = conn.cursor()
query = dataset.database.apply_limit_to_sql(statements[0])
query = dataset.database.apply_limit_to_sql(statements[0], limit=1)
db_engine_spec.execute(cursor, query)
result = db_engine_spec.fetch_data(cursor, limit=1)
result_set = SupersetResultSet(result, cursor.description, db_engine_spec)
@ -147,6 +149,24 @@ def get_virtual_table_metadata(dataset: "SqlaTable") -> List[ResultSetColumnType
return cols
def get_columns_description(
database: Database,
query: str,
) -> List[ResultSetColumnType]:
db_engine_spec = database.db_engine_spec
try:
with closing(database.get_sqla_engine().raw_connection()) as conn:
cursor = conn.cursor()
query = database.apply_limit_to_sql(query, limit=1)
db_engine_spec.execute(cursor, query)
result = db_engine_spec.fetch_data(cursor, limit=1)
result_set = SupersetResultSet(result, cursor.description, db_engine_spec)
return result_set.columns
except Exception as ex:
raise SupersetGenericDBErrorException(message=str(ex)) from ex
def validate_adhoc_subquery(
sql: str,
database_id: int,
@ -184,12 +204,12 @@ def validate_adhoc_subquery(
@memoized
def get_dialect_name(drivername: str) -> str:
return SqlaURL(drivername).get_dialect().name
return SqlaURL.create(drivername).get_dialect().name
@memoized
def get_identifier_quoter(drivername: str) -> Dict[str, Callable[[str], str]]:
return SqlaURL(drivername).get_dialect()().identifier_preparer.quote
return SqlaURL.create(drivername).get_dialect()().identifier_preparer.quote
DeclarativeModel = TypeVar("DeclarativeModel", bound=DeclarativeMeta)
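A hedged usage sketch for the new `get_columns_description` helper; the `database` object and the probe SQL are hypothetical, and the helper re-applies `LIMIT 1` before executing, so the probe stays cheap:

```python
# Probe the result-set metadata of an arbitrary SQL expression.
cols = get_columns_description(
    database,
    "SELECT col6 + interval '1 year' AS probe FROM physical_dataset",
)
assert cols[0]["is_dttm"]  # temporal expressions report is_dttm=True
```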

View File

@ -55,6 +55,8 @@ class AdhocColumn(TypedDict, total=False):
hasCustomLabel: Optional[bool]
label: Optional[str]
sqlExpression: Optional[str]
columnType: Optional[Literal["BASE_AXIS", "SERIES"]]
timeGrain: Optional[str]
class ResultSetColumnType(TypedDict):
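For illustration, a `BASE_AXIS` column as it might appear in a chart-data payload (values hypothetical):

```python
axis_column: AdhocColumn = {
    "label": "order_date",
    "sqlExpression": "order_date",
    "columnType": "BASE_AXIS",  # marks the column as the chart's x-axis
    "timeGrain": "P1M",         # ISO 8601 duration: truncate to month
}
```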

View File

@ -1269,6 +1269,17 @@ def is_adhoc_column(column: Column) -> TypeGuard[AdhocColumn]:
return isinstance(column, dict)
def get_base_axis_column(columns: Optional[List[Column]]) -> Optional[AdhocColumn]:
if columns is None:
return None
axis_cols = [
col
for col in columns
if is_adhoc_column(col) and col.get("columnType") == "BASE_AXIS"
]
return axis_cols[0] if axis_cols else None
def get_column_name(
column: Column, verbose_map: Optional[Dict[str, Any]] = None
) -> str:
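A small sketch of `get_base_axis_column` in use (column shapes as above; the names are hypothetical):

```python
columns = [
    "col1",  # plain physical column: ignored by the helper
    {
        "label": "ds",
        "sqlExpression": "ds",
        "columnType": "BASE_AXIS",
        "timeGrain": "P1D",
    },
]
axis = get_base_axis_column(columns)  # -> the BASE_AXIS dict above
assert axis is not None and axis["timeGrain"] == "P1D"
# get_base_axis_column(["col1"]) and get_base_axis_column(None) return None.
```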

View File

@ -18,7 +18,7 @@ from __future__ import annotations
import contextlib
import functools
from operator import ge
import os
from typing import Any, Callable, Optional, TYPE_CHECKING
from unittest.mock import patch
@ -303,34 +303,38 @@ def virtual_dataset():
@pytest.fixture
def physical_dataset():
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.connectors.sqla.utils import get_identifier_quoter
example_database = get_example_database()
engine = example_database.get_sqla_engine()
quoter = get_identifier_quoter(engine.name)
# sqlite can only execute one statement at a time
engine.execute(
"""
f"""
CREATE TABLE IF NOT EXISTS physical_dataset(
col1 INTEGER,
col2 VARCHAR(255),
col3 DECIMAL(4,2),
col4 VARCHAR(255),
col5 TIMESTAMP
col5 TIMESTAMP DEFAULT '1970-01-01 00:00:01',
col6 TIMESTAMP DEFAULT '1970-01-01 00:00:01',
{quoter('time column with spaces')} TIMESTAMP DEFAULT '1970-01-01 00:00:01'
);
"""
)
engine.execute(
"""
INSERT INTO physical_dataset values
(0, 'a', 1.0, NULL, '2000-01-01 00:00:00'),
(1, 'b', 1.1, NULL, '2000-01-02 00:00:00'),
(2, 'c', 1.2, NULL, '2000-01-03 00:00:00'),
(3, 'd', 1.3, NULL, '2000-01-04 00:00:00'),
(4, 'e', 1.4, NULL, '2000-01-05 00:00:00'),
(5, 'f', 1.5, NULL, '2000-01-06 00:00:00'),
(6, 'g', 1.6, NULL, '2000-01-07 00:00:00'),
(7, 'h', 1.7, NULL, '2000-01-08 00:00:00'),
(8, 'i', 1.8, NULL, '2000-01-09 00:00:00'),
(9, 'j', 1.9, NULL, '2000-01-10 00:00:00');
(0, 'a', 1.0, NULL, '2000-01-01 00:00:00', '2002-01-03 00:00:00', '2002-01-03 00:00:00'),
(1, 'b', 1.1, NULL, '2000-01-02 00:00:00', '2002-02-04 00:00:00', '2002-02-04 00:00:00'),
(2, 'c', 1.2, NULL, '2000-01-03 00:00:00', '2002-03-07 00:00:00', '2002-03-07 00:00:00'),
(3, 'd', 1.3, NULL, '2000-01-04 00:00:00', '2002-04-12 00:00:00', '2002-04-12 00:00:00'),
(4, 'e', 1.4, NULL, '2000-01-05 00:00:00', '2002-05-11 00:00:00', '2002-05-11 00:00:00'),
(5, 'f', 1.5, NULL, '2000-01-06 00:00:00', '2002-06-13 00:00:00', '2002-06-13 00:00:00'),
(6, 'g', 1.6, NULL, '2000-01-07 00:00:00', '2002-07-15 00:00:00', '2002-07-15 00:00:00'),
(7, 'h', 1.7, NULL, '2000-01-08 00:00:00', '2002-08-18 00:00:00', '2002-08-18 00:00:00'),
(8, 'i', 1.8, NULL, '2000-01-09 00:00:00', '2002-09-20 00:00:00', '2002-09-20 00:00:00'),
(9, 'j', 1.9, NULL, '2000-01-10 00:00:00', '2002-10-22 00:00:00', '2002-10-22 00:00:00');
"""
)
@ -343,6 +347,13 @@ def physical_dataset():
TableColumn(column_name="col3", type="DECIMAL(4,2)", table=dataset)
TableColumn(column_name="col4", type="VARCHAR(255)", table=dataset)
TableColumn(column_name="col5", type="TIMESTAMP", is_dttm=True, table=dataset)
TableColumn(column_name="col6", type="TIMESTAMP", is_dttm=True, table=dataset)
TableColumn(
column_name="time column with spaces",
type="TIMESTAMP",
is_dttm=True,
table=dataset,
)
SqlMetric(metric_name="count", expression="count(*)", table=dataset)
db.session.merge(dataset)
db.session.commit()
@ -385,3 +396,9 @@ def virtual_dataset_comma_in_column_value():
db.session.delete(dataset)
db.session.commit()
only_postgresql = pytest.mark.skipif(
"postgresql" not in os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", ""),
reason="Only run test case in Postgresql",
)

View File

@ -30,6 +30,7 @@ from superset.common.query_object import QueryObject
from superset.connectors.sqla.models import SqlMetric
from superset.datasource.dao import DatasourceDAO
from superset.extensions import cache_manager
from superset.superset_typing import AdhocColumn
from superset.utils.core import (
AdhocMetricExpressionType,
backend,
@ -38,6 +39,7 @@ from superset.utils.core import (
)
from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.conftest import only_postgresql
from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
load_birth_names_data,
@ -728,3 +730,183 @@ def test_get_label_map(app_context, virtual_dataset_comma_in_column_value):
"count, col2, row2": ["count", "col2, row2"],
"count, col2, row3": ["count", "col2, row3"],
}
def test_time_column_with_time_grain(app_context, physical_dataset):
column_on_axis: AdhocColumn = {
"label": "I_AM_AN_ORIGINAL_COLUMN",
"sqlExpression": "col5",
"timeGrain": "P1Y",
}
adhoc_column: AdhocColumn = {
"label": "I_AM_A_TRUNC_COLUMN",
"sqlExpression": "col6",
"columnType": "BASE_AXIS",
"timeGrain": "P1Y",
}
qc = QueryContextFactory().create(
datasource={
"type": physical_dataset.type,
"id": physical_dataset.id,
},
queries=[
{
"columns": ["col1", column_on_axis, adhoc_column],
"metrics": ["count"],
"orderby": [["col1", True]],
}
],
result_type=ChartDataResultType.FULL,
force=True,
)
query_object = qc.queries[0]
df = qc.get_df_payload(query_object)["df"]
if query_object.datasource.database.backend == "sqlite":
# sqlite returns timestamp columns as strings
assert df["I_AM_AN_ORIGINAL_COLUMN"][0] == "2000-01-01 00:00:00"
assert df["I_AM_AN_ORIGINAL_COLUMN"][1] == "2000-01-02 00:00:00"
assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00"
assert df["I_AM_A_TRUNC_COLUMN"][1] == "2002-01-01 00:00:00"
else:
assert df["I_AM_AN_ORIGINAL_COLUMN"][0].strftime("%Y-%m-%d") == "2000-01-01"
assert df["I_AM_AN_ORIGINAL_COLUMN"][1].strftime("%Y-%m-%d") == "2000-01-02"
assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01"
assert df["I_AM_A_TRUNC_COLUMN"][1].strftime("%Y-%m-%d") == "2002-01-01"
def test_non_time_column_with_time_grain(app_context, physical_dataset):
qc = QueryContextFactory().create(
datasource={
"type": physical_dataset.type,
"id": physical_dataset.id,
},
queries=[
{
"columns": [
"col1",
{
"label": "COL2 ALIAS",
"sqlExpression": "col2",
"columnType": "BASE_AXIS",
"timeGrain": "P1Y",
},
],
"metrics": ["count"],
"orderby": [["col1", True]],
"row_limit": 1,
}
],
result_type=ChartDataResultType.FULL,
force=True,
)
query_object = qc.queries[0]
df = qc.get_df_payload(query_object)["df"]
assert df["COL2 ALIAS"][0] == "a"
def test_special_chars_in_column_name(app_context, physical_dataset):
qc = QueryContextFactory().create(
datasource={
"type": physical_dataset.type,
"id": physical_dataset.id,
},
queries=[
{
"columns": [
"col1",
"time column with spaces",
{
"label": "I_AM_A_TRUNC_COLUMN",
"sqlExpression": "time column with spaces",
"columnType": "BASE_AXIS",
"timeGrain": "P1Y",
},
],
"metrics": ["count"],
"orderby": [["col1", True]],
"row_limit": 1,
}
],
result_type=ChartDataResultType.FULL,
force=True,
)
query_object = qc.queries[0]
df = qc.get_df_payload(query_object)["df"]
if query_object.datasource.database.backend == "sqlite":
# sqlite returns timestamp columns as strings
assert df["time column with spaces"][0] == "2002-01-03 00:00:00"
assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00"
else:
assert df["time column with spaces"][0].strftime("%Y-%m-%d") == "2002-01-03"
assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01"
@only_postgresql
def test_date_adhoc_column(app_context, physical_dataset):
# the SQL expression returns a date type
column_on_axis: AdhocColumn = {
"label": "ADHOC COLUMN",
"sqlExpression": "col6 + interval '20 year'",
"columnType": "BASE_AXIS",
"timeGrain": "P1Y",
}
qc = QueryContextFactory().create(
datasource={
"type": physical_dataset.type,
"id": physical_dataset.id,
},
queries=[
{
"columns": [column_on_axis],
"metrics": ["count"],
}
],
result_type=ChartDataResultType.FULL,
force=True,
)
query_object = qc.queries[0]
df = qc.get_df_payload(query_object)["df"]
# ADHOC COLUMN count
# 0 2022-01-01 10
assert df["ADHOC COLUMN"][0].strftime("%Y-%m-%d") == "2022-01-01"
assert df["count"][0] == 10
@only_postgresql
def test_non_date_adhoc_column(app_context, physical_dataset):
# the SQL expression returns a non-date type
column_on_axis: AdhocColumn = {
"label": "ADHOC COLUMN",
"sqlExpression": "col1 * 10",
"columnType": "BASE_AXIS",
"timeGrain": "P1Y",
}
qc = QueryContextFactory().create(
datasource={
"type": physical_dataset.type,
"id": physical_dataset.id,
},
queries=[
{
"columns": [column_on_axis],
"metrics": ["count"],
"orderby": [
[
{
"expressionType": "SQL",
"sqlExpression": '"ADHOC COLUMN"',
},
True,
]
],
}
],
result_type=ChartDataResultType.FULL,
force=True,
)
query_object = qc.queries[0]
df = qc.get_df_payload(query_object)["df"]
assert df["ADHOC COLUMN"][0] == 0
assert df["ADHOC COLUMN"][1] == 10

View File

@ -30,7 +30,6 @@ from superset.utils.core import (
get_metric_names,
get_time_filter_status,
is_adhoc_metric,
NO_TIME_RANGE,
)
from tests.unit_tests.fixtures.datasets import get_dataset_mock