mirror of https://github.com/apache/superset.git
chore(druid): Remove legacy Druid NoSQL logic (#23997)
This commit is contained in:
parent bdb8bbef32
commit 9adb023880
@@ -1403,13 +1403,11 @@ Note not all fields are correctly categorized. The fields vary based on visualization type.
 ### Time
 
-| Field               | Type     | Notes                                 |
-| ------------------- | -------- | ------------------------------------- |
-| `druid_time_origin` | _string_ | The Druid **Origin** widget           |
-| `granularity`       | _string_ | The Druid **Time Granularity** widget |
-| `granularity_sqla`  | _string_ | The SQLA **Time Column** widget       |
-| `time_grain_sqla`   | _string_ | The SQLA **Time Grain** widget        |
-| `time_range`        | _string_ | The **Time range** widget             |
+| Field              | Type     | Notes                                 |
+| ------------------ | -------- | ------------------------------------- |
+| `granularity_sqla` | _string_ | The SQLA **Time Column** widget       |
+| `time_grain_sqla`  | _string_ | The SQLA **Time Grain** widget        |
+| `time_range`       | _string_ | The **Time range** widget             |
 
 ### GROUP BY

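With the Druid-only `druid_time_origin` and `granularity` fields gone, a chart's time-related form data carries just the three SQLA fields. A minimal sketch of such a form-data fragment (values invented for illustration):

```
# Hypothetical form_data fragment after this change; values are illustrative.
form_data = {
    "granularity_sqla": "order_date",  # SQLA Time Column widget
    "time_grain_sqla": "P1D",          # SQLA Time Grain widget (ISO 8601 duration)
    "time_range": "Last week",         # Time range widget
}
```
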
@@ -197,30 +197,6 @@
 |can add on AccessRequestsModelView|:heavy_check_mark:|O|O|O|
 |can delete on AccessRequestsModelView|:heavy_check_mark:|O|O|O|
 |muldelete on AccessRequestsModelView|:heavy_check_mark:|O|O|O|
-|can edit on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can list on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can show on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can delete on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|muldelete on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|yaml export on DruidDatasourceModelView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can edit on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|can list on DruidClusterModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can show on DruidClusterModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|can delete on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|muldelete on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|yaml export on DruidClusterModelView|:heavy_check_mark:|O|O|O|
-|can list on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can delete on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can edit on DruidMetricInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can list on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can add on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can delete on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can edit on DruidColumnInlineView|:heavy_check_mark:|:heavy_check_mark:|O|O|
-|can refresh datasources on Druid|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can scan new datasources on Druid|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Row Level Security|:heavy_check_mark:|O|O|O|
 |menu access on Access requests|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Home|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|

@@ -229,10 +205,7 @@
 |menu access on Chart Emails|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Alerts|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |menu access on Alerts & Report|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|menu access on Druid Datasources|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|menu access on Druid Clusters|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|menu access on Scan New Datasources|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|menu access on Refresh Druid Metadata|:heavy_check_mark:|O|O|O|
 |can share dashboard on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |can share chart on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
 |can list on FilterSets|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|

@@ -26,13 +26,11 @@ Note not all fields are correctly categorized. The fields vary based on visualization type.
 ### Time
 
-| Field               | Type     | Notes                                 |
-| ------------------- | -------- | ------------------------------------- |
-| `druid_time_origin` | _string_ | The Druid **Origin** widget           |
-| `granularity`       | _string_ | The Druid **Time Granularity** widget |
-| `granularity_sqla`  | _string_ | The SQLA **Time Column** widget       |
-| `time_grain_sqla`   | _string_ | The SQLA **Time Grain** widget        |
-| `time_range`        | _string_ | The **Time range** widget             |
+| Field              | Type     | Notes                                 |
+| ------------------ | -------- | ------------------------------------- |
+| `granularity_sqla` | _string_ | The SQLA **Time Column** widget       |
+| `time_grain_sqla`  | _string_ | The SQLA **Time Grain** widget        |
+| `time_range`       | _string_ | The **Time range** widget             |
 
 ### GROUP BY

@@ -8,7 +8,7 @@ version: 1
 ## Importing and Exporting Datasources
 
 The superset cli allows you to import and export datasources from and to YAML. Datasources include
-both databases and druid clusters. The data is expected to be organized in the following hierarchy:
+databases. The data is expected to be organized in the following hierarchy:
 
 ```
 ├──databases

@@ -24,19 +24,6 @@ both databases and druid clusters. The data is expected to be organized in the following hierarchy:
 | | | └──... (more metrics)
 | | └── ... (more tables)
 | └── ... (more databases)
-└──druid_clusters
-   ├──cluster_1
-   |  ├──datasource_1
-   |  |  ├──columns
-   |  |  |  ├──column_1
-   |  |  |  ├──column_2
-   |  |  |  └──... (more columns)
-   |  |  └──metrics
-   |  |     ├──metric_1
-   |  |     ├──metric_2
-   |  |     └──... (more metrics)
-   |  └── ... (more datasources)
-   └── ... (more clusters)
 ```
 
 ### Exporting Datasources to YAML

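With the `druid_clusters` branch gone, an exported document contains only the databases subtree. A minimal sketch of the remaining shape as a Python structure (all names invented for illustration):

```
# Illustrative shape of a post-change export, expressed as a nested dict.
export = {
    "databases": [
        {
            "database_name": "examples",
            "tables": [
                {
                    "table_name": "birth_names",
                    "columns": [{"column_name": "ds"}],
                    "metrics": [{"metric_name": "sum__num"}],
                },
            ],
        },
    ],
}
```
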
@@ -59,8 +46,7 @@ references to be included (e.g. a column to include the table id it belongs to)
 Alternatively, you can export datasources using the UI:
 
 1. Open **Sources -> Databases** to export all tables associated to a single or multiple databases.
-   (**Tables** for one or more tables, **Druid Clusters** for clusters, **Druid Datasources** for
-   datasources)
+   (**Tables** for one or more tables)
 2. Select the items you would like to export.
 3. Click **Actions -> Export** to YAML
 4. If you want to import an item that you exported through the UI, you will need to nest it inside

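For reference, the same export can be driven from Python using the `export_to_dict` helper that appears later in this diff. The import path and session wiring below are assumptions, not part of this commit; treat it as a sketch, not the actual CLI implementation:

```
# A minimal sketch under assumed wiring (requires a Flask app context).
import yaml

from superset import db  # assumed: Flask-SQLAlchemy session holder
from superset.utils.dict_import_export import export_to_dict  # assumed module path

data = export_to_dict(
    session=db.session,
    recursive=True,         # walk tables, columns and metrics
    back_references=False,  # omit parent references (e.g. a column's table id)
    include_defaults=False,
)
print(yaml.safe_dump(data, default_flow_style=False))
```
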
@@ -850,13 +850,6 @@
       "description": "HAVING clause to be added to aggregate queries using AND operator.",
       "type": "string"
     },
-    "having_druid": {
-      "description": "HAVING filters to be added to legacy Druid datasource queries. This field is deprecated",
-      "items": {
-        "$ref": "#/components/schemas/ChartDataFilter"
-      },
-      "type": "array"
-    },
     "relative_end": {
       "description": "End time for relative time deltas. Default: `config[\"DEFAULT_RELATIVE_START_TIME\"]`",
       "enum": ["today", "now"],

@@ -1228,11 +1221,6 @@
       ],
       "nullable": true
     },
-    "druid_time_origin": {
-      "description": "Starting point for time grain counting on legacy Druid datasources. Used to change e.g. Monday/Sunday first-day-of-week. This field is deprecated and should be passed to `extras` as `druid_time_origin`.",
-      "nullable": true,
-      "type": "string"
-    },
     "extras": {
       "allOf": [
         {

@@ -1250,7 +1238,7 @@
       "type": "array"
     },
     "granularity": {
-      "description": "Name of temporal column used for time filtering. For legacy Druid datasources this defines the time grain.",
+      "description": "Name of temporal column used for time filtering.",
       "nullable": true,
       "type": "string"
     },

@@ -1270,14 +1258,6 @@
       "nullable": true,
       "type": "string"
     },
-    "having_filters": {
-      "description": "HAVING filters to be added to legacy Druid datasource queries. This field is deprecated and should be passed to `extras` as `having_druid`.",
-      "items": {
-        "$ref": "#/components/schemas/ChartDataFilter"
-      },
-      "nullable": true,
-      "type": "array"
-    },
     "is_rowcount": {
       "description": "Should the rowcount of the actual query be returned",
       "nullable": true,

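Taken together, the OpenAPI hunks above shrink the chart-data request surface. An illustrative post-change query payload, using only keys left in the schema (values invented):

```
# Illustrative chart-data query after this change.
query = {
    "granularity": "ds",        # temporal column used for time filtering
    "time_range": "Last week",
    "filters": [{"col": "gender", "op": "==", "val": "boy"}],
    "extras": {
        "where": "",
        "having": "",           # free-form HAVING SQL is still supported
        "time_grain_sqla": "P1D",
        # "having_druid" is gone, and "having_filters"/"druid_time_origin"
        # no longer exist on the query object at all.
    },
}
```
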
@@ -334,7 +334,6 @@ export type SharedSectionAlias =
   | 'annotations'
   | 'colorScheme'
   | 'datasourceAndVizType'
-  | 'druidTimeSeries'
   | 'sqlaTimeSeries'
   | 'NVD3TimeSeries';

@@ -37,7 +37,7 @@ import { isDefined } from '../utils';
 
 /**
  * Build the common segments of all query objects (e.g. the granularity field derived from
- * either sql alchemy or druid). The segments specific to each viz type is constructed in the
+ * SQLAlchemy). The segments specific to each viz type is constructed in the
  * buildQuery method for each viz type (see `wordcloud/buildQuery.ts` for an example).
 * Note the type of the formData argument passed in here is the type of the formData for a
 * specific viz, which is a subtype of the generic formData shared among all viz types.

@@ -61,7 +61,6 @@ export type QueryObjectFilterClause =
   | UnaryQueryObjectFilterClause;
 
 export type QueryObjectExtras = Partial<{
-  /** HAVING condition for Druid */
+  /** HAVING condition for SQLAlchemy */
   having?: string;
   relative_start?: string;

@@ -107,7 +106,7 @@ export interface QueryObject
   /** SIMPLE where filters */
   filters?: QueryObjectFilterClause[];
 
-  /** Time column for SQL, time-grain for Druid (deprecated) */
+  /** Time column for SQL */
   granularity?: string;
 
   /** If set, will group by timestamp */

@@ -119,9 +118,6 @@ export interface QueryObject
   /** Free-form HAVING SQL, multiple clauses are concatenated by AND */
   having?: string;
 
-  /** SIMPLE having filters */
-  having_filters?: QueryObjectFilterClause[];
-
   post_processing?: (PostProcessingRule | undefined)[];
 
   /** Maximum numbers of rows to return */

@@ -39,15 +39,6 @@ describe('buildQueryObject', () => {
     expect(query.granularity).toEqual('ds');
   });
 
-  it('should build granularity for druid datasources', () => {
-    query = buildQueryObject({
-      datasource: '5__druid',
-      granularity: 'ds',
-      viz_type: 'table',
-    });
-    expect(query.granularity).toEqual('ds');
-  });
-
   it('should build metrics based on default queryFields', () => {
     query = buildQueryObject({
       datasource: '5__table',

@@ -117,7 +117,6 @@ export default {
     js_columns: [],
     where: '',
     having: '',
-    having_filters: [],
    filters: [
      {
        col: 'LATITUDE',

@@ -69,7 +69,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },

@@ -69,7 +69,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },

@@ -70,7 +70,6 @@ export const payload = theme => ({
     js_columns: ['color'],
     where: '',
     having: '',
-    having_filters: [],
     filters: [{ col: 'path_json', op: 'IS NOT NULL', val: '' }],
   },
   is_cached: false,

@@ -84,7 +84,6 @@ export default {
     js_columns: [],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       {
         col: 'geometry',

@@ -84,7 +84,6 @@ export default {
     js_columns: ['population', 'area'],
     where: '',
     having: '',
-    having_filters: [],
     filters: [{ col: 'contour', op: 'IS NOT NULL', val: '' }],
   },
   is_cached: false,

@@ -67,7 +67,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },

@@ -68,7 +68,6 @@ export default {
     ],
     where: '',
     having: '',
-    having_filters: [],
     filters: [
       { col: 'LAT', op: 'IS NOT NULL', val: '' },
       { col: 'LON', op: 'IS NOT NULL', val: '' },

@@ -104,7 +104,7 @@ export default class AlteredSliceTag extends React.Component {
       if (!ofd[fdKey] && !cfd[fdKey]) {
         return;
       }
-      if (['filters', 'having', 'having_filters', 'where'].includes(fdKey)) {
+      if (['filters', 'having', 'where'].includes(fdKey)) {
         return;
       }
       if (!this.isEqualish(ofd[fdKey], cfd[fdKey])) {

@@ -765,7 +765,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
       applied_time_extras: {},
       where: '',
       having: '',
-      having_filters: [],
       filters: [],
     },
     is_cached: false,

@@ -3131,7 +3130,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
       applied_time_extras: {},
       where: '',
       having: '',
-      having_filters: [],
       filters: [],
     },
     is_cached: false,

@@ -16668,7 +16666,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
       applied_time_extras: {},
       where: '',
       having: '',
-      having_filters: [],
       filters: [
         {
           col: 'rank',

@@ -17723,7 +17720,6 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in
       applied_time_extras: {},
       where: '',
       having: '',
-      having_filters: [],
       filters: [],
     },
     is_cached: false,

@@ -27,7 +27,6 @@ export default function getFilterConfigsFromFormdata(form_data = {}) {
   const {
     date_filter,
     filter_configs = [],
-    show_druid_time_granularity,
     show_sqla_time_column,
     show_sqla_time_granularity,
   } = form_data;

@@ -93,13 +92,6 @@ export default function getFilterConfigsFromFormdata(form_data = {}) {
     };
   }
 
-  if (show_druid_time_granularity) {
-    updatedColumns = {
-      ...updatedColumns,
-      [TIME_FILTER_MAP.granularity]: form_data.granularity,
-    };
-  }
-
   configs = {
     ...configs,
     columns: updatedColumns,

@@ -125,8 +125,6 @@ export const sqlaAutoGeneratedMetricNameRegex =
   /^(sum|min|max|avg|count|count_distinct)__.*$/i;
 export const sqlaAutoGeneratedMetricRegex =
   /^(LONG|DOUBLE|FLOAT)?(SUM|AVG|MAX|MIN|COUNT)\([A-Z0-9_."]*\)$/i;
-export const druidAutoGeneratedMetricRegex =
-  /^(LONG|DOUBLE|FLOAT)?(SUM|MAX|MIN|COUNT)\([A-Z0-9_."]*\)$/i;
 
 export const TIME_FILTER_LABELS = {
   time_range: t('Time range'),

@@ -69,9 +69,6 @@ export default {
     },
   },
   sectionOverrides: {
-    druidTimeSeries: {
-      controlSetRows: [],
-    },
     sqlaTimeSeries: {
       controlSetRows: [],
     },

@@ -158,7 +158,7 @@ class FilterBox extends React.PureComponent {
   getControlData(controlName) {
     const { selectedValues } = this.state;
     const control = {
-      ...controls[controlName], // TODO: make these controls ('druid_time_origin', 'granularity', 'granularity_sqla', 'time_grain_sqla') accessible from getControlsForVizType.
+      ...controls[controlName], // TODO: make these controls ('granularity_sqla', 'time_grain_sqla') accessible from getControlsForVizType.
       name: controlName,
       key: `control-${controlName}`,
       value: selectedValues[TIME_FILTER_MAP[controlName]],

@@ -324,7 +324,6 @@ class FilterBox extends React.PureComponent {
     const { showSqlaTimeGrain, showSqlaTimeColumn } = this.props;
     const datasourceFilters = [];
     const sqlaFilters = [];
-    const druidFilters = [];
     if (showSqlaTimeGrain) sqlaFilters.push('time_grain_sqla');
     if (showSqlaTimeColumn) sqlaFilters.push('granularity_sqla');
     if (sqlaFilters.length) {

@@ -337,16 +336,6 @@ class FilterBox extends React.PureComponent {
         />,
       );
     }
-    if (druidFilters.length) {
-      datasourceFilters.push(
-        <ControlRow
-          key="druid-filters"
-          controls={druidFilters.map(control => (
-            <Control {...this.getControlData(control)} />
-          ))}
-        />,
-      );
-    }
     return datasourceFilters;
   }
 

@@ -41,8 +41,6 @@ export default function transformProps(chartProps: FilterBoxChartProps) {
     sliceId,
     dateFilter,
     instantFiltering,
-    showDruidTimeGranularity,
-    showDruidTimeOrigin,
     showSqlaTimeColumn,
     showSqlaTimeGranularity,
   } = formData;

@@ -68,8 +66,6 @@ export default function transformProps(chartProps: FilterBoxChartProps) {
     onFilterMenuClose,
     origSelectedValues: initialValues || {},
     showDateFilter: dateFilter,
-    showDruidTimeGrain: showDruidTimeGranularity,
-    showDruidTimeOrigin,
     showSqlaTimeColumn,
     showSqlaTimeGrain: showSqlaTimeGranularity,
     // the original form data, needed for async select options

@@ -34,7 +34,6 @@ from flask_babel import gettext as __
 
 from superset.common.chart_data import ChartDataResultFormat
 from superset.utils.core import (
-    DTTM_ALIAS,
     extract_dataframe_dtypes,
     get_column_names,
     get_metric_names,

@@ -230,8 +229,6 @@ def pivot_table_v2(
     Pivot table v2.
     """
     verbose_map = datasource.data["verbose_map"] if datasource else None
-    if form_data.get("granularity_sqla") == "all" and DTTM_ALIAS in df:
-        del df[DTTM_ALIAS]
 
     return pivot_df(
         df,

@@ -981,14 +981,6 @@ class ChartDataExtrasSchema(Schema):
             "AND operator."
         },
     )
-    having_druid = fields.List(
-        fields.Nested(ChartDataFilterSchema),
-        metadata={
-            "description": "HAVING filters to be added to legacy Druid datasource "
-            "queries. This field is deprecated",
-            "deprecated": True,
-        },
-    )
     time_grain_sqla = fields.String(
         metadata={
             "description": "To what level of granularity should the temporal column be "

@@ -1159,10 +1151,7 @@ class ChartDataQueryObjectSchema(Schema):
     )
     filters = fields.List(fields.Nested(ChartDataFilterSchema), allow_none=True)
     granularity = fields.String(
-        metadata={
-            "description": "Name of temporal column used for time filtering. "
-            "For legacy Druid datasources this defines the time grain."
-        },
+        metadata={"description": "Name of temporal column used for time filtering. "},
         allow_none=True,
     )
     granularity_sqla = fields.String(

@@ -1341,26 +1330,6 @@ class ChartDataQueryObjectSchema(Schema):
         },
         allow_none=True,
     )
-    having_filters = fields.List(
-        fields.Nested(ChartDataFilterSchema),
-        metadata={
-            "description": "HAVING filters to be added to legacy Druid datasource "
-            "queries. This field is deprecated and should be passed to `extras` "
-            "as `having_druid`.",
-            "deprecated": True,
-        },
-        allow_none=True,
-    )
-    druid_time_origin = fields.String(
-        metadata={
-            "description": "Starting point for time grain counting on legacy Druid "
-            "datasources. Used to change e.g. Monday/Sunday first-day-of-week. "
-            "This field is deprecated and should be passed to `extras` "
-            "as `druid_time_origin`.",
-            "deprecated": True,
-        },
-        allow_none=True,
-    )
     url_params = fields.Dict(
         metadata={
             "description": "Optional query parameters passed to a dashboard or Explore "

@@ -77,8 +77,7 @@ DEPRECATED_EXTRAS_FIELDS = (
 
 class QueryObject:  # pylint: disable=too-many-instance-attributes
     """
-    The query object's schema matches the interfaces of DB connectors like sqla
-    and druid. The query objects are constructed on the client.
+    The query objects are constructed on the client.
     """
 
     annotation_layers: list[dict[str, Any]]

@@ -38,7 +38,6 @@ from typing import Any, Callable, Literal, TYPE_CHECKING, TypedDict
 import pkg_resources
 from cachelib.base import BaseCache
 from celery.schedules import crontab
-from dateutil import tz
 from flask import Blueprint
 from flask_appbuilder.security.manager import AUTH_DB
 from pandas import Series

@@ -288,17 +287,6 @@ LOGO_RIGHT_TEXT: Callable[[], str] | str = ""
 # ex: http://localhost:8080/swagger/v1
 FAB_API_SWAGGER_UI = True
 
-# Druid query timezone
-# tz.tzutc() : Using utc timezone
-# tz.tzlocal() : Using local timezone
-# tz.gettz('Asia/Shanghai') : Using the time zone with specific name
-# [TimeZone List]
-# See: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
-# other tz can be overridden by providing a local_config
-DRUID_TZ = tz.tzutc()
-DRUID_ANALYSIS_TYPES = ["cardinality"]
-
-
 # ----------------------------------------------------
 # AUTHENTICATION CONFIG
 # ----------------------------------------------------

@@ -167,7 +167,6 @@ EXTRA_FORM_DATA_APPEND_KEYS = {
 }
 
 EXTRA_FORM_DATA_OVERRIDE_REGULAR_MAPPINGS = {
-    "granularity": "granularity",
     "granularity_sqla": "granularity",
     "time_column": "time_column",
     "time_grain": "time_grain",

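After this hunk, only the SQLA key funnels onto the query object's `granularity`. A sketch of how such an override mapping gets applied; the `apply_overrides` helper is hypothetical (the real logic lives in `merge_extra_form_data`) and the values are invented:

```
# Hypothetical helper illustrating the override mapping above.
EXTRA_FORM_DATA_OVERRIDE_REGULAR_MAPPINGS = {
    "granularity_sqla": "granularity",
    "time_column": "time_column",
    "time_grain": "time_grain",
}

def apply_overrides(form_data: dict, extra_form_data: dict) -> None:
    # Copy each recognised extra-form-data key onto its query-object key.
    for src_key, dst_key in EXTRA_FORM_DATA_OVERRIDE_REGULAR_MAPPINGS.items():
        if src_key in extra_form_data:
            form_data[dst_key] = extra_form_data[src_key]

form_data = {"granularity": "ds"}
apply_overrides(form_data, {"granularity_sqla": "order_date"})
assert form_data["granularity"] == "order_date"
```
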
@@ -143,7 +143,7 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     SecurityManager
 ):
     userstatschartview = None
-    READ_ONLY_MODEL_VIEWS = {"Database", "DruidClusterModelView", "DynamicPlugin"}
+    READ_ONLY_MODEL_VIEWS = {"Database", "DynamicPlugin"}
 
     USER_MODEL_VIEWS = {
         "RegisterUserModelView",

@@ -169,7 +169,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         "Log",
         "List Users",
         "List Roles",
-        "Refresh Druid Metadata",
         "ResetPasswordView",
         "RoleModelView",
         "Row Level Security",

@@ -196,7 +195,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
     }
 
     ADMIN_ONLY_PERMISSIONS = {
-        "can_sync_druid_source",
         "can_override_role_permissions",
         "can_approve",
         "can_update_role",

@@ -375,8 +373,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         """
         Return True if the user can fully access the Superset database, False otherwise.
 
-        Note for Druid the database is akin to the Druid cluster.
-
         :param database: The Superset database
         :returns: Whether the user can fully access the Superset database
         """

@@ -392,9 +388,6 @@ class SupersetSecurityManager(  # pylint: disable=too-many-public-methods
         Return True if the user can fully access the schema associated with the Superset
         datasource, False otherwise.
 
-        Note for Druid datasources the database and schema are akin to the Druid cluster
-        and datasource name prefix respectively, i.e., [schema.]datasource.
-
         :param datasource: The Superset datasource
         :returns: Whether the user can fully access the datasource's schema
         """

@@ -102,11 +102,6 @@ from superset.utils.date_parser import parse_human_timedelta
 from superset.utils.dates import datetime_to_epoch, EPOCH
 from superset.utils.hashing import md5_sha_from_dict, md5_sha_from_str
 
-try:
-    from pydruid.utils.having import Having
-except ImportError:
-    pass
-
 if TYPE_CHECKING:
     from superset.connectors.base.models import BaseColumn, BaseDatasource
     from superset.models.sql_lab import Query

@@ -213,7 +208,6 @@ class QueryObjectFilterClause(TypedDict, total=False):
 
 
 class ExtraFiltersTimeColumnType(str, Enum):
-    GRANULARITY = "__granularity"
     TIME_COL = "__time_col"
     TIME_GRAIN = "__time_grain"
     TIME_ORIGIN = "__time_origin"

@@ -360,25 +354,6 @@ class ColumnSpec(NamedTuple):
     python_date_format: str | None = None
 
 
-try:
-    # Having might not have been imported.
-    class DimSelector(Having):
-        def __init__(self, **args: Any) -> None:
-            # Just a hack to prevent any exceptions
-            Having.__init__(self, type="equalTo", aggregation=None, value=None)
-
-            self.having = {
-                "having": {
-                    "type": "dimSelector",
-                    "dimension": args["dimension"],
-                    "value": args["value"],
-                }
-            }
-
-except NameError:
-    pass
-
-
 def flasher(msg: str, severity: str = "message") -> None:
     """Flask's flash if available, logging call if not"""
     try:

@@ -1144,11 +1119,7 @@ def merge_extra_form_data(form_data: dict[str, Any]) -> None:
         for fltr in append_filters
         if fltr
     )
-    if (
-        form_data.get("time_range")
-        and not form_data.get("granularity")
-        and not form_data.get("granularity_sqla")
-    ):
+    if form_data.get("time_range") and not form_data.get("granularity_sqla"):
         for adhoc_filter in form_data.get("adhoc_filters", []):
             if adhoc_filter.get("operator") == "TEMPORAL_RANGE":
                 adhoc_filter["comparator"] = form_data["time_range"]

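The simplified guard is easy to exercise in isolation. A runnable restatement of the logic above with invented form data:

```
# With granularity_sqla unset, a chart-level time_range is copied onto any
# TEMPORAL_RANGE adhoc filters (standalone restatement of the hunk above).
form_data = {
    "time_range": "Last month",
    "adhoc_filters": [
        {"operator": "TEMPORAL_RANGE", "subject": "ds", "comparator": "No filter"},
    ],
}
if form_data.get("time_range") and not form_data.get("granularity_sqla"):
    for adhoc_filter in form_data.get("adhoc_filters", []):
        if adhoc_filter.get("operator") == "TEMPORAL_RANGE":
            adhoc_filter["comparator"] = form_data["time_range"]
assert form_data["adhoc_filters"][0]["comparator"] == "Last month"
```
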
@@ -1172,7 +1143,6 @@ def merge_extra_filters(form_data: dict[str, Any]) -> None:
         "__time_range": "time_range",
         "__time_col": "granularity_sqla",
         "__time_grain": "time_grain_sqla",
-        "__granularity": "granularity",
     }
 
     # Grab list of existing filters 'keyed' on the column and operator

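With `__granularity` gone, only three pseudo-columns translate back onto form-data keys. A tiny runnable sketch; the dict name is hypothetical (the real mapping lives inside `merge_extra_filters`) and the filter value is invented:

```
# Hypothetical name for the mapping shown in the hunk above.
pseudo_column_to_form_key = {
    "__time_range": "time_range",
    "__time_col": "granularity_sqla",
    "__time_grain": "time_grain_sqla",
}
extra_filter = {"col": "__time_grain", "op": "in", "val": "P1W"}  # invented
assert pseudo_column_to_form_key[extra_filter["col"]] == "time_grain_sqla"
```
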
@@ -1394,21 +1364,22 @@ def ensure_path_exists(path: str) -> None:
 def convert_legacy_filters_into_adhoc(  # pylint: disable=invalid-name
     form_data: FormData,
 ) -> None:
-    mapping = {"having": "having_filters", "where": "filters"}
-
     if not form_data.get("adhoc_filters"):
         adhoc_filters: list[AdhocFilterClause] = []
         form_data["adhoc_filters"] = adhoc_filters
 
-        for clause, filters in mapping.items():
+        for clause in ("having", "where"):
             if clause in form_data and form_data[clause] != "":
                 adhoc_filters.append(form_data_to_adhoc(form_data, clause))
 
-            if filters in form_data:
-                for filt in filter(lambda x: x is not None, form_data[filters]):
-                    adhoc_filters.append(simple_filter_to_adhoc(filt, clause))
+        if "filters" in form_data:
+            adhoc_filters.extend(
+                simple_filter_to_adhoc(fltr, "where")
+                for fltr in form_data["filters"]
+                if fltr is not None
+            )
 
-    for key in ("filters", "having", "having_filters", "where"):
+    for key in ("filters", "having", "where"):
         if key in form_data:
             del form_data[key]

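The relocated tests further down exercise exactly this conversion. A usage sketch, assuming the function's home in `superset.utils.core` as the surrounding hunks suggest:

```
from superset.utils.core import convert_legacy_filters_into_adhoc  # assumed path

form_data = {
    "where": "a = 1",
    "having": "COUNT(1) = 1",
    "filters": [{"col": "a", "op": "in", "val": "someval"}],
}
convert_legacy_filters_into_adhoc(form_data)
# form_data["adhoc_filters"] now holds one SQL WHERE clause, one SQL HAVING
# clause and one SIMPLE WHERE filter; the legacy keys are deleted. A legacy
# "having_filters" key is no longer consumed (and no longer deleted).
```
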
@@ -1417,15 +1388,13 @@ def split_adhoc_filters_into_base_filters(  # pylint: disable=invalid-name
     form_data: FormData,
 ) -> None:
     """
-    Mutates form data to restructure the adhoc filters in the form of the four base
-    filters, `where`, `having`, `filters`, and `having_filters` which represent
-    free form where sql, free form having sql, structured where clauses and structured
-    having clauses.
+    Mutates form data to restructure the adhoc filters in the form of the three base
+    filters, `where`, `having`, and `filters` which represent free form where sql,
+    free form having sql, and structured where clauses.
     """
     adhoc_filters = form_data.get("adhoc_filters")
     if isinstance(adhoc_filters, list):
         simple_where_filters = []
-        simple_having_filters = []
         sql_where_filters = []
         sql_having_filters = []
         for adhoc_filter in adhoc_filters:

@@ -1440,14 +1409,6 @@ def split_adhoc_filters_into_base_filters(  # pylint: disable=invalid-name
                         "val": adhoc_filter.get("comparator"),
                     }
                 )
-            elif clause == "HAVING":
-                simple_having_filters.append(
-                    {
-                        "col": adhoc_filter.get("subject"),
-                        "op": adhoc_filter.get("operator"),
-                        "val": adhoc_filter.get("comparator"),
-                    }
-                )
             elif expression_type == "SQL":
                 sql_expression = adhoc_filter.get("sqlExpression")
                 sql_expression = sanitize_clause(sql_expression)

@@ -1457,7 +1418,6 @@ def split_adhoc_filters_into_base_filters(  # pylint: disable=invalid-name
                     sql_having_filters.append(sql_expression)
         form_data["where"] = " AND ".join([f"({sql})" for sql in sql_where_filters])
         form_data["having"] = " AND ".join([f"({sql})" for sql in sql_having_filters])
-        form_data["having_filters"] = simple_having_filters
         form_data["filters"] = simple_where_filters

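Going the other direction, splitting adhoc filters now yields only three base keys. A sketch under the same module assumption as above, with invented filter contents:

```
from superset.utils.core import split_adhoc_filters_into_base_filters  # assumed path

form_data = {
    "adhoc_filters": [
        {"expressionType": "SIMPLE", "clause": "WHERE",
         "subject": "gender", "operator": "==", "comparator": "boy"},
        {"expressionType": "SQL", "clause": "HAVING",
         "sqlExpression": "COUNT(*) > 10"},
    ]
}
split_adhoc_filters_into_base_filters(form_data)
# Expected additions (no "having_filters" key anymore):
#   form_data["filters"] == [{"col": "gender", "op": "==", "val": "boy"}]
#   form_data["having"] == "(COUNT(*) > 10)"
#   form_data["where"] == ""
```
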
@@ -40,7 +40,7 @@ def export_schema_to_dict(back_references: bool) -> dict[str, Any]:
 def export_to_dict(
     session: Session, recursive: bool, back_references: bool, include_defaults: bool
 ) -> dict[str, Any]:
-    """Exports databases and druid clusters to a dictionary"""
+    """Exports databases to a dictionary"""
     logger.info("Starting export")
     dbs = session.query(Database)
     databases = [

@@ -238,7 +238,7 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
         {
             'role_name': '{role_name}',
             'database': [{
-                'datasource_type': '{table|druid}',
+                'datasource_type': '{table}',
                 'name': '{database_name}',
                 'schema': [{
                     'name': '{schema_name}',

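Since `datasource_type` can now only be `table`, a request body for this endpoint reduces to the following (role and database names invented; the schema shape mirrors the test fixture later in this diff):

```
# Illustrative role-permissions payload after the change.
payload = {
    "role_name": "examples_reader",
    "database": [
        {
            "datasource_type": "table",
            "name": "examples",
            "schema": [{"name": "", "datasources": ["birth_names"]}],
        }
    ],
}
```
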
@@ -254,7 +254,7 @@ def get_datasource_info(
     This function allows supporting both without duplicating code
 
     :param datasource_id: The datasource ID
-    :param datasource_type: The datasource type, i.e., 'druid' or 'table'
+    :param datasource_type: The datasource type
     :param form_data: The URL form data
     :returns: The datasource ID and type
     :raises SupersetException: If the datasource no longer exists

@@ -468,7 +468,7 @@ def check_datasource_perms(
     This function takes `self` since it must have the same signature as the
     the decorated method.
 
-    :param datasource_type: The datasource type, i.e., 'druid' or 'table'
+    :param datasource_type: The datasource type
     :param datasource_id: The datasource ID
     :raises SupersetSecurityException: If the user cannot access the resource
     """

@@ -359,9 +359,7 @@ class BaseViz:  # pylint: disable=too-many-public-methods
             del groupby[groupby_labels.index(DTTM_ALIAS)]
             is_timeseries = True
 
-        granularity = self.form_data.get("granularity") or self.form_data.get(
-            "granularity_sqla"
-        )
+        granularity = self.form_data.get("granularity_sqla")
         limit = int(self.form_data.get("limit") or 0)
         timeseries_limit_metric = self.form_data.get("timeseries_limit_metric")
 

@@ -772,12 +770,8 @@ class TableViz(BaseViz):
     @deprecated(deprecated_in="3.0")
     def should_be_timeseries(self) -> bool:
         # TODO handle datasource-type-specific code in datasource
-        conditions_met = (
-            self.form_data.get("granularity")
-            and self.form_data.get("granularity") != "all"
-        ) or (
-            self.form_data.get("granularity_sqla")
-            and self.form_data.get("time_grain_sqla")
+        conditions_met = self.form_data.get("granularity_sqla") and self.form_data.get(
+            "time_grain_sqla"
         )
         if self.form_data.get("include_time") and not conditions_met:
             raise QueryObjectValidationError(

@@ -981,11 +975,9 @@ class CalHeatmapViz(BaseViz):
             "month": "P1M",
             "year": "P1Y",
         }
-        time_grain = mapping[self.form_data.get("subdomain_granularity", "min")]
-        if self.datasource.type == "druid":
-            query_obj["granularity"] = time_grain
-        else:
-            query_obj["extras"]["time_grain_sqla"] = time_grain
+        query_obj["extras"]["time_grain_sqla"] = mapping[
+            self.form_data.get("subdomain_granularity", "min")
+        ]
         return query_obj
 

@@ -1231,11 +1223,6 @@ class NVD3TimeSeriesViz(NVD3Viz):
 
     @deprecated(deprecated_in="3.0")
     def process_data(self, df: pd.DataFrame, aggregate: bool = False) -> VizData:
-        if self.form_data.get("granularity") == "all":
-            raise QueryObjectValidationError(
-                _("Pick a time granularity for your time series")
-            )
-
         if df.empty:
             return df
 

@@ -2398,9 +2385,7 @@ class DeckScatterViz(BaseDeckGLViz):
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
         # pylint: disable=attribute-defined-outside-init
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         self.point_radius_fixed = self.form_data.get("point_radius_fixed") or {
             "type": "fix",
             "value": 500,

@@ -2453,9 +2438,7 @@ class DeckScreengrid(BaseDeckGLViz):
 
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         return super().query_obj()
 
     @deprecated(deprecated_in="3.0")

@@ -2526,9 +2509,7 @@ class DeckPathViz(BaseDeckGLViz):
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
         # pylint: disable=attribute-defined-outside-init
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         query_obj = super().query_obj()
         self.metric = self.form_data.get("metric")
         line_col = self.form_data.get("line_column")

@@ -2675,9 +2656,7 @@ class DeckArc(BaseDeckGLViz):
 
     @deprecated(deprecated_in="3.0")
     def query_obj(self) -> QueryObjectDict:
-        self.is_timeseries = bool(
-            self.form_data.get("time_grain_sqla") or self.form_data.get("granularity")
-        )
+        self.is_timeseries = bool(self.form_data.get("time_grain_sqla"))
         return super().query_obj()
 
     @deprecated(deprecated_in="3.0")

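All four deck.gl viz classes converge on the same rule: a chart is a time series exactly when a SQLA time grain is present. Restated as a tiny runnable check (form data invented):

```
def is_timeseries(form_data: dict) -> bool:
    # The shared post-change rule from the deck.gl query_obj methods above.
    return bool(form_data.get("time_grain_sqla"))

assert is_timeseries({"time_grain_sqla": "P1D"})
assert not is_timeseries({"granularity": "ds"})  # Druid-only key no longer counts
```
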
@@ -22,7 +22,7 @@ from superset.common.chart_data import ChartDataResultType
 from superset.utils.core import AnnotationType, DTTM_ALIAS
 
 query_birth_names = {
-    "extras": {"where": "", "time_grain_sqla": "P1D", "having_druid": []},
+    "extras": {"where": "", "time_grain_sqla": "P1D"},
     "columns": ["name"],
     "metrics": [{"label": "sum__num"}],
     "orderby": [("sum__num", False)],

@@ -38,7 +38,6 @@ query_birth_names = {
         {"col": "name", "op": "NOT IN", "val": ["<NULL>", '"abc"']},
     ],
     "having": "",
-    "having_filters": [],
     "where": "",
 }
 

@@ -67,11 +67,6 @@ ROLE_ALL_PERM_DATA = {
             "name": "examples",
             "schema": [{"name": "", "datasources": ["birth_names"]}],
         },
-        {
-            "datasource_type": "druid",
-            "name": "druid_test",
-            "schema": [{"name": "", "datasources": ["druid_ds_1", "druid_ds_2"]}],
-        },
     ],
 }
 

@@ -959,7 +959,6 @@ class TestGetChartDataApi(BaseTestChartDataApi):
             "filters": [],
             "extras": {
                 "having": "",
-                "having_druid": [],
                 "where": "",
             },
             "applied_time_extras": {},

@@ -188,6 +188,6 @@ def _get_energy_slices():
             "xscale_interval": "1",
             "yscale_interval": "1",
         },
-        "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","having_druid":[],"where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
+        "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
     },
 ]

@@ -550,7 +550,7 @@ chart_config: dict[str, Any] = {
         },
         "viz_type": "deck_path",
     },
-    "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","having_druid":[],"where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
+    "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}',
     "cache_timeout": None,
     "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
     "version": "1.0.0",

@@ -35,7 +35,6 @@ sample_query_context = {
             "time_grain_sqla": "P1D",
             "time_range_endpoints": ["inclusive", "exclusive"],
             "having": "",
-            "having_druid": [],
             "where": "",
         },
         "applied_time_extras": {},

@@ -93,7 +92,6 @@ sample_query_context = {
             "time_grain_sqla": "P1D",
             "time_range_endpoints": ["inclusive", "exclusive"],
             "having": "",
-            "having_druid": [],
             "where": "",
         },
         "applied_time_extras": {},

@@ -242,7 +242,6 @@ class TestUtils(SupersetTestCase):
                 {"col": "__time_col", "op": "in", "val": "birth_year"},
                 {"col": "__time_grain", "op": "in", "val": "years"},
                 {"col": "A", "op": "like", "val": "hello"},
-                {"col": "__granularity", "op": "in", "val": "90 seconds"},
             ]
         }
         expected = {

@@ -260,12 +259,10 @@ class TestUtils(SupersetTestCase):
             "time_range": "1 year ago :",
             "granularity_sqla": "birth_year",
             "time_grain_sqla": "years",
-            "granularity": "90 seconds",
             "applied_time_extras": {
                 "__time_range": "1 year ago :",
                 "__time_col": "birth_year",
                 "__time_grain": "years",
-                "__granularity": "90 seconds",
             },
         }
         merge_extra_filters(form_data)

@@ -634,38 +631,6 @@ class TestUtils(SupersetTestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEqual(form_data, expected)
 
-    def test_convert_legacy_filters_into_adhoc_having(self):
-        form_data = {"having": "COUNT(1) = 1"}
-        expected = {
-            "adhoc_filters": [
-                {
-                    "clause": "HAVING",
-                    "expressionType": "SQL",
-                    "filterOptionName": "683f1c26466ab912f75a00842e0f2f7b",
-                    "sqlExpression": "COUNT(1) = 1",
-                }
-            ]
-        }
-        convert_legacy_filters_into_adhoc(form_data)
-        self.assertEqual(form_data, expected)
-
-    def test_convert_legacy_filters_into_adhoc_having_filters(self):
-        form_data = {"having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}]}
-        expected = {
-            "adhoc_filters": [
-                {
-                    "clause": "HAVING",
-                    "comparator": 1,
-                    "expressionType": "SIMPLE",
-                    "filterOptionName": "967d0fb409f6d9c7a6c03a46cf933c9c",
-                    "operator": "==",
-                    "subject": "COUNT(1)",
-                }
-            ]
-        }
-        convert_legacy_filters_into_adhoc(form_data)
-        self.assertEqual(form_data, expected)
-
     def test_convert_legacy_filters_into_adhoc_present_and_empty(self):
         form_data = {"adhoc_filters": [], "where": "a = 1"}
         expected = {

@@ -681,6 +646,21 @@ class TestUtils(SupersetTestCase):
         convert_legacy_filters_into_adhoc(form_data)
         self.assertEqual(form_data, expected)
 
+    def test_convert_legacy_filters_into_adhoc_having(self):
+        form_data = {"having": "COUNT(1) = 1"}
+        expected = {
+            "adhoc_filters": [
+                {
+                    "clause": "HAVING",
+                    "expressionType": "SQL",
+                    "filterOptionName": "683f1c26466ab912f75a00842e0f2f7b",
+                    "sqlExpression": "COUNT(1) = 1",
+                }
+            ]
+        }
+        convert_legacy_filters_into_adhoc(form_data)
+        self.assertEqual(form_data, expected)
+
     def test_convert_legacy_filters_into_adhoc_present_and_nonempty(self):
         form_data = {
             "adhoc_filters": [

@@ -688,7 +668,6 @@ class TestUtils(SupersetTestCase):
             ],
             "filters": [{"col": "a", "op": "in", "val": "someval"}],
             "having": "COUNT(1) = 1",
-            "having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}],
         }
         expected = {
             "adhoc_filters": [