mirror of https://github.com/apache/superset.git
Fix for week_start_sunday and week_ending_saturday (#4911)
* Handle locked weeks
* Fix spelling
* Fix Druid
* Clean up unit tests
This commit is contained in:
parent
9c53323c93
commit
13da5a8742
|
@ -0,0 +1,58 @@
|
|||
import { it, describe } from 'mocha';
import { expect } from 'chai';
import { getPlaySliderParams } from '../../../src/modules/time';

// Unit tests for getPlaySliderParams, which computes the play-slider
// bounds (start/end), step size, initial window (values) and disabled
// flag from a list of epoch-millisecond timestamps plus a time grain.
describe('getPlaySliderParams', () => {
  it('is a function', () => {
    // Bug fix: the original called `assert.isFunction(...)`, but only
    // `expect` is imported from chai, so `assert` was an undefined
    // global and this test threw a ReferenceError.
    expect(getPlaySliderParams).to.be.a('function');
  });

  it('handles durations', () => {
    // Ten consecutive days, as epoch milliseconds.
    const timestamps = [
      new Date('2018-01-01'),
      new Date('2018-01-02'),
      new Date('2018-01-03'),
      new Date('2018-01-04'),
      new Date('2018-01-05'),
      new Date('2018-01-06'),
      new Date('2018-01-07'),
      new Date('2018-01-08'),
      new Date('2018-01-09'),
      new Date('2018-01-10'),
    ].map(d => d.getTime());
    // 'P2D' is a plain ISO 8601 duration (no reference point), so start
    // and end are simply snapped to the nearest 2-day boundaries.
    const { start, end, step, values, disabled } = getPlaySliderParams(timestamps, 'P2D');
    expect(new Date(start)).to.eql(new Date('2018-01-01'));
    expect(new Date(end)).to.eql(new Date('2018-01-11'));
    expect(step).to.equal(2 * 24 * 60 * 60 * 1000);
    // Initial window is [start, start + step].
    expect(values.map(v => new Date(v))).to.eql([
      new Date('2018-01-01'),
      new Date('2018-01-03'),
    ]);
    expect(disabled).to.equal(false);
  });

  it('handles intervals', () => {
    const timestamps = [
      new Date('2018-01-01'),
      new Date('2018-01-02'),
      new Date('2018-01-03'),
      new Date('2018-01-04'),
      new Date('2018-01-05'),
      new Date('2018-01-06'),
      new Date('2018-01-07'),
      new Date('2018-01-08'),
      new Date('2018-01-09'),
      new Date('2018-01-10'),
    ].map(d => d.getTime());
    // 1970-01-03 was a Saturday; the `duration/reference` interval form
    // locks week boundaries to that weekday rather than to epoch steps.
    const { start, end, step, values, disabled } = getPlaySliderParams(timestamps, 'P1W/1970-01-03T00:00:00Z');
    expect(new Date(start)).to.eql(new Date('2017-12-30')); // Saturday
    expect(new Date(end)).to.eql(new Date('2018-01-13')); // Saturday
    expect(step).to.equal(7 * 24 * 60 * 60 * 1000);
    expect(values.map(v => new Date(v))).to.eql([
      new Date('2017-12-30'),
      new Date('2018-01-06'),
    ]);
    expect(disabled).to.equal(false);
  });
});
|
|
@ -776,8 +776,8 @@ export const controls = {
|
|||
['P1D', '1 day'],
|
||||
['P7D', '7 days'],
|
||||
['P1W', 'week'],
|
||||
['P1W', 'week_starting_sunday'],
|
||||
['P1W', 'week_ending_saturday'],
|
||||
['week_starting_sunday', 'week starting Sunday'],
|
||||
['week_ending_saturday', 'week ending Saturday'],
|
||||
['P1M', 'month'],
|
||||
],
|
||||
description: t('The time granularity for the visualization. Note that you ' +
|
||||
|
|
|
@ -4,11 +4,31 @@ import parseIsoDuration from 'parse-iso-duration';
|
|||
export const getPlaySliderParams = function (timestamps, timeGrain) {
|
||||
let start = Math.min(...timestamps);
|
||||
let end = Math.max(...timestamps);
|
||||
let step;
|
||||
|
||||
// lock start and end to the closest steps
|
||||
const step = parseIsoDuration(timeGrain);
|
||||
start -= start % step;
|
||||
end += step - end % step;
|
||||
if (timeGrain.indexOf('/') > 0) {
|
||||
// Here, time grain is a time interval instead of a simple duration, either
|
||||
// `reference/duration` or `duration/reference`. We need to parse the
|
||||
// duration and make sure that start and end are in the right places. For
|
||||
// example, if `reference` is a Saturday and `duration` is 1 week (P1W)
|
||||
// then both start and end should be Saturdays.
|
||||
const parts = timeGrain.split('/', 2);
|
||||
let reference;
|
||||
if (parts[0].endsWith('Z')) { // ISO string
|
||||
reference = new Date(parts[0]).getTime();
|
||||
step = parseIsoDuration(parts[1]);
|
||||
} else {
|
||||
reference = new Date(parts[1]).getTime();
|
||||
step = parseIsoDuration(parts[0]);
|
||||
}
|
||||
start = reference + step * Math.floor((start - reference) / step);
|
||||
end = reference + step * (Math.floor((end - reference) / step) + 1);
|
||||
} else {
|
||||
// lock start and end to the closest steps
|
||||
step = parseIsoDuration(timeGrain);
|
||||
start -= start % step;
|
||||
end += step - end % step;
|
||||
}
|
||||
|
||||
const values = timeGrain != null ? [start, start + step] : [start, end];
|
||||
const disabled = timestamps.every(timestamp => timestamp === null);
|
||||
|
|
|
@ -446,6 +446,12 @@ class SqliteEngineSpec(BaseEngineSpec):
|
|||
Grain('month', _('month'),
|
||||
"DATE({col}, -strftime('%d', {col}) || ' days', '+1 day')",
|
||||
'P1M'),
|
||||
Grain('week_ending_saturday', _('week_ending_saturday'),
|
||||
"DATE({col}, 'weekday 6')",
|
||||
'P1W/1970-01-03T00:00:00Z'),
|
||||
Grain('week_start_sunday', _('week_start_sunday'),
|
||||
"DATE({col}, 'weekday 0', '-7 days')",
|
||||
'1969-12-28T00:00:00Z/P1W'),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -577,11 +583,11 @@ class PrestoEngineSpec(BaseEngineSpec):
|
|||
Grain('week_ending_saturday', _('week_ending_saturday'),
|
||||
"date_add('day', 5, date_trunc('week', date_add('day', 1, "
|
||||
'CAST({col} AS TIMESTAMP))))',
|
||||
'P1W'),
|
||||
'P1W/1970-01-03T00:00:00Z'),
|
||||
Grain('week_start_sunday', _('week_start_sunday'),
|
||||
"date_add('day', -1, date_trunc('week', "
|
||||
"date_add('day', 1, CAST({col} AS TIMESTAMP))))",
|
||||
'P1W'),
|
||||
'1969-12-28T00:00:00Z/P1W'),
|
||||
Grain('year', _('year'),
|
||||
"date_trunc('year', CAST({col} AS TIMESTAMP))",
|
||||
'P1Y'),
|
||||
|
@ -1169,11 +1175,11 @@ class AthenaEngineSpec(BaseEngineSpec):
|
|||
Grain('week_ending_saturday', _('week_ending_saturday'),
|
||||
"date_add('day', 5, date_trunc('week', date_add('day', 1, "
|
||||
'CAST({col} AS TIMESTAMP))))',
|
||||
'P1W'),
|
||||
'P1W/1970-01-03T00:00:00Z'),
|
||||
Grain('week_start_sunday', _('week_start_sunday'),
|
||||
"date_add('day', -1, date_trunc('week', "
|
||||
"date_add('day', 1, CAST({col} AS TIMESTAMP))))",
|
||||
'P1W'),
|
||||
'1969-12-28T00:00:00Z/P1W'),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
|
Loading…
Reference in New Issue