SIP-23: Persist SQL Lab state in the backend (#8060)

* Squash all commits from VIZ-689

* Fix javascript

* Fix black

* WIP fixing javascript

* Add feature flag SQLLAB_BACKEND_PERSISTENCE

* Use feature flag

* Small fix

* Fix lint

* Fix setQueryEditorSql

* Improve unit tests

* Add unit tests for backend sync

* Rename results to description in table_schema

* Add integration tests

* Fix black

* Migrate query history

* Handle no results backend

* Small improvement

* Address comments

* Store SQL directly instead of reference to query

* Small fixes

* Fix clone tab

* Fix remove query

* Cascade delete

* Cascade deletes

* Fix tab closing

* Small fixes

* Small fix

* Fix error when deleting tab

* Catch 404 when tab is deleted

* Remove tables from state on tab close

* Add index, autoincrement and cascade

* Prevent duplicate table schemas

* Fix mapStateToProps

* Fix lint

* Fix head

* Fix javascript

* Fix mypy

* Fix isort

* Fix javascript

* Fix merge

* Fix heads

* Fix heads

* Fix displayLimit

* Recreate migration script trying to fix heads

* Fix heads
This commit is contained in:
Beto Dealmeida 2019-11-14 09:44:57 -08:00 committed by GitHub
parent 59bc220602
commit d66bc5ad90
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 2814 additions and 347 deletions

File diff suppressed because it is too large Load Diff

View File

@ -39,7 +39,7 @@ describe('TabbedSqlEditors', () => {
'newEditorId',
];
const tables = [Object.assign({}, table[0], {
const tables = [Object.assign({}, table, {
dataPreviewQueryId: 'B1-VQU1zW',
queryEditorId: 'newEditorId',
})];
@ -58,6 +58,7 @@ describe('TabbedSqlEditors', () => {
'B1-VQU1zW': {
id: 'B1-VQU1zW',
sqlEditorId: 'newEditorId',
tableName: 'ab_user',
},
};
const mockedProps = {
@ -133,7 +134,7 @@ describe('TabbedSqlEditors', () => {
});
it('should update queriesArray and dataPreviewQueries', () => {
expect(wrapper.state().queriesArray.slice(-1)[0]).toBe(queries['B1-VQU1zW']);
expect(wrapper.state().dataPreviewQueries.slice(-1)[0]).toBe(queries['B1-VQU1zW']);
expect(wrapper.state().dataPreviewQueries.slice(-1)[0]).toEqual(queries['B1-VQU1zW']);
});
});
it('should rename Tab', () => {
@ -171,16 +172,21 @@ describe('TabbedSqlEditors', () => {
.toBe(queryEditors[0]);
});
it('should handle select', () => {
const mockEvent = {
target: {
getAttribute: () => null,
},
};
wrapper = getWrapper();
sinon.spy(wrapper.instance(), 'newQueryEditor');
sinon.stub(wrapper.instance().props.actions, 'setActiveQueryEditor');
sinon.stub(wrapper.instance().props.actions, 'switchQueryEditor');
wrapper.instance().handleSelect('add_tab');
wrapper.instance().handleSelect('add_tab', mockEvent);
expect(wrapper.instance().newQueryEditor.callCount).toBe(1);
wrapper.instance().handleSelect('123');
expect(wrapper.instance().props.actions.setActiveQueryEditor.getCall(0).args[0].id)
.toContain(123);
// cannot switch to current tab, switchQueryEditor never gets called
wrapper.instance().handleSelect('dfsadfs', mockEvent);
expect(wrapper.instance().props.actions.switchQueryEditor.callCount).toEqual(0);
wrapper.instance().newQueryEditor.restore();
});
it('should render', () => {

View File

@ -19,12 +19,29 @@
/* eslint no-unused-expressions: 0 */
import sinon from 'sinon';
import fetchMock from 'fetch-mock';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import shortid from 'shortid';
import * as featureFlags from 'src/featureFlags';
import * as actions from '../../../../src/SqlLab/actions/sqlLab';
import { query } from '../fixtures';
import { defaultQueryEditor, query } from '../fixtures';
const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);
describe('async actions', () => {
const mockBigNumber = '9223372036854775807';
const queryEditor = {
id: 'abcd',
autorun: false,
dbId: null,
latestQueryId: null,
selectedText: null,
sql: 'SELECT *\nFROM\nWHERE',
title: 'Untitled Query',
schemaOptions: [{ value: 'main', label: 'main', title: 'main' }],
};
let dispatch;
@ -34,23 +51,31 @@ describe('async actions', () => {
afterEach(fetchMock.resetHistory);
const fetchQueryEndpoint = 'glob:*/superset/results/*';
fetchMock.get(
fetchQueryEndpoint,
JSON.stringify({ data: mockBigNumber, query: { sqlEditorId: 'dfsadfs' } }),
);
const runQueryEndpoint = 'glob:*/superset/sql_json/*';
fetchMock.post(runQueryEndpoint, '{ "data": ' + mockBigNumber + ' }');
describe('saveQuery', () => {
const saveQueryEndpoint = 'glob:*/savedqueryviewapi/api/create';
fetchMock.post(saveQueryEndpoint, 'ok');
it('posts to the correct url', () => {
expect.assertions(1);
const thunk = actions.saveQuery(query);
return thunk((/* mockDispatch */) => ({})).then(() => {
const store = mockStore({});
return store.dispatch(actions.saveQuery(query)).then(() => {
expect(fetchMock.calls(saveQueryEndpoint)).toHaveLength(1);
});
});
it('posts the correct query object', () => {
const thunk = actions.saveQuery(query);
return thunk((/* mockDispatch */) => ({})).then(() => {
const store = mockStore({});
return store.dispatch(actions.saveQuery(query)).then(() => {
const call = fetchMock.calls(saveQueryEndpoint)[0];
const formData = call[1].body;
Object.keys(query).forEach((key) => {
@ -61,12 +86,9 @@ describe('async actions', () => {
});
describe('fetchQueryResults', () => {
const fetchQueryEndpoint = 'glob:*/superset/results/*';
fetchMock.get(fetchQueryEndpoint, '{ "data": ' + mockBigNumber + ' }');
const makeRequest = () => {
const actionThunk = actions.fetchQueryResults(query);
return actionThunk(dispatch);
const request = actions.fetchQueryResults(query);
return request(dispatch);
};
it('makes the fetch request', () => {
@ -92,31 +114,40 @@ describe('async actions', () => {
expect(dispatch.getCall(1).lastArg.results.data.toString()).toBe(mockBigNumber);
}));
it('calls querySuccess on fetch success', () =>
makeRequest().then(() => {
expect(dispatch.callCount).toBe(2);
expect(dispatch.getCall(1).args[0].type).toBe(actions.QUERY_SUCCESS);
}));
it('calls querySuccess on fetch success', () => {
expect.assertions(1);
const store = mockStore({});
const expectedActionTypes = [
actions.REQUEST_QUERY_RESULTS,
actions.QUERY_SUCCESS,
];
return store.dispatch(actions.fetchQueryResults(query)).then(() => {
expect(store.getActions().map(a => a.type)).toEqual(expectedActionTypes);
});
});
it('calls queryFailed on fetch error', () => {
expect.assertions(2);
expect.assertions(1);
fetchMock.get(
fetchQueryEndpoint,
{ throws: { error: 'error text' } },
{ overwriteRoutes: true },
);
return makeRequest().then(() => {
expect(dispatch.callCount).toBe(2);
expect(dispatch.getCall(1).args[0].type).toBe(actions.QUERY_FAILED);
const store = mockStore({});
const expectedActionTypes = [
actions.REQUEST_QUERY_RESULTS,
actions.QUERY_FAILED,
];
return store.dispatch(actions.fetchQueryResults(query)).then(() => {
expect(store.getActions().map(a => a.type)).toEqual(expectedActionTypes);
});
});
});
describe('runQuery', () => {
const runQueryEndpoint = 'glob:*/superset/sql_json/';
fetchMock.post(runQueryEndpoint, '{ "data": ' + mockBigNumber + ' }');
const makeRequest = () => {
const request = actions.runQuery(query);
return request(dispatch);
@ -146,17 +177,20 @@ describe('async actions', () => {
}));
it('calls querySuccess on fetch success', () => {
expect.assertions(3);
expect.assertions(1);
return makeRequest().then(() => {
expect(dispatch.callCount).toBe(2);
expect(dispatch.getCall(0).args[0].type).toBe(actions.START_QUERY);
expect(dispatch.getCall(1).args[0].type).toBe(actions.QUERY_SUCCESS);
const store = mockStore({});
const expectedActionTypes = [
actions.START_QUERY,
actions.QUERY_SUCCESS,
];
return store.dispatch(actions.runQuery(query)).then(() => {
expect(store.getActions().map(a => a.type)).toEqual(expectedActionTypes);
});
});
it('calls queryFailed on fetch error', () => {
expect.assertions(2);
expect.assertions(1);
fetchMock.post(
runQueryEndpoint,
@ -164,9 +198,13 @@ describe('async actions', () => {
{ overwriteRoutes: true },
);
return makeRequest().then(() => {
expect(dispatch.callCount).toBe(2);
expect(dispatch.getCall(1).args[0].type).toBe(actions.QUERY_FAILED);
const store = mockStore({});
const expectedActionTypes = [
actions.START_QUERY,
actions.QUERY_FAILED,
];
return store.dispatch(actions.runQuery(query)).then(() => {
expect(store.getActions().map(a => a.type)).toEqual(expectedActionTypes);
});
});
});
@ -206,4 +244,516 @@ describe('async actions', () => {
});
});
});
describe('cloneQueryToNewTab', () => {
let stub;
beforeEach(() => {
stub = sinon.stub(shortid, 'generate').returns('abcd');
});
afterEach(() => {
stub.restore();
});
it('creates new query editor', () => {
expect.assertions(1);
const id = 'id';
const state = {
sqlLab: {
tabHistory: [id],
queryEditors: [{ id, title: 'Dummy query editor' }],
},
};
const store = mockStore(state);
const expectedActions = [{
type: actions.ADD_QUERY_EDITOR,
queryEditor: {
title: 'Copy of Dummy query editor',
dbId: 1,
schema: null,
autorun: true,
sql: 'SELECT * FROM something',
queryLimit: undefined,
maxRow: undefined,
id: 'abcd',
},
}];
return store.dispatch(actions.cloneQueryToNewTab(query)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
});
});
});
describe('addQueryEditor', () => {
let stub;
beforeEach(() => {
stub = sinon.stub(shortid, 'generate').returns('abcd');
});
afterEach(() => {
stub.restore();
});
it('creates new query editor', () => {
expect.assertions(1);
const store = mockStore({});
const expectedActions = [{
type: actions.ADD_QUERY_EDITOR,
queryEditor,
}];
return store.dispatch(actions.addQueryEditor(defaultQueryEditor)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
});
});
});
describe('backend sync', () => {
const updateTabStateEndpoint = 'glob:*/tabstateview/*';
fetchMock.put(updateTabStateEndpoint, {});
fetchMock.delete(updateTabStateEndpoint, {});
fetchMock.post(updateTabStateEndpoint, JSON.stringify({ id: 1 }));
const updateTableSchemaEndpoint = 'glob:*/tableschemaview/*';
fetchMock.put(updateTableSchemaEndpoint, {});
fetchMock.delete(updateTableSchemaEndpoint, {});
fetchMock.post(updateTableSchemaEndpoint, JSON.stringify({ id: 1 }));
const getTableMetadataEndpoint = 'glob:*/superset/table/*';
fetchMock.get(getTableMetadataEndpoint, {});
const getExtraTableMetadataEndpoint = 'glob:*/superset/extra_table_metadata/*';
fetchMock.get(getExtraTableMetadataEndpoint, {});
let isFeatureEnabledMock;
beforeAll(() => {
isFeatureEnabledMock = jest.spyOn(featureFlags, 'isFeatureEnabled')
.mockImplementation(feature => feature === 'SQLLAB_BACKEND_PERSISTENCE');
});
afterAll(() => {
isFeatureEnabledMock.mockRestore();
});
afterEach(fetchMock.resetHistory);
describe('querySuccess', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const store = mockStore({});
const results = { query: { sqlEditorId: 'abcd' } };
const expectedActions = [
{
type: actions.QUERY_SUCCESS,
query,
results,
},
];
return store.dispatch(actions.querySuccess(query, results)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('fetchQueryResults', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const results = {
data: mockBigNumber,
query: { sqlEditorId: 'abcd' },
query_id: 'efgh',
};
fetchMock.get(
fetchQueryEndpoint,
JSON.stringify(results),
{ overwriteRoutes: true },
);
const store = mockStore({});
const expectedActions = [
{
type: actions.REQUEST_QUERY_RESULTS,
query,
},
// missing below
{
type: actions.QUERY_SUCCESS,
query,
results,
},
];
return store.dispatch(actions.fetchQueryResults(query)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('addQueryEditor', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const store = mockStore({});
const expectedActions = [
{
type: actions.ADD_QUERY_EDITOR,
queryEditor: { ...queryEditor, id: '1' },
},
];
return store.dispatch(actions.addQueryEditor(queryEditor)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('setActiveQueryEditor', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const store = mockStore({});
const expectedActions = [
{
type: actions.SET_ACTIVE_QUERY_EDITOR,
queryEditor,
},
];
return store.dispatch(actions.setActiveQueryEditor(queryEditor)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('removeQueryEditor', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const store = mockStore({});
const expectedActions = [
{
type: actions.REMOVE_QUERY_EDITOR,
queryEditor,
},
];
return store.dispatch(actions.removeQueryEditor(queryEditor)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('queryEditorSetDb', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const dbId = 42;
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SETDB,
queryEditor,
dbId,
},
];
return store.dispatch(actions.queryEditorSetDb(queryEditor, dbId)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('queryEditorSetSchema', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const schema = 'schema';
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SET_SCHEMA,
queryEditor,
schema,
},
];
return store.dispatch(actions.queryEditorSetSchema(queryEditor, schema)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('queryEditorSetAutorun', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const autorun = true;
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SET_AUTORUN,
queryEditor,
autorun,
},
];
return store.dispatch(actions.queryEditorSetAutorun(queryEditor, autorun)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('queryEditorSetTitle', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const title = 'title';
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SET_TITLE,
queryEditor,
title,
},
];
return store.dispatch(actions.queryEditorSetTitle(queryEditor, title)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('queryEditorSetSql', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const sql = 'SELECT * ';
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SET_SQL,
queryEditor,
sql,
},
];
return store.dispatch(actions.queryEditorSetSql(queryEditor, sql)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('queryEditorSetQueryLimit', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const queryLimit = 10;
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SET_QUERY_LIMIT,
queryEditor,
queryLimit,
},
];
return store.dispatch(
actions.queryEditorSetQueryLimit(queryEditor, queryLimit))
.then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('queryEditorSetTemplateParams', () => {
it('updates the tab state in the backend', () => {
expect.assertions(2);
const templateParams = '{"foo": "bar"}';
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SET_TEMPLATE_PARAMS,
queryEditor,
templateParams,
},
];
return store.dispatch(
actions.queryEditorSetTemplateParams(queryEditor, templateParams))
.then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(1);
});
});
});
describe('addTable', () => {
it('updates the table schema state in the backend', () => {
expect.assertions(5);
const results = {
data: mockBigNumber,
query: { sqlEditorId: 'null' },
query_id: 'efgh',
};
fetchMock.post(
runQueryEndpoint,
JSON.stringify(results),
{ overwriteRoutes: true },
);
const tableName = 'table';
const schemaName = 'schema';
const store = mockStore({});
const expectedActionTypes = [
actions.MERGE_TABLE, // addTable
actions.MERGE_TABLE, // getTableMetadata
actions.START_QUERY, // runQuery (data preview)
actions.MERGE_TABLE, // getTableExtendedMetadata
actions.QUERY_SUCCESS, // querySuccess
actions.MERGE_TABLE, // addTable
];
return store.dispatch(
actions.addTable(query, tableName, schemaName))
.then(() => {
expect(store.getActions().map(a => a.type)).toEqual(expectedActionTypes);
expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(1);
expect(fetchMock.calls(getTableMetadataEndpoint)).toHaveLength(1);
expect(fetchMock.calls(getExtraTableMetadataEndpoint)).toHaveLength(1);
// tab state is not updated, since the query is a data preview
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(0);
});
});
});
describe('expandTable', () => {
it('updates the table schema state in the backend', () => {
expect.assertions(2);
const table = { id: 1 };
const store = mockStore({});
const expectedActions = [
{
type: actions.EXPAND_TABLE,
table,
},
];
return store.dispatch(actions.expandTable(table)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(1);
});
});
});
describe('collapseTable', () => {
it('updates the table schema state in the backend', () => {
expect.assertions(2);
const table = { id: 1 };
const store = mockStore({});
const expectedActions = [
{
type: actions.COLLAPSE_TABLE,
table,
},
];
return store.dispatch(actions.collapseTable(table)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(1);
});
});
});
describe('removeTable', () => {
it('updates the table schema state in the backend', () => {
expect.assertions(2);
const table = { id: 1 };
const store = mockStore({});
const expectedActions = [
{
type: actions.REMOVE_TABLE,
table,
},
];
return store.dispatch(actions.removeTable(table)).then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(1);
});
});
});
describe('migrateQueryEditorFromLocalStorage', () => {
it('updates the tab state in the backend', () => {
expect.assertions(3);
const results = {
data: mockBigNumber,
query: { sqlEditorId: 'null' },
query_id: 'efgh',
};
fetchMock.post(
runQueryEndpoint,
JSON.stringify(results),
{ overwriteRoutes: true },
);
const tables = [
{ id: 'one', dataPreviewQueryId: 'previewOne' },
{ id: 'two', dataPreviewQueryId: 'previewTwo' },
];
const queries = [
{ ...query, id: 'previewOne' },
{ ...query, id: 'previewTwo' },
];
const store = mockStore({});
const expectedActions = [
{
type: actions.MIGRATE_QUERY_EDITOR,
oldQueryEditor: queryEditor,
// new qe has a different id
newQueryEditor: { ...queryEditor, id: '1' },
},
{
type: actions.MIGRATE_TAB_HISTORY,
newId: '1',
oldId: 'abcd',
},
{
type: actions.MIGRATE_TABLE,
oldTable: tables[0],
// new table has a different id and points to new query editor
newTable: { ...tables[0], id: 1, queryEditorId: '1' },
},
{
type: actions.MIGRATE_TABLE,
oldTable: tables[1],
// new table has a different id and points to new query editor
newTable: { ...tables[1], id: 1, queryEditorId: '1' },
},
{
type: actions.MIGRATE_QUERY,
queryId: 'previewOne',
queryEditorId: '1',
},
{
type: actions.MIGRATE_QUERY,
queryId: 'previewTwo',
queryEditorId: '1',
},
];
return store.dispatch(
actions.migrateQueryEditorFromLocalStorage(queryEditor, tables, queries))
.then(() => {
expect(store.getActions()).toEqual(expectedActions);
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(3);
// query editor has 2 tables loaded in the schema viewer
expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(2);
});
});
});
});
});

View File

@ -19,36 +19,11 @@
import sqlLabReducer from '../../../../src/SqlLab/reducers/sqlLab';
import * as actions from '../../../../src/SqlLab/actions/sqlLab';
import { table, initialState as mockState } from '../fixtures';
import { now } from '../../../../src/modules/dates';
const initialState = mockState.sqlLab;
describe('sqlLabReducer', () => {
describe('CLONE_QUERY_TO_NEW_TAB', () => {
const testQuery = { sql: 'SELECT * FROM...', dbId: 1, id: 'flasj233' };
let newState = {
...initialState,
queries: { [testQuery.id]: testQuery },
};
beforeEach(() => {
newState = sqlLabReducer(newState, actions.cloneQueryToNewTab(testQuery));
});
it('should have at most one more tab', () => {
expect(newState.queryEditors).toHaveLength(2);
});
it('should have the same SQL as the cloned query', () => {
expect(newState.queryEditors[1].sql).toBe(testQuery.sql);
});
it('should prefix the new tab title with "Copy of"', () => {
expect(newState.queryEditors[1].title).toContain('Copy of');
});
it('should push the cloned tab onto tab history stack', () => {
expect(newState.tabHistory[1]).toBe(newState.queryEditors[1].id);
});
});
describe('Query editors actions', () => {
let newState;
let defaultQueryEditor;
@ -56,59 +31,107 @@ describe('sqlLabReducer', () => {
beforeEach(() => {
newState = { ...initialState };
defaultQueryEditor = newState.queryEditors[0];
qe = Object.assign({}, defaultQueryEditor);
newState = sqlLabReducer(newState, actions.addQueryEditor(qe));
qe = newState.queryEditors[newState.queryEditors.length - 1];
const action = {
type: actions.ADD_QUERY_EDITOR,
queryEditor: { ...initialState.queryEditors[0], id: 'abcd' },
};
newState = sqlLabReducer(newState, action);
qe = newState.queryEditors.find(e => e.id === 'abcd');
});
it('should add a query editor', () => {
expect(newState.queryEditors).toHaveLength(2);
});
it('should remove a query editor', () => {
expect(newState.queryEditors).toHaveLength(2);
newState = sqlLabReducer(newState, actions.removeQueryEditor(qe));
const action = {
type: actions.REMOVE_QUERY_EDITOR,
queryEditor: qe,
};
newState = sqlLabReducer(newState, action);
expect(newState.queryEditors).toHaveLength(1);
});
it('should set q query editor active', () => {
newState = sqlLabReducer(newState, actions.addQueryEditor(qe));
newState = sqlLabReducer(newState, actions.setActiveQueryEditor(defaultQueryEditor));
const addQueryEditorAction = {
type: actions.ADD_QUERY_EDITOR,
queryEditor: { ...initialState.queryEditors[0], id: 'abcd' },
};
newState = sqlLabReducer(newState, addQueryEditorAction);
const setActiveQueryEditorAction = {
type: actions.SET_ACTIVE_QUERY_EDITOR,
queryEditor: defaultQueryEditor,
};
newState = sqlLabReducer(newState, setActiveQueryEditorAction);
expect(newState.tabHistory[newState.tabHistory.length - 1]).toBe(defaultQueryEditor.id);
});
it('should not fail while setting DB', () => {
const dbId = 9;
newState = sqlLabReducer(newState, actions.queryEditorSetDb(qe, dbId));
const action = {
type: actions.QUERY_EDITOR_SETDB,
queryEditor: qe,
dbId,
};
newState = sqlLabReducer(newState, action);
expect(newState.queryEditors[1].dbId).toBe(dbId);
});
it('should not fail while setting schema', () => {
const schema = 'foo';
newState = sqlLabReducer(newState, actions.queryEditorSetSchema(qe, schema));
const action = {
type: actions.QUERY_EDITOR_SET_SCHEMA,
queryEditor: qe,
schema,
};
newState = sqlLabReducer(newState, action);
expect(newState.queryEditors[1].schema).toBe(schema);
});
it('should not fail while setting autorun ', () => {
newState = sqlLabReducer(newState, actions.queryEditorSetAutorun(qe, false));
const action = {
type: actions.QUERY_EDITOR_SET_AUTORUN,
queryEditor: qe,
};
newState = sqlLabReducer(newState, { ...action, autorun: false });
expect(newState.queryEditors[1].autorun).toBe(false);
newState = sqlLabReducer(newState, actions.queryEditorSetAutorun(qe, true));
newState = sqlLabReducer(newState, { ...action, autorun: true });
expect(newState.queryEditors[1].autorun).toBe(true);
});
it('should not fail while setting title', () => {
const title = 'a new title';
newState = sqlLabReducer(newState, actions.queryEditorSetTitle(qe, title));
const action = {
type: actions.QUERY_EDITOR_SET_TITLE,
queryEditor: qe,
title,
};
newState = sqlLabReducer(newState, action);
expect(newState.queryEditors[1].title).toBe(title);
});
it('should not fail while setting Sql', () => {
const sql = 'SELECT nothing from dev_null';
newState = sqlLabReducer(newState, actions.queryEditorSetSql(qe, sql));
const action = {
type: actions.QUERY_EDITOR_SET_SQL,
queryEditor: qe,
sql,
};
newState = sqlLabReducer(newState, action);
expect(newState.queryEditors[1].sql).toBe(sql);
});
it('should not fail while setting queryLimit', () => {
const queryLimit = 101;
newState = sqlLabReducer(newState, actions.queryEditorSetQueryLimit(qe, queryLimit));
const action = {
type: actions.QUERY_EDITOR_SET_QUERY_LIMIT,
queryEditor: qe,
queryLimit,
};
newState = sqlLabReducer(newState, action);
expect(newState.queryEditors[1].queryLimit).toEqual(queryLimit);
});
it('should set selectedText', () => {
const selectedText = 'TEST';
const action = {
type: actions.QUERY_EDITOR_SET_SELECTED_TEXT,
queryEditor: newState.queryEditors[0],
sql: selectedText,
};
expect(newState.queryEditors[0].selectedText).toBeNull();
newState = sqlLabReducer(
newState, actions.queryEditorSetSelectedText(newState.queryEditors[0], 'TEST'));
newState = sqlLabReducer(newState, action);
expect(newState.queryEditors[0].selectedText).toBe(selectedText);
});
});
@ -117,7 +140,11 @@ describe('sqlLabReducer', () => {
let newTable;
beforeEach(() => {
newTable = Object.assign({}, table);
newState = sqlLabReducer(initialState, actions.mergeTable(newTable));
const action = {
type: actions.MERGE_TABLE,
table: newTable,
};
newState = sqlLabReducer(initialState, action);
newTable = newState.tables[0];
});
it('should add a table', () => {
@ -127,42 +154,91 @@ describe('sqlLabReducer', () => {
it('should merge the table attributes', () => {
// Merging the extra attribute
newTable.extra = true;
newState = sqlLabReducer(newState, actions.mergeTable(newTable));
const action = {
type: actions.MERGE_TABLE,
table: newTable,
};
newState = sqlLabReducer(newState, action);
expect(newState.tables).toHaveLength(1);
expect(newState.tables[0].extra).toBe(true);
});
it('should expand and collapse a table', () => {
newState = sqlLabReducer(newState, actions.collapseTable(newTable));
const collapseTableAction = {
type: actions.COLLAPSE_TABLE,
table: newTable,
};
newState = sqlLabReducer(newState, collapseTableAction);
expect(newState.tables[0].expanded).toBe(false);
newState = sqlLabReducer(newState, actions.expandTable(newTable));
const expandTableAction = {
type: actions.EXPAND_TABLE,
table: newTable,
};
newState = sqlLabReducer(newState, expandTableAction);
expect(newState.tables[0].expanded).toBe(true);
});
it('should remove a table', () => {
newState = sqlLabReducer(newState, actions.removeTable(newTable));
const action = {
type: actions.REMOVE_TABLE,
table: newTable,
};
newState = sqlLabReducer(newState, action);
expect(newState.tables).toHaveLength(0);
});
});
describe('Run Query', () => {
let newState;
let query;
let newQuery;
beforeEach(() => {
newState = { ...initialState };
newQuery = { ...query };
query = {
id: 'abcd',
progress: 0,
startDttm: now(),
state: 'running',
cached: false,
sqlEditorId: 'dfsadfs',
};
});
it('should start a query', () => {
newState = sqlLabReducer(newState, actions.startQuery(newQuery));
const action = {
type: actions.START_QUERY,
query: {
id: 'abcd',
progress: 0,
startDttm: now(),
state: 'running',
cached: false,
sqlEditorId: 'dfsadfs',
},
};
newState = sqlLabReducer(newState, action);
expect(Object.keys(newState.queries)).toHaveLength(1);
});
it('should stop the query', () => {
newState = sqlLabReducer(newState, actions.startQuery(newQuery));
newState = sqlLabReducer(newState, actions.stopQuery(newQuery));
const startQueryAction = {
type: actions.START_QUERY,
query,
};
newState = sqlLabReducer(newState, startQueryAction);
const stopQueryAction = {
type: actions.STOP_QUERY,
query,
};
newState = sqlLabReducer(newState, stopQueryAction);
const q = newState.queries[Object.keys(newState.queries)[0]];
expect(q.state).toBe('stopped');
});
it('should remove a query', () => {
newState = sqlLabReducer(newState, actions.startQuery(newQuery));
newState = sqlLabReducer(newState, actions.removeQuery(newQuery));
const startQueryAction = {
type: actions.START_QUERY,
query,
};
newState = sqlLabReducer(newState, startQueryAction);
const removeQueryAction = {
type: actions.REMOVE_QUERY,
query,
};
newState = sqlLabReducer(newState, removeQueryAction);
expect(Object.keys(newState.queries)).toHaveLength(0);
});
it('should refresh queries when polling returns empty', () => {

View File

@ -22,7 +22,7 @@ import { Provider } from 'react-redux';
import thunkMiddleware from 'redux-thunk';
import { hot } from 'react-hot-loader';
import { initFeatureFlags } from 'src/featureFlags';
import { initFeatureFlags, isFeatureEnabled, FeatureFlag } from 'src/featureFlags';
import getInitialState from './reducers/getInitialState';
import rootReducer from './reducers/index';
import { initEnhancer } from '../reduxUtils';
@ -79,7 +79,10 @@ const store = createStore(
initialState,
compose(
applyMiddleware(thunkMiddleware),
initEnhancer(true, sqlLabPersistStateConfig),
initEnhancer(
!isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE),
sqlLabPersistStateConfig,
),
),
);

View File

@ -22,12 +22,14 @@ import { t } from '@superset-ui/translation';
import { SupersetClient } from '@superset-ui/connection';
import invert from 'lodash/invert';
import mapKeys from 'lodash/mapKeys';
import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags';
import { now } from '../../modules/dates';
import {
addSuccessToast as addSuccessToastAction,
addDangerToast as addDangerToastAction,
addInfoToast as addInfoToastAction,
addSuccessToast as addSuccessToastAction,
addWarningToast as addWarningToastAction,
} from '../../messageToasts/actions/index';
import getClientErrorObject from '../../utils/getClientErrorObject';
import COMMON_ERR_MESSAGES from '../../utils/errorMessages';
@ -55,9 +57,15 @@ export const QUERY_EDITOR_SET_QUERY_LIMIT = 'QUERY_EDITOR_SET_QUERY_LIMIT';
export const QUERY_EDITOR_SET_TEMPLATE_PARAMS = 'QUERY_EDITOR_SET_TEMPLATE_PARAMS';
export const QUERY_EDITOR_SET_SELECTED_TEXT = 'QUERY_EDITOR_SET_SELECTED_TEXT';
export const QUERY_EDITOR_PERSIST_HEIGHT = 'QUERY_EDITOR_PERSIST_HEIGHT';
export const MIGRATE_QUERY_EDITOR = 'MIGRATE_QUERY_EDITOR';
export const MIGRATE_TAB_HISTORY = 'MIGRATE_TAB_HISTORY';
export const MIGRATE_TABLE = 'MIGRATE_TABLE';
export const MIGRATE_QUERY = 'MIGRATE_QUERY';
export const SET_DATABASES = 'SET_DATABASES';
export const SET_ACTIVE_QUERY_EDITOR = 'SET_ACTIVE_QUERY_EDITOR';
export const LOAD_QUERY_EDITOR = 'LOAD_QUERY_EDITOR';
export const SET_TABLES = 'SET_TABLES';
export const SET_ACTIVE_SOUTHPANE_TAB = 'SET_ACTIVE_SOUTHPANE_TAB';
export const REFRESH_QUERIES = 'REFRESH_QUERIES';
export const SET_USER_OFFLINE = 'SET_USER_OFFLINE';
@ -85,6 +93,7 @@ export const CREATE_DATASOURCE_FAILED = 'CREATE_DATASOURCE_FAILED';
export const addInfoToast = addInfoToastAction;
export const addSuccessToast = addSuccessToastAction;
export const addDangerToast = addDangerToastAction;
export const addWarningToast = addWarningToastAction;
// a map of SavedQuery field names to the different names used client-side,
// because for now making the names consistent is too complicated
@ -201,11 +210,39 @@ export function startQuery(query) {
}
export function querySuccess(query, results) {
return { type: QUERY_SUCCESS, query, results };
return function (dispatch) {
const sync = (!query.isDataPreview && isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE))
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${results.query.sqlEditorId}`),
postPayload: { latest_query_id: query.id },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_SUCCESS, query, results }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while storing the latest query id in the backend. ' +
'Please contact your administrator if this problem persists.'))));
};
}
export function queryFailed(query, msg, link) {
return { type: QUERY_FAILED, query, msg, link };
return function (dispatch) {
const sync = (!query.isDataPreview && isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE))
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${query.sqlEditorId}`),
postPayload: { latest_query_id: query.id },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_FAILED, query, msg, link }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while storing the latest query id in the backend. ' +
'Please contact your administrator if this problem persists.'))));
};
}
export function stopQuery(query) {
@ -234,7 +271,7 @@ export function fetchQueryResults(query, displayLimit) {
})
.then(({ text = '{}' }) => {
const bigIntJson = JSONbig.parse(text);
dispatch(querySuccess(query, bigIntJson));
return dispatch(querySuccess(query, bigIntJson));
})
.catch(response =>
getClientErrorObject(response).then((error) => {
@ -309,9 +346,7 @@ export function validateQuery(query) {
postPayload,
stringify: false,
})
.then(({ json }) => {
dispatch(queryValidationReturned(query, json));
})
.then(({ json }) => dispatch(queryValidationReturned(query, json)))
.catch(response =>
getClientErrorObject(response).then((error) => {
let message = error.error || error.statusText || t('Unknown error');
@ -341,20 +376,189 @@ export function setDatabases(databases) {
return { type: SET_DATABASES, databases };
}
export function addQueryEditor(queryEditor) {
const newQueryEditor = {
...queryEditor,
id: shortid.generate(),
function migrateTable(table, queryEditorId, dispatch) {
return SupersetClient.post({
endpoint: encodeURI('/tableschemaview/'),
postPayload: { table: { ...table, queryEditorId } },
})
.then(({ json }) => {
const newTable = {
...table,
id: json.id,
queryEditorId,
};
return dispatch({ type: MIGRATE_TABLE, oldTable: table, newTable });
})
.catch(() => dispatch(addWarningToast(t(
'Unable to migrate table schema state to backend. Superset will retry ' +
'later. Please contact your administrator if this problem persists.'))));
}
function migrateQuery(queryId, queryEditorId, dispatch) {
return SupersetClient.post({
endpoint: encodeURI(`/tabstateview/${queryEditorId}/migrate_query`),
postPayload: { queryId },
})
.then(() => dispatch({ type: MIGRATE_QUERY, queryId, queryEditorId }))
.catch(() => dispatch(addWarningToast(t(
'Unable to migrate query state to backend. Superset will retry later. ' +
'Please contact your administrator if this problem persists.'))));
}
export function migrateQueryEditorFromLocalStorage(queryEditor, tables, queries) {
return function (dispatch) {
return SupersetClient.post({ endpoint: '/tabstateview/', postPayload: { queryEditor } })
.then(({ json }) => {
const newQueryEditor = {
...queryEditor,
id: json.id.toString(),
};
dispatch({ type: MIGRATE_QUERY_EDITOR, oldQueryEditor: queryEditor, newQueryEditor });
dispatch({ type: MIGRATE_TAB_HISTORY, oldId: queryEditor.id, newId: newQueryEditor.id });
return Promise.all([
...tables.map(table => migrateTable(table, newQueryEditor.id, dispatch)),
...queries.map(query => migrateQuery(query.id, newQueryEditor.id, dispatch)),
]);
})
.catch(() => dispatch(addWarningToast(t(
'Unable to migrate query editor state to backend. Superset will retry ' +
'later. Please contact your administrator if this problem persists.'))));
};
}
export function addQueryEditor(queryEditor) {
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.post({ endpoint: '/tabstateview/', postPayload: { queryEditor } })
: Promise.resolve({ json: { id: shortid.generate() } });
return sync
.then(({ json }) => {
const newQueryEditor = {
...queryEditor,
id: json.id.toString(),
};
return dispatch({ type: ADD_QUERY_EDITOR, queryEditor: newQueryEditor });
})
.catch(() => dispatch(addDangerToast(t(
'Unable to add a new tab to the backend. Please contact your administrator.'))));
};
return { type: ADD_QUERY_EDITOR, queryEditor: newQueryEditor };
}
export function cloneQueryToNewTab(query) {
return { type: CLONE_QUERY_TO_NEW_TAB, query };
return function (dispatch, getState) {
const state = getState();
const { queryEditors, tabHistory } = state.sqlLab;
const sourceQueryEditor = queryEditors.find(qe => qe.id === tabHistory[tabHistory.length - 1]);
const queryEditor = {
title: t('Copy of %s', sourceQueryEditor.title),
dbId: query.dbId ? query.dbId : null,
schema: query.schema ? query.schema : null,
autorun: true,
sql: query.sql,
queryLimit: sourceQueryEditor.queryLimit,
maxRow: sourceQueryEditor.maxRow,
};
return dispatch(addQueryEditor(queryEditor));
};
}
export function setActiveQueryEditor(queryEditor) {
return { type: SET_ACTIVE_QUERY_EDITOR, queryEditor };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.post({ endpoint: encodeURI(`/tabstateview/${queryEditor.id}/activate`) })
: Promise.resolve();
return sync
.then(() => dispatch({ type: SET_ACTIVE_QUERY_EDITOR, queryEditor }))
.catch((response) => {
if (response.status !== 404) {
return dispatch(addDangerToast(t(
'An error occurred while setting the active tab. Please contact ' +
'your administrator.')));
}
return dispatch({ type: REMOVE_QUERY_EDITOR, queryEditor });
});
};
}
export function loadQueryEditor(queryEditor) {
return { type: LOAD_QUERY_EDITOR, queryEditor };
}
export function setTables(tableSchemas) {
const tables = tableSchemas.map((tableSchema) => {
const {
columns,
selectStar,
primaryKey,
foreignKeys,
indexes,
dataPreviewQueryId,
} = tableSchema.description;
return {
dbId: tableSchema.database_id,
queryEditorId: tableSchema.tab_state_id.toString(),
schema: tableSchema.schema,
name: tableSchema.table,
expanded: tableSchema.expanded,
id: tableSchema.id,
dataPreviewQueryId,
columns,
selectStar,
primaryKey,
foreignKeys,
indexes,
isMetadataLoading: false,
isExtraMetadataLoading: false,
};
});
return { type: SET_TABLES, tables };
}
export function switchQueryEditor(queryEditor, displayLimit) {
return function (dispatch) {
if (isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) && !queryEditor.loaded) {
SupersetClient.get({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
})
.then(({ json }) => {
const loadedQueryEditor = {
id: json.id.toString(),
loaded: true,
title: json.label,
sql: json.sql,
selectedText: null,
latestQueryId: json.latest_query ? json.latest_query.id : null,
autorun: json.autorun,
dbId: json.database_id,
templateParams: json.template_params,
schema: json.schema,
queryLimit: json.query_limit,
validationResult: {
id: null,
errors: [],
completed: false,
},
};
dispatch(loadQueryEditor(loadedQueryEditor));
dispatch(setTables(json.table_schemas || []));
dispatch(setActiveQueryEditor(loadedQueryEditor));
if (json.latest_query && json.latest_query.resultsKey) {
dispatch(fetchQueryResults(json.latest_query, displayLimit));
}
})
.catch((response) => {
if (response.status !== 404) {
return dispatch(addDangerToast(t(
'An error occurred while fetching tab state')));
}
return dispatch({ type: REMOVE_QUERY_EDITOR, queryEditor });
});
} else {
dispatch(setActiveQueryEditor(queryEditor));
}
};
}
export function setActiveSouthPaneTab(tabId) {
@ -362,19 +566,75 @@ export function setActiveSouthPaneTab(tabId) {
}
export function removeQueryEditor(queryEditor) {
return { type: REMOVE_QUERY_EDITOR, queryEditor };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.delete({ endpoint: encodeURI(`/tabstateview/${queryEditor.id}`) })
: Promise.resolve();
return sync
.then(() =>
dispatch({ type: REMOVE_QUERY_EDITOR, queryEditor }),
)
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while removing tab. Please contact your administrator.'))),
);
};
}
export function removeQuery(query) {
return { type: REMOVE_QUERY, query };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.delete({
endpoint: encodeURI(`/tabstateview/${query.sqlEditorId}/query/${query.id}`),
})
: Promise.resolve();
return sync
.then(() =>
dispatch({ type: REMOVE_QUERY, query }),
)
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while removing query. Please contact your administrator.'))),
);
};
}
export function queryEditorSetDb(queryEditor, dbId) {
return { type: QUERY_EDITOR_SETDB, queryEditor, dbId };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
postPayload: { database_id: dbId },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_EDITOR_SETDB, queryEditor, dbId }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while setting the tab database ID. Please contact your administrator.'))),
);
};
}
export function queryEditorSetSchema(queryEditor, schema) {
return { type: QUERY_EDITOR_SET_SCHEMA, queryEditor, schema };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
postPayload: { schema },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_EDITOR_SET_SCHEMA, queryEditor, schema }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while setting the tab schema. Please contact your administrator.'))),
);
};
}
export function queryEditorSetSchemaOptions(queryEditor, options) {
@ -386,23 +646,96 @@ export function queryEditorSetTableOptions(queryEditor, options) {
}
export function queryEditorSetAutorun(queryEditor, autorun) {
return { type: QUERY_EDITOR_SET_AUTORUN, queryEditor, autorun };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
postPayload: { autorun },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_EDITOR_SET_AUTORUN, queryEditor, autorun }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while setting the tab autorun. Please contact your administrator.'))),
);
};
}
export function queryEditorSetTitle(queryEditor, title) {
return { type: QUERY_EDITOR_SET_TITLE, queryEditor, title };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
postPayload: { label: title },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_EDITOR_SET_TITLE, queryEditor, title }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while setting the tab title. Please contact your administrator.'))),
);
};
}
export function queryEditorSetSql(queryEditor, sql) {
return { type: QUERY_EDITOR_SET_SQL, queryEditor, sql };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
postPayload: { sql },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_EDITOR_SET_SQL, queryEditor, sql }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while storing your query in the backend. To ' +
'avoid losing your changes, please save your query using the ' +
'"Save Query" button.'))),
);
};
}
export function queryEditorSetQueryLimit(queryEditor, queryLimit) {
return { type: QUERY_EDITOR_SET_QUERY_LIMIT, queryEditor, queryLimit };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
postPayload: { query_limit: queryLimit },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_EDITOR_SET_QUERY_LIMIT, queryEditor, queryLimit }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while setting the tab title. Please contact your administrator.'))),
);
};
}
export function queryEditorSetTemplateParams(queryEditor, templateParams) {
return { type: QUERY_EDITOR_SET_TEMPLATE_PARAMS, queryEditor, templateParams };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.put({
endpoint: encodeURI(`/tabstateview/${queryEditor.id}`),
postPayload: { template_params: templateParams },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: QUERY_EDITOR_SET_TEMPLATE_PARAMS, queryEditor, templateParams }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while setting the tab template parameters. ' +
'Please contact your administrator.'))),
);
};
}
export function queryEditorSetSelectedText(queryEditor, sql) {
@ -413,6 +746,64 @@ export function mergeTable(table, query) {
return { type: MERGE_TABLE, table, query };
}
function getTableMetadata(table, query, dispatch) {
return SupersetClient.get({ endpoint: encodeURI(`/superset/table/${query.dbId}/` +
`${encodeURIComponent(table.name)}/${encodeURIComponent(table.schema)}/`) })
.then(({ json }) => {
const dataPreviewQuery = {
id: shortid.generate(),
dbId: query.dbId,
sql: json.selectStar,
tableName: table.name,
sqlEditorId: null,
tab: '',
runAsync: false,
ctas: false,
isDataPreview: true,
};
const newTable = {
...table,
...json,
expanded: true,
isMetadataLoading: false,
dataPreviewQueryId: dataPreviewQuery.id,
};
Promise.all([
dispatch(mergeTable(newTable, dataPreviewQuery)), // Merge table to tables in state
dispatch(runQuery(dataPreviewQuery)), // Run query to get preview data for table
]);
return newTable;
})
.catch(() =>
Promise.all([
dispatch(
mergeTable({
...table,
isMetadataLoading: false,
}),
),
dispatch(addDangerToast(t('An error occurred while fetching table metadata'))),
]),
);
}
function getTableExtendedMetadata(table, query, dispatch) {
return SupersetClient.get({
endpoint: encodeURI(`/superset/extra_table_metadata/${query.dbId}/` +
`${encodeURIComponent(table.name)}/${encodeURIComponent(table.schema)}/`),
})
.then(({ json }) => {
dispatch(mergeTable({ ...table, ...json, isExtraMetadataLoading: false }));
return json;
})
.catch(() =>
Promise.all([
dispatch(mergeTable({ ...table, isExtraMetadataLoading: false })),
dispatch(addDangerToast(t('An error occurred while fetching table metadata'))),
]),
);
}
export function addTable(query, tableName, schemaName) {
return function (dispatch) {
const table = {
@ -430,56 +821,28 @@ export function addTable(query, tableName, schemaName) {
}),
);
SupersetClient.get({ endpoint: encodeURI(`/superset/table/${query.dbId}/` +
`${encodeURIComponent(tableName)}/${encodeURIComponent(schemaName)}/`) })
.then(({ json }) => {
const dataPreviewQuery = {
id: shortid.generate(),
dbId: query.dbId,
sql: json.selectStar,
tableName,
sqlEditorId: null,
tab: '',
runAsync: false,
ctas: false,
};
const newTable = {
...table,
...json,
expanded: true,
isMetadataLoading: false,
};
return Promise.all([
getTableMetadata(table, query, dispatch),
getTableExtendedMetadata(table, query, dispatch),
])
.then(([newTable, json]) => {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.post({
endpoint: encodeURI('/tableschemaview/'),
postPayload: { table: { ...newTable, ...json } },
})
: Promise.resolve({ json: { id: shortid.generate() } });
return Promise.all([
dispatch(mergeTable(newTable, dataPreviewQuery)), // Merge table to tables in state
dispatch(runQuery(dataPreviewQuery)), // Run query to get preview data for table
]);
})
.catch(() =>
Promise.all([
dispatch(
mergeTable({
...table,
isMetadataLoading: false,
}),
),
dispatch(addDangerToast(t('An error occurred while fetching table metadata'))),
]),
);
SupersetClient.get({
endpoint: encodeURI(`/superset/extra_table_metadata/${query.dbId}/` +
`${encodeURIComponent(tableName)}/${encodeURIComponent(schemaName)}/`),
})
.then(({ json }) =>
dispatch(mergeTable({ ...table, ...json, isExtraMetadataLoading: false })),
)
.catch(() =>
Promise.all([
dispatch(mergeTable({ ...table, isExtraMetadataLoading: false })),
dispatch(addDangerToast(t('An error occurred while fetching table metadata'))),
]),
);
return sync
.then(({ json: resultJson }) =>
dispatch(mergeTable({ ...table, id: resultJson.id })),
)
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while fetching table metadata. ' +
'Please contact your administrator.'))),
);
});
};
}
@ -499,6 +862,7 @@ export function reFetchQueryResults(query) {
runAsync: false,
ctas: false,
queryLimit: query.queryLimit,
isDataPreview: query.isDataPreview,
};
dispatch(runQuery(newQuery));
dispatch(changeDataPreviewId(query.id, newQuery));
@ -506,15 +870,57 @@ export function reFetchQueryResults(query) {
}
export function expandTable(table) {
return { type: EXPAND_TABLE, table };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.post({
endpoint: encodeURI(`/tableschemaview/${table.id}/expanded`),
postPayload: { expanded: true },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: EXPAND_TABLE, table }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while expanding the table schema. ' +
'Please contact your administrator.'))),
);
};
}
export function collapseTable(table) {
return { type: COLLAPSE_TABLE, table };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.post({
endpoint: encodeURI(`/tableschemaview/${table.id}/expanded`),
postPayload: { expanded: false },
})
: Promise.resolve();
return sync
.then(() => dispatch({ type: COLLAPSE_TABLE, table }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while collapsing the table schema. ' +
'Please contact your administrator.'))),
);
};
}
export function removeTable(table) {
return { type: REMOVE_TABLE, table };
return function (dispatch) {
const sync = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? SupersetClient.delete({ endpoint: encodeURI(`/tableschemaview/${table.id}`) })
: Promise.resolve();
return sync
.then(() => dispatch({ type: REMOVE_TABLE, table }))
.catch(() =>
dispatch(addDangerToast(t(
'An error occurred while removing the table schema. ' +
'Please contact your administrator.'))),
);
};
}
export function refreshQueries(alteredQueries) {

View File

@ -126,15 +126,15 @@ class App extends React.PureComponent {
App.propTypes = {
actions: PropTypes.object,
localStorageUsageInKilobytes: PropTypes.number.isRequired,
common: PropTypes.object,
localStorageUsageInKilobytes: PropTypes.number.isRequired,
};
function mapStateToProps(state) {
const { localStorageUsageInKilobytes, common } = state;
const { common, localStorageUsageInKilobytes } = state;
return {
localStorageUsageInKilobytes,
common,
localStorageUsageInKilobytes,
};
}

View File

@ -42,7 +42,7 @@ export default class LimitControl extends React.PureComponent {
super(props);
const { value, defaultQueryLimit } = props;
this.state = {
textValue: value.toString() || defaultQueryLimit.toString(),
textValue: (value || defaultQueryLimit).toString(),
showOverlay: false,
};
this.handleHide = this.handleHide.bind(this);

View File

@ -200,7 +200,7 @@ export default class ResultSet extends React.PureComponent {
</Button>
</Alert>
</div>);
} else if (query.state === 'success') {
} else if (query.state === 'success' && query.results) {
const results = query.results;
let data;
if (this.props.cache && query.cached) {
@ -229,13 +229,13 @@ export default class ResultSet extends React.PureComponent {
return <Alert bsStyle="warning">{t('The query returned no data')}</Alert>;
}
}
if (query.cached) {
if (query.cached || (query.state === 'success' && !query.results)) {
return (
<Button
bsSize="sm"
className="fetch"
bsStyle="primary"
onClick={this.reFetchQueryResults.bind(this, query)}
onClick={this.reFetchQueryResults.bind(this, { ...query, isDataPreview: true })}
>
{t('Fetch data preview')}
</Button>

View File

@ -23,6 +23,7 @@ import { Alert, Label, Tab, Tabs } from 'react-bootstrap';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { t } from '@superset-ui/translation';
import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags';
import * as Actions from '../actions/sqlLab';
import QueryHistory from './QueryHistory';
@ -88,19 +89,25 @@ export class SouthPane extends React.PureComponent {
latestQuery = props.editorQueries.find(q => q.id === this.props.latestQueryId);
}
let results;
if (latestQuery &&
(Date.now() - latestQuery.startDttm) <= LOCALSTORAGE_MAX_QUERY_AGE_MS) {
results = (
<ResultSet
showControls
search
query={latestQuery}
actions={props.actions}
height={innerTabContentHeight}
database={this.props.databases[latestQuery.dbId]}
displayLimit={this.props.displayLimit}
/>
);
if (latestQuery) {
if (
isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) &&
(!latestQuery.resultsKey && !latestQuery.results)
) {
results = <Alert bsStyle="warning">{t('No stored results found, you need to re-run your query')}</Alert>;
} else if ((Date.now() - latestQuery.startDttm) <= LOCALSTORAGE_MAX_QUERY_AGE_MS) {
results = (
<ResultSet
showControls
search
query={latestQuery}
actions={props.actions}
height={innerTabContentHeight}
database={this.props.databases[latestQuery.dbId]}
displayLimit={this.props.displayLimit}
/>
);
}
} else {
results = <Alert bsStyle="info">{t('Run a query to display results here')}</Alert>;
}

View File

@ -57,6 +57,7 @@ import { FeatureFlag, isFeatureEnabled } from '../../featureFlags';
const SQL_EDITOR_PADDING = 10;
const INITIAL_NORTH_PERCENT = 30;
const INITIAL_SOUTH_PERCENT = 70;
const SET_QUERY_EDITOR_SQL_DEBOUNCE_MS = 2000;
const VALIDATION_DEBOUNCE_MS = 600;
const WINDOW_RESIZE_THROTTLE_MS = 100;
@ -104,6 +105,10 @@ class SqlEditor extends React.PureComponent {
this.stopQuery = this.stopQuery.bind(this);
this.onSqlChanged = this.onSqlChanged.bind(this);
this.setQueryEditorSql = this.setQueryEditorSql.bind(this);
this.setQueryEditorSqlWithDebounce = debounce(
this.setQueryEditorSql.bind(this),
SET_QUERY_EDITOR_SQL_DEBOUNCE_MS,
);
this.queryPane = this.queryPane.bind(this);
this.getAceEditorAndSouthPaneHeights = this.getAceEditorAndSouthPaneHeights.bind(this);
this.getSqlEditorHeight = this.getSqlEditorHeight.bind(this);
@ -151,6 +156,7 @@ class SqlEditor extends React.PureComponent {
}
onSqlChanged(sql) {
this.setState({ sql });
this.setQueryEditorSqlWithDebounce(sql);
// Request server-side validation of the query text
if (this.canValidateQuery()) {
// NB. requestValidation is debounced
@ -274,6 +280,7 @@ class SqlEditor extends React.PureComponent {
queryLimit: qe.queryLimit || this.props.defaultQueryLimit,
runAsync: this.props.database ? this.props.database.allow_run_async : false,
ctas,
updateTabState: !qe.selectedText,
};
this.props.actions.runQuery(query);
this.props.actions.setActiveSouthPaneTab('Results');

View File

@ -23,6 +23,7 @@ import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import URI from 'urijs';
import { t } from '@superset-ui/translation';
import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags';
import * as Actions from '../actions/sqlLab';
import SqlEditor from './SqlEditor';
@ -70,6 +71,20 @@ class TabbedSqlEditors extends React.PureComponent {
this.duplicateQueryEditor = this.duplicateQueryEditor.bind(this);
}
componentDidMount() {
// migrate query editor and associated tables state to server
if (isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)) {
const localStorageTables = this.props.tables.filter(table => table.inLocalStorage);
const localStorageQueries = Object.values(this.props.queries)
.filter(query => query.inLocalStorage);
this.props.queryEditors.filter(qe => qe.inLocalStorage).forEach((qe) => {
// get all queries associated with the query editor
const queries = localStorageQueries
.filter(query => query.sqlEditorId === qe.id);
const tables = localStorageTables.filter(table => table.queryEditorId === qe.id);
this.props.actions.migrateQueryEditorFromLocalStorage(qe, tables, queries);
});
}
const query = URI(window.location).search(true);
// Popping a new tab based on the querystring
if (query.id || query.sql || query.savedQueryId || query.datasourceKey) {
@ -104,6 +119,19 @@ class TabbedSqlEditors extends React.PureComponent {
this.props.actions.addQueryEditor(newQueryEditor);
}
this.popNewTab();
} else if (this.props.queryEditors.length === 0) {
this.newQueryEditor();
} else {
const qe = this.activeQueryEditor();
const latestQuery = this.props.queries[qe.latestQueryId];
if (
isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE) &&
latestQuery && latestQuery.resultsKey
) {
// when results are not stored in localStorage they need to be
// fetched from the results backend (if configured)
this.props.actions.fetchQueryResults(latestQuery, this.props.displayLimit);
}
}
}
UNSAFE_componentWillReceiveProps(nextProps) {
@ -122,7 +150,7 @@ class TabbedSqlEditors extends React.PureComponent {
nextProps.tables.forEach((table) => {
const queryId = table.dataPreviewQueryId;
if (queryId && nextProps.queries[queryId] && table.queryEditorId === nextActiveQeId) {
dataPreviewQueries.push(nextProps.queries[queryId]);
dataPreviewQueries.push({ ...nextProps.queries[queryId], tableName: table.name });
}
});
if (!areArraysShallowEqual(dataPreviewQueries, this.state.dataPreviewQueries)) {
@ -142,29 +170,31 @@ class TabbedSqlEditors extends React.PureComponent {
}
}
activeQueryEditor() {
const qeid = this.props.tabHistory[this.props.tabHistory.length - 1];
for (let i = 0; i < this.props.queryEditors.length; i++) {
const qe = this.props.queryEditors[i];
if (qe.id === qeid) {
return qe;
}
if (this.props.tabHistory.length === 0) {
return this.props.queryEditors[0];
}
return null;
const qeid = this.props.tabHistory[this.props.tabHistory.length - 1];
return this.props.queryEditors.find(qe => qe.id === qeid) || null;
}
newQueryEditor() {
queryCount++;
const activeQueryEditor = this.activeQueryEditor();
const firstDbId = Math.min(
...Object.values(this.props.databases).map(database => database.id));
const warning = isFeatureEnabled(FeatureFlag.SQLLAB_BACKEND_PERSISTENCE)
? ''
: `${t(
'-- Note: Unless you save your query, these tabs will NOT persist if you clear your cookies or change browsers.',
)}\n\n`;
const qe = {
title: t('Untitled Query %s', queryCount),
dbId:
activeQueryEditor && activeQueryEditor.dbId
? activeQueryEditor.dbId
: this.props.defaultDbId,
: (this.props.defaultDbId || firstDbId),
schema: activeQueryEditor ? activeQueryEditor.schema : null,
autorun: false,
sql: `${t(
'-- Note: Unless you save your query, these tabs will NOT persist if you clear your cookies or change browsers.',
)}\n\nSELECT ...`,
sql: `${warning}SELECT ...`,
queryLimit: this.props.defaultQueryLimit,
};
this.props.actions.addQueryEditor(qe);
@ -173,7 +203,11 @@ class TabbedSqlEditors extends React.PureComponent {
if (key === 'add_tab') {
this.newQueryEditor();
} else {
this.props.actions.setActiveQueryEditor({ id: key });
const qeid = this.props.tabHistory[this.props.tabHistory.length - 1];
if (key !== qeid) {
const queryEditor = this.props.queryEditors.find(qe => qe.id === key);
this.props.actions.switchQueryEditor(queryEditor, this.props.displayLimit);
}
}
}
removeQueryEditor(qe) {
@ -191,7 +225,7 @@ class TabbedSqlEditors extends React.PureComponent {
}
render() {
const editors = this.props.queryEditors.map((qe, i) => {
const isSelected = qe.id === this.activeQueryEditor().id;
const isSelected = this.activeQueryEditor() && this.activeQueryEditor().id === qe.id;
let latestQuery;
if (qe.latestQueryId) {

View File

@ -70,10 +70,9 @@ export default class TemplateParamsEditor extends React.Component {
isValid = false;
}
this.setState({ parsedJSON, isValid, codeText });
if (isValid) {
this.props.onChange(codeText);
} else {
this.props.onChange('{}');
const newValue = isValid ? codeText : '{}';
if (newValue !== this.props.code) {
this.props.onChange(newValue);
}
}
renderDoc() {

View File

@ -16,18 +16,28 @@
* specific language governing permissions and limitations
* under the License.
*/
import shortid from 'shortid';
import { t } from '@superset-ui/translation';
import getToastsFromPyFlashMessages from '../../messageToasts/utils/getToastsFromPyFlashMessages';
export default function getInitialState({ defaultDbId, ...restBootstrapData }) {
/*
* Before YYYY-MM-DD, the state for SQL Lab was stored exclusively in the
* browser's localStorage. The feature flag `SQLLAB_BACKEND_PERSISTENCE`
* moves the state to the backend instead, migrating it from local storage.
*
* To allow for a transparent migration, the initial state is a combination
* of the backend state (if any) with the browser state (if any).
*/
const queryEditors = [];
const defaultQueryEditor = {
id: shortid.generate(),
id: null,
loaded: true,
title: t('Untitled Query'),
sql: 'SELECT *\nFROM\nWHERE',
selectedText: null,
latestQueryId: null,
autorun: false,
templateParams: null,
dbId: defaultDbId,
queryLimit: restBootstrapData.common.conf.DEFAULT_SQLLAB_LIMIT,
validationResult: {
@ -42,16 +52,114 @@ export default function getInitialState({ defaultDbId, ...restBootstrapData }) {
},
};
/* Load state from the backend. This will be empty if the feature flag
* `SQLLAB_BACKEND_PERSISTENCE` is off.
*/
const activeTab = restBootstrapData.active_tab;
restBootstrapData.tab_state_ids.forEach(({ id, label }) => {
let queryEditor;
if (activeTab && activeTab.id === id) {
queryEditor = {
id: id.toString(),
loaded: true,
title: activeTab.label,
sql: activeTab.sql,
selectedText: null,
latestQueryId: activeTab.latest_query ? activeTab.latest_query.id : null,
autorun: activeTab.autorun,
templateParams: activeTab.template_params,
dbId: activeTab.database_id,
schema: activeTab.schema,
queryLimit: activeTab.query_limit,
validationResult: {
id: null,
errors: [],
completed: false,
},
};
} else {
// dummy state, actual state will be loaded on tab switch
queryEditor = {
...defaultQueryEditor,
id: id.toString(),
loaded: false,
title: label,
};
}
queryEditors.push(queryEditor);
});
const tabHistory = activeTab ? [activeTab.id.toString()] : [];
const tables = [];
if (activeTab) {
activeTab.table_schemas.forEach((tableSchema) => {
const {
columns,
selectStar,
primaryKey,
foreignKeys,
indexes,
dataPreviewQueryId,
} = tableSchema.description;
const table = {
dbId: tableSchema.database_id,
queryEditorId: tableSchema.tab_state_id.toString(),
schema: tableSchema.schema,
name: tableSchema.table,
expanded: tableSchema.expanded,
id: tableSchema.id,
isMetadataLoading: false,
isExtraMetadataLoading: false,
dataPreviewQueryId,
columns,
selectStar,
primaryKey,
foreignKeys,
indexes,
};
tables.push(table);
});
}
const { databases, queries } = restBootstrapData;
/* If the `SQLLAB_BACKEND_PERSISTENCE` feature flag is off, or if the user
* hasn't used SQL Lab after it has been turned on, the state will be stored
* in the browser's local storage.
*/
if (localStorage.getItem('redux') && JSON.parse(localStorage.getItem('redux')).sqlLab) {
const sqlLab = JSON.parse(localStorage.getItem('redux')).sqlLab;
if (sqlLab.queryEditors.length === 0) {
// migration was successful
localStorage.removeItem('redux');
} else {
// add query editors and tables to state with a special flag so they can
// be migrated if the `SQLLAB_BACKEND_PERSISTENCE` feature flag is on
sqlLab.queryEditors.forEach(qe => queryEditors.push({
...qe,
inLocalStorage: true,
loaded: true,
}));
sqlLab.tables.forEach(table => tables.push({ ...table, inLocalStorage: true }));
Object.values(sqlLab.queries).forEach((query) => {
queries[query.id] = { ...query, inLocalStorage: true };
});
tabHistory.push(...sqlLab.tabHistory);
}
}
return {
sqlLab: {
activeSouthPaneTab: 'Results',
alerts: [],
databases: {},
databases,
offline: false,
queries: {},
queryEditors: [defaultQueryEditor],
tabHistory: [defaultQueryEditor.id],
tables: [],
queries,
queryEditors,
tabHistory,
tables,
queriesLastUpdate: Date.now(),
},
messageToasts: getToastsFromPyFlashMessages(

View File

@ -16,7 +16,6 @@
* specific language governing permissions and limitations
* under the License.
*/
import shortid from 'shortid';
import { t } from '@superset-ui/translation';
import getInitialState from './getInitialState';
@ -29,6 +28,7 @@ import {
removeFromArr,
getFromArr,
addToArr,
extendArr,
} from '../../reduxUtils';
export default function sqlLabReducer(state = {}, action) {
@ -59,7 +59,6 @@ export default function sqlLabReducer(state = {}, action) {
);
const qe = {
remoteId: progenitor.remoteId,
id: shortid.generate(),
title: t('Copy of %s', progenitor.title),
dbId: action.query.dbId ? action.query.dbId : null,
schema: action.query.schema ? action.query.schema : null,
@ -68,13 +67,13 @@ export default function sqlLabReducer(state = {}, action) {
queryLimit: action.query.queryLimit,
maxRow: action.query.maxRow,
};
return sqlLabReducer(state, actions.addQueryEditor(qe));
},
[actions.REMOVE_QUERY_EDITOR]() {
let newState = removeFromArr(state, 'queryEditors', action.queryEditor);
// List of remaining queryEditor ids
const qeIds = newState.queryEditors.map(qe => qe.id);
const queries = {};
Object.keys(state.queries).forEach((k) => {
const query = state.queries[k];
@ -82,9 +81,14 @@ export default function sqlLabReducer(state = {}, action) {
queries[k] = query;
}
});
let tabHistory = state.tabHistory.slice();
tabHistory = tabHistory.filter(id => qeIds.indexOf(id) > -1);
newState = Object.assign({}, newState, { tabHistory, queries });
// Remove associated table schemas
const tables = state.tables.filter(table => table.queryEditorId !== action.queryEditor.id);
newState = Object.assign({}, newState, { tabHistory, tables, queries });
return newState;
},
[actions.REMOVE_QUERY]() {
@ -114,7 +118,6 @@ export default function sqlLabReducer(state = {}, action) {
}
return alterInArr(state, 'tables', existingTable, at);
}
at.id = shortid.generate();
// for new table, associate Id of query for data preview
at.dataPreviewQueryId = null;
let newState = addToArr(state, 'tables', at);
@ -318,16 +321,77 @@ export default function sqlLabReducer(state = {}, action) {
},
[actions.SET_ACTIVE_QUERY_EDITOR]() {
const qeIds = state.queryEditors.map(qe => qe.id);
if (qeIds.indexOf(action.queryEditor.id) > -1) {
if (
(qeIds.indexOf(action.queryEditor.id) > -1) &&
(state.tabHistory[state.tabHistory.length - 1] !== action.queryEditor.id)
) {
const tabHistory = state.tabHistory.slice();
tabHistory.push(action.queryEditor.id);
return Object.assign({}, state, { tabHistory });
}
return state;
},
[actions.LOAD_QUERY_EDITOR]() {
return alterInArr(state, 'queryEditors', action.queryEditor, { ...action.queryEditor });
},
[actions.SET_TABLES]() {
return extendArr(state, 'tables', action.tables);
},
[actions.SET_ACTIVE_SOUTHPANE_TAB]() {
return Object.assign({}, state, { activeSouthPaneTab: action.tabId });
},
[actions.MIGRATE_QUERY_EDITOR]() {
// remove migrated query editor from localStorage
const sqlLab = JSON.parse(localStorage.getItem('redux')).sqlLab;
sqlLab.queryEditors = sqlLab.queryEditors.filter(qe => qe.id !== action.oldQueryEditor.id);
localStorage.setItem('redux', JSON.stringify({ sqlLab }));
// replace localStorage query editor with the server backed one
return addToArr(
removeFromArr(
state,
'queryEditors',
action.oldQueryEditor,
),
'queryEditors',
action.newQueryEditor,
);
},
[actions.MIGRATE_TABLE]() {
// remove migrated table from localStorage
const sqlLab = JSON.parse(localStorage.getItem('redux')).sqlLab;
sqlLab.tables = sqlLab.tables.filter(table => table.id !== action.oldTable.id);
localStorage.setItem('redux', JSON.stringify({ sqlLab }));
// replace localStorage table with the server backed one
return addToArr(
removeFromArr(
state,
'tables',
action.oldTable,
),
'tables',
action.newTable,
);
},
[actions.MIGRATE_TAB_HISTORY]() {
// remove migrated tab from localStorage tabHistory
const sqlLab = JSON.parse(localStorage.getItem('redux')).sqlLab;
sqlLab.tabHistory = sqlLab.tabHistory.filter(tabId => tabId !== action.oldId);
localStorage.setItem('redux', JSON.stringify({ sqlLab }));
const tabHistory = state.tabHistory.filter(tabId => tabId !== action.oldId);
tabHistory.push(action.newId);
return Object.assign({}, state, { tabHistory });
},
[actions.MIGRATE_QUERY]() {
const query = {
...state.queries[action.queryId],
// point query to migrated query editor
sqlEditorId: action.queryEditorId,
};
const queries = Object.assign({}, state.queries, { [query.id]: query });
return Object.assign({}, state, { queries });
},
[actions.QUERY_EDITOR_SETDB]() {
return alterInArr(state, 'queryEditors', action.queryEditor, { dbId: action.dbId });
},

View File

@ -127,9 +127,8 @@ export default class TableSelector extends React.PureComponent {
}));
}
fetchTables(force, substr) {
// This can be large so it shouldn't be put in the Redux store
const forceRefresh = force || false;
const { dbId, schema } = this.props;
const { dbId, schema } = this.state;
if (dbId && schema) {
this.setState(() => ({ tableLoading: true, tableOptions: [] }));
const endpoint = encodeURI(`/superset/tables/${dbId}/` +

View File

@ -25,6 +25,7 @@ export enum FeatureFlag {
SCHEDULED_QUERIES = 'SCHEDULED_QUERIES',
SQL_VALIDATORS_BY_ENGINE = 'SQL_VALIDATORS_BY_ENGINE',
ESTIMATE_QUERY_COST = 'ESTIMATE_QUERY_COST',
SQLLAB_BACKEND_PERSISTENCE = 'SQLLAB_BACKEND_PERSISTENCE',
}
export type FeatureFlagMap = {

View File

@ -86,6 +86,23 @@ export function addToArr(state, arrKey, obj, prepend = false) {
return Object.assign({}, state, newState);
}
/**
 * Return a copy of `state` with every element of `obj` appended (or
 * prepended) to the array stored at `state[arrKey]`.
 *
 * Elements missing an `id` receive a generated one. Unlike a naive
 * implementation, the caller's element objects are never mutated:
 * elements that need an id are shallow-copied first (so no
 * `no-param-reassign` suppression is needed).
 *
 * @param {Object} state - current Redux state (not mutated)
 * @param {string} arrKey - key of the array inside `state` to extend
 * @param {Array<Object>} obj - elements to add
 * @param {boolean} [prepend=false] - insert before existing elements
 * @returns {Object} new state object with the extended array
 */
export function extendArr(state, arrKey, obj, prepend = false) {
  // shallow-copy any element lacking an id so caller data is untouched
  const withIds = obj.map(el => (el.id ? el : { ...el, id: shortid.generate() }));
  const extended = prepend
    ? [...withIds, ...state[arrKey]]
    : [...state[arrKey], ...withIds];
  return { ...state, [arrKey]: extended };
}
export function initEnhancer(persist = true, persistConfig = {}) {
const { paths, config } = persistConfig;
const composeEnhancers = process.env.WEBPACK_MODE === 'development'

View File

@ -486,7 +486,7 @@ RESULTS_BACKEND = None
# rather than JSON. This feature requires additional testing from the
# community before it is fully adopted, so this config option is provided
# in order to disable should breaking issues be discovered.
RESULTS_BACKEND_USE_MSGPACK = True
RESULTS_BACKEND_USE_MSGPACK = False
# The S3 bucket where you want to store your external hive tables created
# from CSV files. For example, 'companyname-superset'

View File

@ -0,0 +1,94 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add tables for SQL Lab state
Revision ID: db4b49eb0782
Revises: 78ee127d0d1d
Create Date: 2019-11-13 11:05:30.122167
"""
# revision identifiers, used by Alembic.
revision = "db4b49eb0782"
down_revision = "78ee127d0d1d"
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
def upgrade():
    """Create the ``tab_state`` and ``table_schema`` tables that persist
    SQL Lab editor state (open tabs and their schema-browser tables) in
    the backend.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "tab_state",
        # audit / extra-JSON columns (AuditMixinNullable / ExtraJSONMixin)
        sa.Column("created_on", sa.DateTime(), nullable=True),
        sa.Column("changed_on", sa.DateTime(), nullable=True),
        sa.Column("extra_json", sa.Text(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False, autoincrement=True),
        # owner of the tab
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("label", sa.String(length=256), nullable=True),
        # whether this is the user's currently selected tab
        sa.Column("active", sa.Boolean(), nullable=True),
        # selected database and schema
        sa.Column("database_id", sa.Integer(), nullable=True),
        sa.Column("schema", sa.String(length=256), nullable=True),
        # editor contents
        sa.Column("sql", sa.Text(), nullable=True),
        sa.Column("query_limit", sa.Integer(), nullable=True),
        # latest query run in this tab, referenced by its client id
        sa.Column("latest_query_id", sa.String(11), nullable=True),
        sa.Column("autorun", sa.Boolean(), nullable=False, default=False),
        sa.Column("template_params", sa.Text(), nullable=True),
        sa.Column("created_by_fk", sa.Integer(), nullable=True),
        sa.Column("changed_by_fk", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]),
        sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]),
        sa.ForeignKeyConstraint(["database_id"], ["dbs.id"]),
        sa.ForeignKeyConstraint(["latest_query_id"], ["query.client_id"]),
        sa.ForeignKeyConstraint(["user_id"], ["ab_user.id"]),
        sa.PrimaryKeyConstraint("id"),
        # keep ids monotonically increasing on SQLite as well
        sqlite_autoincrement=True,
    )
    op.create_index(op.f("ix_tab_state_id"), "tab_state", ["id"], unique=True)
    op.create_table(
        "table_schema",
        sa.Column("created_on", sa.DateTime(), nullable=True),
        sa.Column("changed_on", sa.DateTime(), nullable=True),
        sa.Column("extra_json", sa.Text(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False, autoincrement=True),
        # owning tab; rows are cascade-deleted with the tab
        sa.Column("tab_state_id", sa.Integer(), nullable=True),
        sa.Column("database_id", sa.Integer(), nullable=False),
        sa.Column("schema", sa.String(length=256), nullable=True),
        sa.Column("table", sa.String(length=256), nullable=True),
        # JSON blob with the client-side table payload (columns, keys, ...)
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("expanded", sa.Boolean(), nullable=True),
        sa.Column("created_by_fk", sa.Integer(), nullable=True),
        sa.Column("changed_by_fk", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]),
        sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]),
        sa.ForeignKeyConstraint(["database_id"], ["dbs.id"]),
        sa.ForeignKeyConstraint(["tab_state_id"], ["tab_state.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sqlite_autoincrement=True,
    )
    op.create_index(op.f("ix_table_schema_id"), "table_schema", ["id"], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Drop the SQL Lab state tables.

    ``table_schema`` is dropped first because it holds a foreign key to
    ``tab_state``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f("ix_table_schema_id"), table_name="table_schema")
    op.drop_table("table_schema")
    op.drop_index(op.f("ix_tab_state_id"), table_name="tab_state")
    op.drop_table("tab_state")
    # ### end Alembic commands ###

View File

@ -19,6 +19,7 @@
import re
from datetime import datetime
import simplejson as json
import sqlalchemy as sqla
from flask import Markup
from flask_appbuilder import Model
@ -188,6 +189,87 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin):
return "/superset/sqllab?savedQueryId={0}".format(self.id)
class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
    """State of one SQL Lab editor tab, persisted per user in the backend."""

    __tablename__ = "tab_state"

    # basic info
    id = Column(Integer, primary_key=True, autoincrement=True)
    user_id = Column(Integer, ForeignKey("ab_user.id"))
    label = Column(String(256))
    # whether this is the tab the user currently has selected
    active = Column(Boolean, default=False)

    # selected DB and schema
    database_id = Column(Integer, ForeignKey("dbs.id"))
    database = relationship("Database", foreign_keys=[database_id])
    schema = Column(String(256))

    # tables that are open in the schema browser and their data previews;
    # deleting a tab removes its table schemas (delete-orphan + DB cascade)
    table_schemas = relationship(
        "TableSchema",
        cascade="all, delete-orphan",
        backref="tab_state",
        passive_deletes=True,
    )

    # the query in the textarea, and results (if any)
    sql = Column(Text)
    query_limit = Column(Integer)

    # latest query that was run, referenced by the query's client id
    latest_query_id = Column(Integer, ForeignKey("query.client_id"))
    latest_query = relationship("Query")

    # other properties
    autorun = Column(Boolean, default=False)
    template_params = Column(Text)

    def to_dict(self):
        """Serialize the tab (including nested table schemas and the latest
        query) for the SQL Lab bootstrap payload."""
        return {
            "id": self.id,
            "user_id": self.user_id,
            "label": self.label,
            "active": self.active,
            "database_id": self.database_id,
            "schema": self.schema,
            "table_schemas": [ts.to_dict() for ts in self.table_schemas],
            "sql": self.sql,
            "query_limit": self.query_limit,
            "latest_query": self.latest_query.to_dict() if self.latest_query else None,
            "autorun": self.autorun,
            "template_params": self.template_params,
        }
class TableSchema(Model, AuditMixinNullable, ExtraJSONMixin):
    """A table (and its schema metadata) open in a SQL Lab tab's schema
    browser."""

    __tablename__ = "table_schema"

    id = Column(Integer, primary_key=True, autoincrement=True)
    # owning SQL Lab tab; rows are removed when the tab is deleted
    tab_state_id = Column(Integer, ForeignKey("tab_state.id", ondelete="CASCADE"))

    database_id = Column(Integer, ForeignKey("dbs.id"), nullable=False)
    database = relationship("Database", foreign_keys=[database_id])
    schema = Column(String(256))
    table = Column(String(256))

    # JSON describing the schema, partitions, latest partition, etc.
    description = Column(Text)

    expanded = Column(Boolean, default=False)

    def to_dict(self):
        """Serialize for the SQL Lab bootstrap payload.

        ``description`` is nullable in the database, so guard against
        ``json.loads(None)`` raising for rows without a description.
        """
        return {
            "id": self.id,
            "tab_state_id": self.tab_state_id,
            "database_id": self.database_id,
            "schema": self.schema,
            "table": self.table,
            "description": json.loads(self.description) if self.description else None,
            "expanded": self.expanded,
        }
# events for updating tags
sqla.event.listen(SavedQuery, "after_insert", QueryUpdater.after_insert)
sqla.event.listen(SavedQuery, "after_update", QueryUpdater.after_update)

View File

@ -318,7 +318,7 @@ def execute_sql_statements(
db_engine_spec = database.db_engine_spec
db_engine_spec.patch()
if store_results and not results_backend:
if database.allow_run_async and not results_backend:
raise SqlLabException("Results backend isn't configured.")
# Breaking down into multiple statements
@ -394,7 +394,7 @@ def execute_sql_statements(
)
payload["query"]["state"] = QueryStatus.SUCCESS
if store_results:
if store_results and results_backend:
key = str(uuid.uuid4())
logging.info(
f"Query {query_id}: Storing results in results backend, key: {key}"

View File

@ -77,7 +77,7 @@ from superset.exceptions import (
SupersetTimeoutException,
)
from superset.jinja_context import get_template_processor
from superset.models.sql_lab import Query
from superset.models.sql_lab import Query, TabState
from superset.models.user_attributes import UserAttribute
from superset.sql_parse import ParsedQuery
from superset.sql_validators import get_validator_by_name
@ -117,6 +117,20 @@ stats_logger = config["STATS_LOGGER"]
DAR = models.DatasourceAccessRequest
QueryStatus = utils.QueryStatus
DATABASE_KEYS = [
"allow_csv_upload",
"allow_ctas",
"allow_dml",
"allow_multi_schema_metadata_fetch",
"allow_run_async",
"allows_subquery",
"backend",
"database_name",
"expose_in_sqllab",
"force_ctas_schema",
"id",
]
ALL_DATASOURCE_ACCESS_ERR = __(
"This endpoint requires the `all_datasource_access` permission"
@ -2644,12 +2658,17 @@ class Superset(BaseSupersetView):
try:
timeout = config["SQLLAB_TIMEOUT"]
timeout_msg = f"The query exceeded the {timeout} seconds timeout."
store_results = (
is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE")
and not query.select_as_cta
)
with utils.timeout(seconds=timeout, error_message=timeout_msg):
# pylint: disable=no-value-for-parameter
data = sql_lab.get_sql_results(
query.id,
rendered_query,
return_results=True,
store_results=store_results,
user_name=g.user.username if g.user else None,
expand_data=expand_data,
)
@ -2997,9 +3016,38 @@ class Superset(BaseSupersetView):
@expose("/sqllab")
def sqllab(self):
"""SQL Editor"""
# send list of tab state ids
tab_state_ids = (
db.session.query(TabState.id, TabState.label)
.filter_by(user_id=g.user.get_id())
.all()
)
# return first active tab, or fallback to another one if no tab is active
active_tab = (
db.session.query(TabState)
.filter_by(user_id=g.user.get_id())
.order_by(TabState.active.desc())
.first()
)
databases = {
database.id: {
k: v for k, v in database.to_json().items() if k in DATABASE_KEYS
}
for database in db.session.query(models.Database).all()
}
user_queries = db.session.query(Query).filter_by(user_id=g.user.get_id()).all()
queries = {
query.client_id: {k: v for k, v in query.to_dict().items()}
for query in user_queries
}
d = {
"defaultDbId": config["SQLLAB_DEFAULT_DBID"],
"common": self.common_bootstrap_payload(),
"tab_state_ids": tab_state_ids,
"active_tab": active_tab.to_dict() if active_tab else None,
"databases": databases,
"queries": queries,
}
return self.render_template(
"superset/basic.html",

View File

@ -18,18 +18,24 @@
from typing import Callable
import simplejson as json
from flask import g, redirect
from flask import g, redirect, request, Response
from flask_appbuilder import expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access, has_access_api
from flask_babel import gettext as __, lazy_gettext as _
from flask_sqlalchemy import BaseQuery
from superset import appbuilder, get_feature_flags, security_manager
from superset.models.sql_lab import Query, SavedQuery
from superset import appbuilder, db, get_feature_flags, security_manager
from superset.models.sql_lab import Query, SavedQuery, TableSchema, TabState
from superset.utils import core as utils
from .base import BaseSupersetView, DeleteMixin, SupersetFilter, SupersetModelView
from .base import (
BaseSupersetView,
DeleteMixin,
json_success,
SupersetFilter,
SupersetModelView,
)
class QueryFilter(SupersetFilter):
@ -169,6 +175,165 @@ class SavedQueryViewApi(SavedQueryView):
appbuilder.add_view_no_menu(SavedQueryViewApi)
appbuilder.add_view_no_menu(SavedQueryView)
class TabStateView(BaseSupersetView):
    """REST endpoints backing SQL Lab tab persistence.

    Each ``TabState`` row stores the state of one SQL Lab editor tab for
    a user; these endpoints are called by the frontend to keep the backend
    copy in sync.
    """

    def _get_owner_id(self, tab_state_id):
        # Returns the owning user's id, or None when the tab does not exist.
        return db.session.query(TabState.user_id).filter_by(id=tab_state_id).scalar()

    @has_access_api
    @expose("/", methods=["POST"])
    def post(self):
        """Create a new tab from the posted ``queryEditor`` JSON and make it
        the user's active tab."""
        query_editor = json.loads(request.form["queryEditor"])
        tab_state = TabState(
            user_id=g.user.get_id(),
            label=query_editor.get("title", "Untitled Query"),
            active=True,
            database_id=query_editor["dbId"],
            schema=query_editor.get("schema"),
            sql=query_editor.get("sql", "SELECT ..."),
            query_limit=query_editor.get("queryLimit"),
        )
        # only one tab can be active at a time, so deactivate all others first
        (
            db.session.query(TabState)
            .filter_by(user_id=g.user.get_id())
            .update({"active": False})
        )
        db.session.add(tab_state)
        db.session.commit()
        return json_success(json.dumps({"id": tab_state.id}))

    @has_access_api
    @expose("/<int:tab_state_id>", methods=["DELETE"])
    def delete(self, tab_state_id):
        """Delete a tab and its associated table schemas."""
        # NOTE(review): a nonexistent id also yields 403 here (owner is None);
        # `activate` below returns 404 in that case — confirm this is intended.
        if self._get_owner_id(tab_state_id) != int(g.user.get_id()):
            return Response(status=403)
        db.session.query(TabState).filter(TabState.id == tab_state_id).delete(
            synchronize_session=False
        )
        db.session.query(TableSchema).filter(
            TableSchema.tab_state_id == tab_state_id
        ).delete(synchronize_session=False)
        db.session.commit()
        return json_success(json.dumps("OK"))

    @has_access_api
    @expose("/<int:tab_state_id>", methods=["GET"])
    def get(self, tab_state_id):
        """Return the full state of a tab (used when lazily loading a tab
        that was bootstrapped as a stub)."""
        if self._get_owner_id(tab_state_id) != int(g.user.get_id()):
            return Response(status=403)
        tab_state = db.session.query(TabState).filter_by(id=tab_state_id).first()
        if tab_state is None:
            return Response(status=404)
        return json_success(
            json.dumps(tab_state.to_dict(), default=utils.json_iso_dttm_ser)
        )

    @has_access_api
    @expose("<int:tab_state_id>/activate", methods=["POST"])
    def activate(self, tab_state_id):
        """Mark a tab as active, deactivating all the user's other tabs."""
        owner_id = self._get_owner_id(tab_state_id)
        if owner_id is None:
            return Response(status=404)
        if owner_id != int(g.user.get_id()):
            return Response(status=403)
        # single UPDATE: the boolean expression is true only for this tab
        (
            db.session.query(TabState)
            .filter_by(user_id=g.user.get_id())
            .update({"active": TabState.id == tab_state_id})
        )
        db.session.commit()
        return json_success(json.dumps(tab_state_id))

    @has_access_api
    @expose("<int:tab_state_id>", methods=["PUT"])
    def put(self, tab_state_id):
        """Update arbitrary tab columns; each form value is JSON-encoded."""
        if self._get_owner_id(tab_state_id) != int(g.user.get_id()):
            return Response(status=403)
        fields = {k: json.loads(v) for k, v in request.form.to_dict().items()}
        db.session.query(TabState).filter_by(id=tab_state_id).update(fields)
        db.session.commit()
        return json_success(json.dumps(tab_state_id))

    @has_access_api
    @expose("<int:tab_state_id>/migrate_query", methods=["POST"])
    def migrate_query(self, tab_state_id):
        """Re-point a query (migrated from localStorage) at this
        server-backed tab."""
        if self._get_owner_id(tab_state_id) != int(g.user.get_id()):
            return Response(status=403)
        client_id = json.loads(request.form["queryId"])
        db.session.query(Query).filter_by(client_id=client_id).update(
            {"sql_editor_id": tab_state_id}
        )
        db.session.commit()
        return json_success(json.dumps(tab_state_id))

    @has_access_api
    @expose("<int:tab_state_id>/query/<client_id>", methods=["DELETE"])
    def delete_query(self, tab_state_id, client_id):
        """Delete a query from a tab; filtering on ``user_id`` scopes the
        delete to queries owned by the current user."""
        db.session.query(Query).filter_by(
            client_id=client_id, user_id=g.user.get_id(), sql_editor_id=tab_state_id
        ).delete(synchronize_session=False)
        db.session.commit()
        return json_success(json.dumps("OK"))
class TableSchemaView(BaseSupersetView):
    """REST endpoints for the table schemas shown in a SQL Lab tab's
    schema browser."""

    @has_access_api
    @expose("/", methods=["POST"])
    def post(self):
        """Create (or replace) the schema record for a table in a tab."""
        table = json.loads(request.form["table"])

        # delete any existing table schema to prevent duplicates for the
        # same (tab, database, schema, table) combination
        db.session.query(TableSchema).filter(
            TableSchema.tab_state_id == table["queryEditorId"],
            TableSchema.database_id == table["dbId"],
            TableSchema.schema == table["schema"],
            TableSchema.table == table["name"],
        ).delete(synchronize_session=False)

        table_schema = TableSchema(
            tab_state_id=table["queryEditorId"],
            database_id=table["dbId"],
            schema=table["schema"],
            table=table["name"],
            # the full client-side table payload is stored as a JSON blob
            description=json.dumps(table),
            expanded=True,
        )
        db.session.add(table_schema)
        db.session.commit()
        return json_success(json.dumps({"id": table_schema.id}))

    @has_access_api
    @expose("/<int:table_schema_id>", methods=["DELETE"])
    def delete(self, table_schema_id):
        """Delete a table schema record."""
        # NOTE(review): no ownership check here, unlike TabStateView — any
        # authenticated user with API access can delete by id; confirm.
        db.session.query(TableSchema).filter(TableSchema.id == table_schema_id).delete(
            synchronize_session=False
        )
        db.session.commit()
        return json_success(json.dumps("OK"))

    @has_access_api
    @expose("/<int:table_schema_id>/expanded", methods=["POST"])
    def expanded(self, table_schema_id):
        """Persist whether a table is expanded or collapsed in the schema
        browser; the form value is a JSON-encoded boolean."""
        payload = json.loads(request.form["expanded"])
        (
            db.session.query(TableSchema)
            .filter_by(id=table_schema_id)
            .update({"expanded": payload})
        )
        db.session.commit()
        response = json.dumps({"id": table_schema_id, "expanded": payload})
        return json_success(response)
appbuilder.add_view_no_menu(TabStateView)
appbuilder.add_view_no_menu(TableSchemaView)
appbuilder.add_link(
__("Saved Queries"), href="/sqllab/my_queries/", icon="fa-save", category="SQL Lab"
)

View File

@ -0,0 +1,63 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# flake8: noqa
import os
from copy import copy

# Start from the default Superset config and override values for tests.
from superset.config import *  # type: ignore

AUTH_USER_REGISTRATION_ROLE = "alpha"
SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "unittests.db")
DEBUG = True
SUPERSET_WEBSERVER_PORT = 8081

# Allowing SQLALCHEMY_DATABASE_URI to be defined as an env var for
# continuous integration
if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ:
    SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"]

SQL_SELECT_AS_CTA = True
SQL_MAX_ROW = 666

# dummy flag used by feature-flag unit tests
FEATURE_FLAGS = {"foo": "bar"}


def GET_FEATURE_FLAGS_FUNC(ff):
    # verifies that a custom feature-flag function is applied on top of
    # the configured flags
    ff_copy = copy(ff)
    ff_copy["super"] = "set"
    return ff_copy


TESTING = True
SECRET_KEY = "thisismyscretkey"
WTF_CSRF_ENABLED = False
PUBLIC_ROLE_LIKE_GAMMA = True
AUTH_ROLE_PUBLIC = "Public"
EMAIL_NOTIFICATIONS = False

CACHE_CONFIG = {"CACHE_TYPE": "simple"}


class CeleryConfig(object):
    # run SQL Lab queries through Celery against a local Redis broker
    BROKER_URL = "redis://localhost"
    CELERY_IMPORTS = ("superset.sql_lab",)
    CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}}
    CONCURRENCY = 1


CELERY_CONFIG = CeleryConfig

# enable backend persistence of SQL Lab state for this test run
DEFAULT_FEATURE_FLAGS = {"SQLLAB_BACKEND_PERSISTENCE": True}

14
tox.ini
View File

@ -78,6 +78,19 @@ setenv =
SUPERSET_CONFIG = tests.superset_test_config
SUPERSET_HOME = {envtmpdir}
[testenv:cypress-sqllab-backend-persist]
commands =
npm install -g npm@'>=6.5.0'
pip install -e {toxinidir}/
{toxinidir}/superset/assets/cypress_build.sh sqllab
deps =
-rrequirements.txt
-rrequirements-dev.txt
setenv =
PYTHONPATH = {toxinidir}
SUPERSET_CONFIG = tests.superset_test_config_sqllab_backend_persist
SUPERSET_HOME = {envtmpdir}
[testenv:eslint]
changedir = {toxinidir}/superset/assets
commands =
@ -142,6 +155,7 @@ envlist =
cypress-dashboard
cypress-explore
cypress-sqllab
cypress-sqllab-backend-persist
eslint
isort
javascript