2016-11-10 02:08:22 -05:00
|
|
|
"""Unit tests for Superset"""
|
2016-08-30 00:55:31 -04:00
|
|
|
import csv
|
2017-11-20 11:33:18 -05:00
|
|
|
import datetime
|
2016-02-10 12:34:09 -05:00
|
|
|
import doctest
|
2017-11-07 23:23:40 -05:00
|
|
|
import io
|
2016-08-30 00:55:31 -04:00
|
|
|
import json
|
2017-03-10 12:11:51 -05:00
|
|
|
import logging
|
2017-11-28 00:07:12 -05:00
|
|
|
import os
|
2016-09-11 10:39:07 -04:00
|
|
|
import random
|
2018-06-02 14:08:43 -04:00
|
|
|
import re
|
2017-11-28 00:07:12 -05:00
|
|
|
import string
|
2015-09-26 18:55:33 -04:00
|
|
|
import unittest
|
2016-03-28 11:09:46 -04:00
|
|
|
|
2018-09-20 14:21:11 -04:00
|
|
|
import mock
|
2017-11-20 11:33:18 -05:00
|
|
|
import pandas as pd
|
|
|
|
import psycopg2
|
2017-09-13 23:59:03 -04:00
|
|
|
import sqlalchemy as sqla
|
2015-09-26 18:55:33 -04:00
|
|
|
|
2018-10-16 20:59:34 -04:00
|
|
|
from superset import dataframe, db, jinja_context, security_manager, sql_lab
|
2017-11-07 23:23:40 -05:00
|
|
|
from superset.connectors.sqla.models import SqlaTable
|
2018-06-28 00:35:12 -04:00
|
|
|
from superset.db_engine_specs import BaseEngineSpec
|
2017-03-10 12:11:51 -05:00
|
|
|
from superset.models import core as models
|
2017-04-04 23:15:19 -04:00
|
|
|
from superset.models.sql_lab import Query
|
2018-10-16 20:59:34 -04:00
|
|
|
from superset.utils import core as utils
|
|
|
|
from superset.utils.core import get_main_database
|
2017-03-10 12:11:51 -05:00
|
|
|
from superset.views.core import DatabaseView
|
2016-11-10 02:08:22 -05:00
|
|
|
from .base_tests import SupersetTestCase
|
2016-03-28 11:09:46 -04:00
|
|
|
|
2015-09-26 18:55:33 -04:00
|
|
|
|
2016-11-10 02:08:22 -05:00
|
|
|
class CoreTests(SupersetTestCase):
|
2016-04-20 18:08:10 -04:00
|
|
|
|
|
|
|
def __init__(self, *args, **kwargs):
    # Plain pass-through constructor; all fixture setup happens in
    # setUpClass/setUp, not here.
    super(CoreTests, self).__init__(*args, **kwargs)
|
2016-06-12 14:01:16 -04:00
|
|
|
|
|
|
|
@classmethod
def setUpClass(cls):
    """Cache a mapping of table name -> table id for every SqlaTable.

    Tests look up datasource ids by name (e.g. ``energy_usage``) via
    ``cls.table_ids`` instead of querying the database repeatedly.
    """
    all_tables = db.session.query(SqlaTable).all()
    cls.table_ids = {table.table_name: table.id for table in all_tables}
|
2015-09-26 18:55:33 -04:00
|
|
|
|
|
|
|
def setUp(self):
    """Start each test from a clean slate by purging transient rows."""
    # These tables accumulate rows as a side effect of exercising the app;
    # clear them so assertions are not polluted by earlier tests.
    for model in (Query, models.DatasourceAccessRequest, models.Log):
        db.session.query(model).delete()
|
2015-09-26 18:55:33 -04:00
|
|
|
|
2016-03-28 11:09:46 -04:00
|
|
|
def tearDown(self):
    # Remove any SQL Lab Query rows the test created so they do not leak
    # into subsequent tests.
    db.session.query(Query).delete()
|
2015-09-22 14:36:54 -04:00
|
|
|
|
2017-02-10 04:17:49 -05:00
|
|
|
def test_login(self):
    """Exercise login, logout, and a failed login attempt in sequence.

    The string 'User confirmation needed' appears on the login page, so its
    presence/absence in the response body signals whether the client is
    authenticated.
    """
    # Valid credentials: should land past the login page.
    resp = self.get_resp(
        '/login/',
        data=dict(username='admin', password='general'))
    self.assertNotIn('User confirmation needed', resp)

    # Logging out redirects back to the login page.
    resp = self.get_resp('/logout/', follow_redirects=True)
    self.assertIn('User confirmation needed', resp)

    # Bad password: should stay on the login page.
    resp = self.get_resp(
        '/login/',
        data=dict(username='admin', password='wrongPassword'))
    self.assertIn('User confirmation needed', resp)
|
|
|
|
|
2016-10-07 19:24:39 -04:00
|
|
|
def test_slice_endpoint(self):
    """The slice page renders controls, and standalone mode hides the menu."""
    self.login(username='admin')
    slc = self.get_slice('Girls', db.session)
    resp = self.get_resp('/superset/slice/{}/'.format(slc.id))
    # Normal render includes explore controls and the nav menu.
    assert 'Time Column' in resp
    assert 'List Roles' in resp

    # Testing overrides: ?standalone=true should strip the navigation chrome.
    resp = self.get_resp(
        '/superset/slice/{}/?standalone=true'.format(slc.id))
    assert 'List Roles' not in resp
|
|
|
|
|
2018-01-28 12:46:13 -05:00
|
|
|
def test_cache_key(self):
    """Cache keys are stable for a fixed query object and change with it."""
    self.login(username='admin')
    girls_slice = self.get_slice('Girls', db.session)

    visualization = girls_slice.viz
    query_obj = visualization.query_obj()
    original_key = visualization.cache_key(query_obj)

    # Hashing the same query object twice must yield the same key.
    self.assertEqual(original_key, visualization.cache_key(query_obj))

    # Mutating the query object must invalidate the key.
    query_obj['groupby'] = []
    self.assertNotEqual(original_key, visualization.cache_key(query_obj))
|
|
|
|
|
2018-02-27 18:08:06 -05:00
|
|
|
def test_old_slice_json_endpoint(self):
    """The legacy explore_json URL (datasource in the path) still serves data."""
    self.login(username='admin')
    slc = self.get_slice('Girls', db.session)

    endpoint = '/superset/explore_json/{}/{}/'.format(
        slc.datasource_type, slc.datasource_id)
    payload = {'form_data': json.dumps(slc.viz.form_data)}
    resp = self.get_resp(endpoint, payload)
    # The Girls dataset contains the name Jennifer in its JSON payload.
    assert '"Jennifer"' in resp
|
|
|
|
|
2018-02-27 18:08:06 -05:00
|
|
|
def test_slice_json_endpoint(self):
    """A slice's own explore_json URL returns its data payload."""
    self.login(username='admin')
    girls_slice = self.get_slice('Girls', db.session)
    body = self.get_resp(girls_slice.explore_json_url)
    assert '"Jennifer"' in body
|
|
|
|
|
|
|
|
def test_old_slice_csv_endpoint(self):
    """The legacy explore_json URL with ?csv=true returns CSV output."""
    self.login(username='admin')
    slc = self.get_slice('Girls', db.session)

    endpoint = '/superset/explore_json/{}/{}/?csv=true'.format(
        slc.datasource_type, slc.datasource_id)
    payload = {'form_data': json.dumps(slc.viz.form_data)}
    resp = self.get_resp(endpoint, payload)
    # CSV rows are comma-delimited, hence the trailing comma.
    assert 'Jennifer,' in resp
|
|
|
|
|
2018-02-27 18:08:06 -05:00
|
|
|
def test_slice_csv_endpoint(self):
    """The modern explore_json endpoint serves CSV given only a slice_id."""
    self.login(username='admin')
    girls_slice = self.get_slice('Girls', db.session)

    csv_endpoint = '/superset/explore_json/?csv=true'
    payload = {'form_data': json.dumps({'slice_id': girls_slice.id})}
    body = self.get_resp(csv_endpoint, payload)
    assert 'Jennifer,' in body
|
|
|
|
|
2016-09-22 12:53:14 -04:00
|
|
|
def test_admin_only_permissions(self):
    """Druid sync and approve permissions belong to Admin only."""
    admin_only_perms = ('can_sync_druid_source', 'can_approve')

    def assert_admin_permission_in(role_name, assert_func):
        # Collect the flat list of permission names granted to the role.
        role = security_manager.find_role(role_name)
        granted = [perm.permission.name for perm in role.permissions]
        for perm_name in admin_only_perms:
            assert_func(perm_name, granted)

    assert_admin_permission_in('Admin', self.assertIn)
    assert_admin_permission_in('Alpha', self.assertNotIn)
    assert_admin_permission_in('Gamma', self.assertNotIn)
|
|
|
|
|
|
|
|
def test_admin_only_menu_views(self):
    """Security and user-management menus are visible to Admin only."""
    admin_only_views = (
        'ResetPasswordView',
        'RoleModelView',
        'Security',
        'UserDBModelView',
        'SQL Lab',
    )

    def assert_admin_view_menus_in(role_name, assert_func):
        role = security_manager.find_role(role_name)
        menus = [perm.view_menu.name for perm in role.permissions]
        for view_name in admin_only_views:
            assert_func(view_name, menus)

    assert_admin_view_menus_in('Admin', self.assertIn)
    assert_admin_view_menus_in('Alpha', self.assertNotIn)
    assert_admin_view_menus_in('Gamma', self.assertNotIn)
|
|
|
|
|
2016-04-20 18:08:10 -04:00
|
|
|
def test_save_slice(self):
    """Save-as creates a new slice; overwrite updates it in place.

    Flow: copy 'Energy Sankey' under a new name via action=saveas, then
    rename the copy via action=overwrite and verify both the name and the
    stored form_data, cleaning up the copy at the end.
    """
    self.login(username='admin')
    slice_name = 'Energy Sankey'
    slice_id = self.get_slice(slice_name, db.session).id
    db.session.commit()
    copy_name = 'Test Sankey Save'
    tbl_id = self.table_ids.get('energy_usage')
    # Fixed typo: was 'Test Sankey Overwirte'. The string is only compared
    # against itself below, so correcting it does not change the test logic.
    new_slice_name = 'Test Sankey Overwrite'

    url = (
        '/superset/explore/table/{}/?slice_name={}&'
        'action={}&datasource_name=energy_usage')

    form_data = {
        'viz_type': 'sankey',
        'groupby': 'target',
        'metric': 'sum__value',
        'row_limit': 5000,
        'slice_id': slice_id,
    }
    # Changing name and save as a new slice
    self.get_resp(
        url.format(
            tbl_id,
            copy_name,
            'saveas',
        ),
        {'form_data': json.dumps(form_data)},
    )
    slices = db.session.query(models.Slice) \
        .filter_by(slice_name=copy_name).all()
    # Exactly one slice should exist under the copy name.
    assert len(slices) == 1
    new_slice_id = slices[0].id

    form_data = {
        'viz_type': 'sankey',
        'groupby': 'source',
        'metric': 'sum__value',
        'row_limit': 5000,
        'slice_id': new_slice_id,
        'time_range': 'now',
    }
    # Overwrite the copy with a new name and modified form_data.
    self.get_resp(
        url.format(
            tbl_id,
            new_slice_name,
            'overwrite',
        ),
        {'form_data': json.dumps(form_data)},
    )
    slc = db.session.query(models.Slice).filter_by(id=new_slice_id).first()
    assert slc.slice_name == new_slice_name
    # The persisted form_data must round-trip exactly.
    assert slc.viz.form_data == form_data
    # Clean up the slice created by this test.
    db.session.delete(slc)
|
2016-11-17 14:58:33 -05:00
|
|
|
|
2016-12-16 17:23:48 -05:00
|
|
|
def test_filter_endpoint(self):
    """The /superset/filter endpoint returns filter values for a column."""
    self.login(username='admin')
    slice_name = 'Energy Sankey'
    slice_id = self.get_slice(slice_name, db.session).id
    db.session.commit()
    tbl_id = self.table_ids.get('energy_usage')
    # Bug fix: the original code omitted .first(), so the attribute was set
    # on a Query object rather than the SqlaTable model instance.
    table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id).first()
    table.filter_select_enabled = True
    url = (
        '/superset/filter/table/{}/target/?viz_type=sankey&groupby=source'
        '&metric=sum__value&flt_col_0=source&flt_op_0=in&flt_eq_0=&'
        'slice_id={}&datasource_name=energy_usage&'
        'datasource_id=1&datasource_type=table')

    # Hitting the filter endpoint should return candidate filter values.
    resp = self.get_resp(url.format(tbl_id, slice_id))
    assert len(resp) > 0
    assert 'Carbon Dioxide' in resp
|
|
|
|
|
[wip] dashboard builder v2 (#4528)
* [dashboard builder] Add dir structure for dashboard/v2, simplified Header, split pane, Draggable side panel
[grid] add <DashboardGrid />, <ResizableContainer />, and initial grid components.
[grid] gridComponents/ directory, add fixtures/ directory and test layout, add <Column />
[grid] working grid with gutters
[grid] design tweaks and polish, add <Tabs />
[header] add gradient header logo and favicon
[dnd] begin adding dnd functionality
[dnd] add util/isValidChild.js
[react-beautiful-dnd] iterate on dnd until blocked
[dnd] refactor to use react-dnd
[react-dnd] refactor to use composable <DashboardComponent /> structure
[dnd] factor out DashboardComponent, let components render dropInidcator and set draggableRef, add draggable tabs
[dnd] refactor to use redux, add DashboardComponent and DashboardGrid containers
[dragdroppable] rename horizontal/vertical => row/column
[builder] refactor into HoverMenu, add WithPopoverMenu
[builder] add editable header and disableDragDrop prop for Dragdroppable's
[builder] make tabs editable
[builder] add generic popover dropdown and header row style editability
[builder] add hover rowStyle dropdown, make row styles editable
[builder] add some new component icons, add popover with delete to charts
[builder] add preview icons, add popover menu to rows.
[builder] add IconButton and RowStyleDropdown
[resizable] use ResizableContainer instead of DimensionProvider, fix resize and delete bugs
[builder] fix bug with spacer
[builder] clean up, header.size => header.headerSize
[builder] support more drag/drop combinations by wrapping some components in rows upon drop. fix within list drop index. refactor some utils.
[builder][tabs] fix broken add tab button
[dashboard builder] don't pass dashboard layout to all dashboard components, improve drop indicator logic, fix delete component pure component bug
[dnd] refactor drop position logic
* fix rebase error, clean up css organization and use @less vars
* [dashboard-builder] add top-level tabs + undo-redo (#4626)
* [top-level-tabs] initial working version of top-level tabs
* [top-level-tabs] simplify redux and disable ability to displace top-level tabs with other tabs
* [top-level-tabs] improve tab drag and drop css
* [undo-redo] add redux undo redo
* [dnd] clean up dropResult shape, add new component source id + type, use css for drop indicator instead of styles and fix tab indicators.
* [top-level-tabs] add 'Collapse tab content' to delete tabs button
* [dnd] add depth validation to drag and drop logic
* [dashboard-builder] add resize action, enforce minimum width of columns, column children inherit column size when necessary, meta.rowStyle => meta.background, add background to columns
* [dashboard-builder] make sure getChildWidth returns a number
* [dashboard builder] static layout + toasts (#4763)
* [dashboard-builder] remove spacer component
* [dashboard-builder] better transparent indicator, better grid gutter logic, no dragging top-level tabs, headers are multiples of grid unit, fix row height granularity, update redux state key dashboard => dashboardLayout
* [dashboard-builder] don't blast column child dimensions on resize
* [dashboard-builder] ResizableContainer min size can't be smaller than size, fix row style, role=none on WithPopoverMenu container
* [edit mode] add edit mode to redux and propagate to all <DashboardComponent />s
* [toasts] add Toast component, ToastPresenter container and component, and toast redux actions + reducers
* [dashboard-builder] add info toast when dropResult overflows parent
* [dashboard builder] git mv to src/ post-rebase
* Dashboard builder rebased + linted (#4849)
* define dashboard redux state
* update dashboard state reducer
* dashboard layout converter + grid render
* builder pane + slice adder
* Dashboard header + slice header controls
* fix linting
* 2nd code review comments
* [dashboard builder] improve perf (#4855)
* address major perf + css issues
[dashboard builder] fix dashboard filters and some css
[dashboard builder] use VIZ_TYPES, move stricter .eslintrc to dashboard/, more css fixes
[builder] delete GridCell and GridLayout, remove some unused css. fix broken tabs.
* [builder] fix errors post-rebase
* [builder] add support for custom DragDroppable drag layer and add AddSliceDragPreview
* [AddSliceDragPreview] fix type check
* [dashboard builder] add prettier and update all files
* [dashboard builder] merge v2/ directory int dashboard/
* [dashboard builder] move component/*Container => containers/*
* add sticky tabs + sidepane, better tabs perf, better container hierarchy, better chart header (#4893)
* dashboard header, slice header UI improvement
* add slider and sticky
* dashboard header, slice header UI improvement
* make builder pane floating
* [dashboard builder] add sticky top-level tabs, refactor for performant tabs
* [dashboard builder] visually distinct containers, icons for undo-redo, fix some isValidChild bugs
* [dashboard builder] better undo redo <> save changes state, notify upon reaching undo limit
* [dashboard builder] hook up edit + create component actions to saved-state pop.
* [dashboard builder] visual refinement, refactor Dashboard header content and updates into layout for undo-redo, refactor save dashboard modal to use toasts instead of notify.
* [dashboard builder] refactor chart name update logic to use layout for undo redo, save slice name changes on dashboard save
* add slider and sticky
* [dashboard builder] fix layout converter slice_id + chartId type casting, don't change grid size upon edit (perf)
* [dashboard builder] don't set version key in getInitialState
* [dashboard builder] make top level tabs addition/removal undoable, fix double sticky tabs + side panel.
* [dashboard builder] fix sticky tabs offset bug
* [dashboard builder] fix drag preview width, css polish, fix rebase issue
* [dashboard builder] fix side pane labels and hover z-index
* Markdown for dashboard (#4962)
* fix dashboard server-side unit tests (#5009)
* Dashboard save button (#4979)
* save button
* fix slices list height
* save custom css
* merge save-dash changes from dashboard v1
https://github.com/apache/incubator-superset/pull/4900
https://github.com/apache/incubator-superset/pull/5051
* [dashboard v2] check for default_filters before json_loads-ing them (#5064)
[dashboard v2] check for default_filters before json-loads-ing them
* [dashboard v2] fix bugs from rebase
* [dashboard v2] tests! (#5066)
* [dashboard v2][tests] add tests for newComponentFactory, isValidChild, dropOverflowsParent, and dnd-reorder
* [dashboard v2][tests] add tests for componentIsResizable, findParentId, getChartIdsFromLayout, newEntitiesFromDrop, and getDropPosition
* [dashboard v2][tests] add mockStore, mockState, and tests for DragDroppable, DashboardBuilder, DashboardGrid, ToastPresenter, and Toast
* [dashboard builder][tests] separate files for state tree fixtures, add ChartHolder, Chart, Divider, Header, Row tests and WithDragDropContext helper
* [dashboard v2][tests] fix dragdrop context with util/getDragDropManager, add test for menu/* and resizable/*, and new components
* [dashboard v2][tests] fix and re-write Dashboard tests, add getFormDataWithExtraFilters_spec
* [dashboard v2][tests] add reducer tests, fix lint error
* [dashboard-v2][tests] add actions/dashboardLayout_spec
* [dashboard v2] fix some prop bugs, open side pane on edit, fix slice name bug
* [dashboard v2] fix slice name save bug
* [dashboard v2] fix lint errors
* [dashboard v2] fix filters bug and add test
* [dashboard v2] fix getFormDataWithExtraFilters_spec
* [dashboard v2] logging updates (#5087)
* [dashboard v2] initial logging refactor
* [dashboard v2] clean up logger
* [logger] update explore with new log events, add refresh dashboard + refresh dashboard chart actions
* [logging] add logger_spec.js, fix reducers/dashboardState_spec + gridComponents/Chart_spec
* [dashboard v2][logging] refactor for bulk logging in python
* [logging] tweak python, fix and remove dup start_offset entries
* [dashboard v2][logging] add dashboard_first_load event
* [dashboard v2][logging] add slice_ids to dashboard pane load event
* [tests] fix npm test script
* Fix: update slices list when add/remove multiple slices (#5138)
* [dashboard v2] add v1 switch (#5126)
* [dashboard] copy all dashboard v1 into working v1 switch
* [dashboard] add functional v1 <> v2 switch with messaging
* [dashboard] add v2 logging to v1 dashboard, add read-v2-changes link, add client logging to track v1 <> v2 switches
* [dashboard] Remove default values for feedback url + v2 auto convert date
* [dashboard v2] fix misc UI/UX issues
* [dashboard v2] fix Markdown persistence issues and css, fix copy dash title, don't enforce shallow hovering with drop indicator
* [dashboard v2] improve non-shallow drop target UX, fix Markdown drop indicator, clarify slice adder filter/sort
* [dashboard v2] delete empty rows on drag or delete events that leave them without children, add test
* [dashboard v2] improve v1<>v2 switch modals, add convert to v2 badge in v1, fix unsaved changes issue in preview mode, don't auto convert column child widths for now
* [dashboard v2][dnd] add drop position cache to fix non-shallow drops
* [dashboard] fix test script with glob instead of recurse, fix tests, add temp fix for tab nesting, ignore v1 lint errors
* [dashboard] v2 badge style tweaks, add back v1 _set_dash_metadata for v1 editing
* [dashboard] fix python linting and tests
* [dashboard] lint tests
* add slice from explore view (#5141)
* Fix dashboard position row data (#5131)
* add slice_name to markdown
(cherry picked from commit 14b01f1)
* set min grid width be 1 column
* remove empty column
* check total columns count <= 12
* scan position data and fix rows
* fix dashboard url with default_filters
* [dashboard v2] better grid drop ux, fix tab bugs 🐛 (#5151)
* [dashboard v2] add empty droptarget to dashboard grid for better ux and update test
* [dashboard] reset tab index upon top-level tab deletion, fix findparentid bug
* [dashboard] update v1<>v2 modal link for tracking
* Fix: Should pass slice_can_edit flag down (#5159)
* [dash builder fix] combine markdown and slice name, slice picker height (#5165)
* combine markdown code and markdown slice name
* allow dynamic height for slice picker cell
* add word break for long datasource name
* [fix] new dashboard state (#5213)
* [dashboard v2] ui + ux fixes (#5208)
* [dashboard v2] use <Loading /> throughout, small loading gif, improve row/column visual hierarchy, add cached data pop
* [dashboard v2] lots of polish
* [dashboard v2] remove markdown padding on edit, more opaque slice drag preview, unsavedChanges=true upon moving a component, fix initial load logging.
* [dashboard v2] gray loading.gif, sticky header, undo/redo keyboard shortcuts, fix move component saved changes update, v0 double scrollbar fix
* [dashboard v2] move UndoRedoKeylisteners into Header, render only in edit mode, show visual feedback for keyboard shortcut, hide hover menu in top-level tabs
* [dashboard v2] fix grid + sidepane height issues
* [dashboard v2] add auto-resize functionality, update tests. cache findParentId results.
* [dashboard v2][tests] add getDetailedComponentWidth_spec.js
* [dashboard v2] fix lint
* [fix] layout converter fix (#5218)
* [fix] layout converter fix
* add changed_on into initial sliceEntity data
* add unit tests for SliceAdder component
* remove old fixtures file
* [dashboard v2] remove webpack-cli, fresh yarn.lock post-rebase
* [dashboard v2] lint javascript
* [dashboard v2] fix python tests
* [Fix] import/export dash in V2 (#5273)
* [dashboard v2] add markdown tests (#5275)
* [dashboard v2] add Markdown tests
* [dashboard v2][mocks] fix markdown mock
2018-06-25 12:17:22 -04:00
|
|
|
def test_slice_data(self):
    """Slice data should expose the required metadata attributes."""
    self.login(username='admin')
    girls_slice = self.get_slice('Girls', db.session)
    available_keys = girls_slice.data.keys()
    for required_key in ('changed_on', 'modified'):
        assert required_key in available_keys
|
|
|
|
|
2015-09-26 18:55:33 -04:00
|
|
|
def test_slices(self):
    """Smoke-test the two supported endpoints for every slice in the db."""
    self.login(username='admin')
    endpoints = []
    for slc in db.session.query(models.Slice).all():
        endpoints.append((slc.slice_name, 'explore', slc.slice_url))
        endpoints.append(
            (slc.slice_name, 'explore_json', slc.explore_json_url))
    for name, method, url in endpoints:
        # Log which slice/endpoint is being hit to ease debugging failures.
        logging.info('[{name}]/[{method}]: {url}'.format(**locals()))
        self.client.get(url)
|
2015-09-26 18:55:33 -04:00
|
|
|
|
2017-06-13 12:44:26 -04:00
|
|
|
def test_tablemodelview_list(self):
    """The table list view shows at least one table and its explore link."""
    self.login(username='admin')

    body = self.get_resp('/tablemodelview/list/')

    # Any table from the metadata db should be listed with an explore link.
    first_table = db.session.query(SqlaTable).first()
    assert first_table.name in body
    assert '/superset/explore/table/{}'.format(first_table.id) in body
|
|
|
|
|
2017-06-13 12:44:26 -04:00
|
|
|
def test_add_slice(self):
    """/chart/add should respond with HTTP 200 for an admin user."""
    self.login(username='admin')
    response = self.client.get('/chart/add')
    self.assertEqual(response.status_code, 200)
|
|
|
|
|
2018-02-06 15:38:07 -05:00
|
|
|
def test_get_user_slices(self):
    """Filtering slices by creator via the API returns HTTP 200."""
    self.login(username='admin')
    admin_id = security_manager.find_user('admin').id
    endpoint = '/sliceaddview/api/read?_flt_0_created_by={}'.format(admin_id)
    response = self.client.get(endpoint)
    self.assertEqual(response.status_code, 200)
|
|
|
|
|
2016-12-12 19:42:38 -05:00
|
|
|
def test_slices_V2(self):
    """Smoke-test all slice urls as a user with the explore-v2-beta role."""
    # Create the beta role and a user that holds it.
    security_manager.add_role('explore-v2-beta')

    security_manager.add_user(
        'explore_beta', 'explore_beta', ' user', 'explore_beta@airbnb.com',
        security_manager.find_role('explore-v2-beta'),
        password='general')
    self.login(username='explore_beta', password='general')

    Slc = models.Slice
    urls = []
    for slc in db.session.query(Slc).all():
        urls += [
            (slc.slice_name, 'slice_url', slc.slice_url),
        ]
    for name, method, url in urls:
        # Consistency fix: use logging (as in test_slices) instead of print,
        # and drop the unused `response` binding.
        logging.info('[{name}]/[{method}]: {url}'.format(**locals()))
        self.client.get(url)
|
|
|
|
|
2016-02-10 12:34:09 -05:00
|
|
|
def test_doctests(self):
    """Run the doctests embedded in key superset modules."""
    modules = [utils, models, sql_lab]
    for mod in modules:
        failed, tests = doctest.testmod(mod)
        if failed:
            # Include the module and failure count so a failing run points
            # straight at the offending module (the original message did not).
            raise Exception(
                'Failed {} doctest(s) in module {}'.format(
                    failed, mod.__name__))
|
2016-02-10 12:34:09 -05:00
|
|
|
|
2016-03-28 11:09:46 -04:00
|
|
|
def test_misc(self):
    """All health-check style endpoints answer with the literal 'OK'."""
    for endpoint in ('/health', '/healthcheck', '/ping'):
        assert self.get_resp(endpoint) == 'OK'
|
2016-03-27 17:23:33 -04:00
|
|
|
|
2017-10-16 23:15:16 -04:00
|
|
|
def test_testconn(self, username='admin'):
    """/superset/testconn accepts both masked and decrypted sqlalchemy URIs."""
    self.login(username=username)
    database = get_main_database(db.session)

    def assert_testconn_ok(uri):
        # POST a connection payload for `uri` and verify a JSON 200 response.
        data = json.dumps({
            'uri': uri,
            'name': 'main',
            'impersonate_user': False,
        })
        response = self.client.post(
            '/superset/testconn',
            data=data,
            content_type='application/json')
        assert response.status_code == 200
        assert response.headers['Content-Type'] == 'application/json'

    # validate that the endpoint works with the password-masked sqlalchemy uri
    assert_testconn_ok(database.safe_sqlalchemy_uri())

    # validate that the endpoint works with the decrypted sqlalchemy uri
    assert_testconn_ok(database.sqlalchemy_uri_decrypted)
|
2016-09-17 15:32:41 -04:00
|
|
|
|
2017-09-13 23:59:03 -04:00
|
|
|
def test_custom_password_store(self):
    """A custom password store overrides the password in the decrypted URI."""
    database = get_main_database(db.session)
    conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)

    def custom_password_store(uri):
        return 'password_store_test'

    models.custom_password_store = custom_password_store
    # Fix: restore the module-level hook in a finally block so a failing
    # assertion cannot leave the custom store active for later tests.
    try:
        conn = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted)
        # Only meaningful when the URI actually carries a password.
        if conn_pre.password:
            assert conn.password == 'password_store_test'
            assert conn.password != conn_pre.password
    finally:
        # Disable the password store for later tests.
        models.custom_password_store = None
|
2017-09-13 23:59:03 -04:00
|
|
|
|
2016-10-11 19:49:40 -04:00
|
|
|
def test_databaseview_edit(self, username='admin'):
    """Posting a password-masked uri must not clobber the decrypted uri."""
    self.login(username=username)
    database = get_main_database(db.session)
    uri_before = database.sqlalchemy_uri_decrypted

    # Build an edit form from the database's current attribute values,
    # then substitute the masked (password-hidden) uri.
    edit_url = 'databaseview/edit/{}'.format(database.id)
    form = {col: getattr(database, col) for col in DatabaseView.add_columns}
    form['sqlalchemy_uri'] = database.safe_sqlalchemy_uri()
    self.client.post(edit_url, data=form)

    # Re-fetch and confirm the stored decrypted uri is unchanged.
    database = get_main_database(db.session)
    self.assertEqual(uri_before, database.sqlalchemy_uri_decrypted)
|
2016-09-19 18:14:00 -04:00
|
|
|
|
2016-09-06 16:58:09 -04:00
|
|
|
def test_warm_up_cache(self):
    """warm_up_cache works both per-slice and per-table."""
    girls_slice = self.get_slice('Girls', db.session)

    # Warming a single slice echoes back its id and name.
    slice_result = self.get_json_resp(
        '/superset/warm_up_cache?slice_id={}'.format(girls_slice.id))
    expected = [{
        'slice_id': girls_slice.id,
        'slice_name': girls_slice.slice_name,
    }]
    assert slice_result == expected

    # Warming by table should touch at least one slice.
    table_result = self.get_json_resp(
        '/superset/warm_up_cache?table_name=energy_usage&db_name=main')
    assert len(table_result) > 0
|
2016-09-06 16:58:09 -04:00
|
|
|
|
2016-04-20 18:08:10 -04:00
|
|
|
def test_shortner(self):
    """The URL shortener returns a short /r/<id> link for a long explore URL."""
    self.login(username='admin')
    long_url = (
        '//superset/explore/table/1/?viz_type=sankey&groupby=source&'
        'groupby=target&metric=sum__value&row_limit=5000&where=&having=&'
        'flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id=78&slice_name='
        'Energy+Sankey&collapsed_fieldsets=&action=&datasource_name='
        'energy_usage&datasource_id=1&datasource_type=table&'
        'previous_viz_type=sankey'
    )
    response = self.client.post('/r/shortner/', data=dict(data=long_url))
    body = response.data.decode('utf-8')
    # The response body should contain the shortened /r/<numeric id> path.
    assert re.search(r'\/r\/[0-9]+', body)
|
2016-04-20 18:08:10 -04:00
|
|
|
|
2017-01-27 13:20:24 -05:00
|
|
|
def test_kv(self):
    """Key/value store endpoints round-trip a JSON payload."""
    self.logout()
    self.login(username='admin')

    # Posting with no payload: the endpoint's failure mode is tolerated,
    # not asserted.  NOTE: the original code called
    # ``self.assertRaises(TypeError)`` with no callable here, which
    # returns an unused context manager and asserts nothing; that no-op
    # has been removed.
    try:
        self.client.post('/kv/store/', data=dict())
    except Exception:
        pass

    # Store a value and confirm it lands in the KeyValue table verbatim.
    value = json.dumps({'data': 'this is a test'})
    resp = self.client.post('/kv/store/', data=dict(data=value))
    self.assertEqual(resp.status_code, 200)
    kv = db.session.query(models.KeyValue).first()
    kv_value = kv.value
    self.assertEqual(json.loads(value), json.loads(kv_value))

    # Fetch it back through the API and compare the decoded JSON.
    resp = self.client.get('/kv/{}/'.format(kv.id))
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(
        json.loads(value),
        json.loads(resp.data.decode('utf-8')))

    # Unknown key: again tolerate (but do not require) an exception.
    try:
        self.client.get('/kv/10001/')
    except Exception:
        pass
|
|
|
|
|
2016-04-26 19:44:51 -04:00
|
|
|
def test_gamma(self):
    """Gamma users can reach the chart and dashboard list views."""
    self.login(username='gamma')
    chart_page = self.get_resp('/chart/list/')
    assert 'List Charts' in chart_page
    dashboard_page = self.get_resp('/dashboard/list/')
    assert 'List Dashboard' in dashboard_page
|
2016-04-26 19:44:51 -04:00
|
|
|
|
2016-08-30 00:55:31 -04:00
|
|
|
def test_csv_endpoint(self):
    """/superset/csv/<client_id> streams query results back as CSV."""
    self.login('admin')
    sql = """
        SELECT first_name, last_name
        FROM ab_user
        WHERE first_name='admin'
    """
    client_id = '{}'.format(random.getrandbits(64))[:10]
    self.run_sql(sql, client_id, raise_on_error=True)

    resp = self.get_resp('/superset/csv/{}'.format(client_id))
    data = csv.reader(io.StringIO(resp))
    expected_data = csv.reader(
        io.StringIO('first_name,last_name\nadmin, user\n'))
    # BUG FIX: this assertion was missing — the first query's CSV output
    # was computed but never compared before being overwritten below.
    self.assertEqual(list(expected_data), list(data))

    # A LIKE query should produce CSV output too.
    sql = "SELECT first_name FROM ab_user WHERE first_name LIKE '%admin%'"
    client_id = '{}'.format(random.getrandbits(64))[:10]
    self.run_sql(sql, client_id, raise_on_error=True)

    resp = self.get_resp('/superset/csv/{}'.format(client_id))
    data = csv.reader(io.StringIO(resp))
    expected_data = csv.reader(
        io.StringIO('first_name\nadmin\n'))
    self.assertEqual(list(expected_data), list(data))
    self.logout()
|
|
|
|
|
2016-10-19 12:17:08 -04:00
|
|
|
def test_extra_table_metadata(self):
    """Smoke test: the extra_table_metadata endpoint responds for a
    table in the main database.  The payload itself is not inspected."""
    self.login('admin')
    dbid = get_main_database(db.session).id
    # Pass the id explicitly instead of the fragile ``.format(**locals())``,
    # which silently depends on local variable names.
    self.get_json_resp(
        '/superset/extra_table_metadata/{dbid}/'
        'ab_permission_view/panoramix/'.format(dbid=dbid))
|
|
|
|
|
2016-10-26 14:09:27 -04:00
|
|
|
def test_process_template(self):
    """Jinja expressions embedded in SQL get rendered by the processor."""
    maindb = get_main_database(db.session)
    templated_sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
    processor = jinja_context.get_template_processor(database=maindb)
    self.assertEqual(
        "SELECT '2017-01-01T00:00:00'",
        processor.process_template(templated_sql))
|
2017-03-14 16:23:20 -04:00
|
|
|
|
|
|
|
def test_get_template_kwarg(self):
    """Kwargs passed to get_template_processor are visible in templates."""
    maindb = get_main_database(db.session)
    template = '{{ foo }}'
    processor = jinja_context.get_template_processor(database=maindb, foo='bar')
    self.assertEqual('bar', processor.process_template(template))
|
2017-03-14 16:23:20 -04:00
|
|
|
|
|
|
|
def test_template_kwarg(self):
    """Kwargs passed to process_template itself are visible in templates."""
    maindb = get_main_database(db.session)
    template = '{{ foo }}'
    processor = jinja_context.get_template_processor(database=maindb)
    self.assertEqual('bar', processor.process_template(template, foo='bar'))
|
2016-10-26 14:09:27 -04:00
|
|
|
|
|
|
|
def test_templated_sql_json(self):
    """sql_json renders Jinja templates before executing the query."""
    self.login('admin')
    templated_sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}' as test"
    result = self.run_sql(templated_sql, 'fdaklj3ws')
    self.assertEqual(result['data'][0]['test'], '2017-01-01T00:00:00')
|
2016-10-26 14:09:27 -04:00
|
|
|
|
2016-11-01 02:52:37 -04:00
|
|
|
def test_table_metadata(self):
    """Table metadata endpoint describes ab_user; richer checks on real DBs."""
    maindb = get_main_database(db.session)
    backend = maindb.backend
    meta = self.get_json_resp(
        '/superset/table/{}/ab_user/null/'.format(maindb.id))
    self.assertEqual(meta['name'], 'ab_user')
    assert len(meta['columns']) > 5
    assert meta.get('selectStar').startswith('SELECT')

    # Engine specific tests — key/index introspection only applies to
    # mysql and postgresql backends; guard clause instead of nesting.
    if backend not in ('mysql', 'postgresql'):
        return
    self.assertEqual(meta.get('primaryKey').get('type'), 'pk')
    self.assertEqual(
        meta.get('primaryKey').get('column_names')[0], 'id')
    self.assertEqual(len(meta.get('foreignKeys')), 2)
    if backend == 'mysql':
        self.assertEqual(len(meta.get('indexes')), 7)
    elif backend == 'postgresql':
        self.assertEqual(len(meta.get('indexes')), 5)
|
|
|
|
|
2016-11-08 18:55:49 -05:00
|
|
|
def test_fetch_datasource_metadata(self):
    """Datasource metadata payload contains every expected key."""
    self.login(username='admin')
    url = (
        '/superset/fetch_datasource_metadata?' +
        'datasourceKey=1__table'
    )
    payload = self.get_json_resp(url)
    expected_keys = (
        'name', 'filterable_cols', 'gb_cols', 'type', 'all_cols',
        'order_by_choices', 'metrics_combo', 'granularity_sqla',
        'time_grain_sqla', 'id',
    )
    for key in expected_keys:
        self.assertIn(key, payload.keys())
|
2016-11-01 02:52:37 -04:00
|
|
|
|
2017-04-24 23:48:25 -04:00
|
|
|
def test_user_profile(self, username='admin'):
    """Profile page and the per-user JSON endpoints respond without errors."""
    self.login(username=username)
    girls = self.get_slice('Girls', db.session)

    # Fave a slice and a dashboard so the profile has data to show.
    fav_url = '/superset/favstar/Slice/{}/select/'.format(girls.id)
    fav_resp = self.get_json_resp(fav_url)
    self.assertEqual(fav_resp['count'], 1)

    dash = (
        db.session
        .query(models.Dashboard)
        .filter_by(slug='births')
        .first()
    )
    fav_url = '/superset/favstar/Dashboard/{}/select/'.format(dash.id)
    fav_resp = self.get_json_resp(fav_url)
    self.assertEqual(fav_resp['count'], 1)

    userid = security_manager.find_user('admin').id
    profile_page = self.get_resp('/superset/profile/admin/')
    self.assertIn('"app"', profile_page)

    # None of the per-user JSON endpoints should answer with an error
    # payload (signalled by a 'message' key).
    per_user_endpoints = (
        '/superset/recent_activity/{}/',
        '/superset/created_slices/{}/',
        '/superset/created_dashboards/{}/',
        '/superset/fave_slices/{}/',
        '/superset/fave_dashboards/{}/',
    )
    for endpoint in per_user_endpoints:
        payload = self.get_json_resp(endpoint.format(userid))
        self.assertNotIn('message', payload)
    payload = self.get_json_resp(
        '/superset/fave_dashboards_by_username/{}/'.format(username))
    self.assertNotIn('message', payload)
|
2016-11-20 00:23:44 -05:00
|
|
|
|
2017-08-24 12:11:41 -04:00
|
|
|
def test_slice_id_is_always_logged_correctly_on_web_request(self):
    """/superset/explore writes exactly one log row carrying the slice id."""
    girls = db.session.query(models.Slice).filter_by(slice_name='Girls').one()
    log_query = db.session.query(models.Log).filter_by(slice_id=girls.id)
    self.get_resp(girls.slice_url, {'form_data': json.dumps(girls.form_data)})
    self.assertEqual(1, log_query.count())
|
|
|
|
|
|
|
|
def test_slice_id_is_always_logged_correctly_on_ajax_request(self):
    """/superset/explore_json writes exactly one log row with the slice id."""
    self.login(username='admin')
    girls = db.session.query(models.Slice).filter_by(slice_name='Girls').one()
    log_query = db.session.query(models.Log).filter_by(slice_id=girls.id)
    ajax_url = girls.slice_url.replace('explore', 'explore_json')
    self.get_json_resp(ajax_url, {'form_data': json.dumps(girls.form_data)})
    self.assertEqual(1, log_query.count())
|
|
|
|
|
2017-09-26 12:03:03 -04:00
|
|
|
def test_slice_query_endpoint(self):
    """slice_query API returns both the query text and its language."""
    self.login(username='admin')
    girls = self.get_slice('Girls', db.session)
    body = self.get_resp('/superset/slice_query/{}/'.format(girls.id))
    for fragment in ('query', 'language'):
        assert fragment in body
    self.logout()
|
2016-11-01 02:52:37 -04:00
|
|
|
|
2017-09-27 20:51:04 -04:00
|
|
|
def test_viz_get_fillna_for_columns(self):
    """Fill-na defaults: ' NULL' for the text column, 0 for the metric."""
    girls = self.get_slice('Girls', db.session)
    query_obj = girls.viz.query_obj()
    results = girls.viz.datasource.query(query_obj)
    fillna = girls.viz.get_fillna_for_columns(results.df.columns)
    self.assertDictEqual(fillna, {'name': ' NULL', 'sum__num': 0})
|
|
|
|
|
2017-11-28 00:07:12 -05:00
|
|
|
def test_import_csv(self):
    """CSV upload flow: form is reachable and a CSV imports successfully."""
    self.login(username='admin')
    filename = 'testCSV.csv'
    table_name = ''.join(
        random.choice(string.ascii_uppercase) for _ in range(5))

    # Write the fixture file; ``with`` guarantees the handle is closed.
    with open(filename, 'w+') as test_file:
        test_file.write('a,b\n')
        test_file.write('john,1\n')
        test_file.write('paul,2\n')

    main_db = (
        db.session.query(models.Database)
        .filter_by(database_name='main')
        .all()
    )

    try:
        # BUG FIX: the upload handle was previously opened and never
        # closed.  It must stay open for the duration of the POST, so
        # the whole interaction lives inside the ``with`` block.
        with open(filename, 'rb') as test_file:
            form_data = {
                'csv_file': test_file,
                'sep': ',',
                'name': table_name,
                'con': main_db[0].id,
                'if_exists': 'append',
                'index_label': 'test_label',
                'mangle_dupe_cols': False,
            }
            url = '/databaseview/list/'
            add_datasource_page = self.get_resp(url)
            assert 'Upload a CSV' in add_datasource_page

            url = '/csvtodatabaseview/form'
            form_get = self.get_resp(url)
            assert 'CSV to Database configuration' in form_get

            # ensure uploaded successfully
            form_post = self.get_resp(url, data=form_data)
            assert 'CSV file \"testCSV.csv\" uploaded to table' in form_post
    finally:
        os.remove(filename)
|
|
|
|
|
2017-11-20 11:33:18 -05:00
|
|
|
def test_dataframe_timezone(self):
    """Timezone-aware datetimes survive the SupersetDataFrame conversion."""
    tz = psycopg2.tz.FixedOffsetTimezone(offset=60, name=None)
    records = [
        (datetime.datetime(2017, 11, 18, 21, 53, 0, 219225, tzinfo=tz),),
        (datetime.datetime(2017, 11, 18, 22, 6, 30, 61810, tzinfo=tz),),
    ]
    df = dataframe.SupersetDataFrame(list(records), [['data']], BaseEngineSpec)
    converted = df.data
    expected = (
        pd.Timestamp('2017-11-18 21:53:00.219225+0100', tz=tz),
        pd.Timestamp('2017-11-18 22:06:30.061810+0100', tz=tz),
    )
    self.assertDictEqual(converted[0], {'data': expected[0]})
    self.assertDictEqual(converted[1], {'data': expected[1]})
|
|
|
|
|
2018-02-18 19:30:11 -05:00
|
|
|
def test_comments_in_sqlatable_query(self):
    """SQL comments wrapped around a virtual table's query are stripped."""
    inner_sql = "SELECT '/* val 1 */' as c1, '-- val 2' as c2 FROM tbl"
    # Comment markers inside string literals must survive; only the
    # surrounding comments should be removed.
    wrapped_sql = '/* comment 1 */' + inner_sql + '-- comment 2'
    virtual_table = SqlaTable(sql=wrapped_sql)
    self.assertEqual(inner_sql, str(virtual_table.get_from_clause()))
|
|
|
|
|
2018-02-27 18:11:01 -05:00
|
|
|
def test_slice_payload_no_data(self):
    """explore_json succeeds with a 'No data' error when filters match nothing."""
    self.login(username='admin')
    girls = self.get_slice('Girls', db.session)
    form_data = girls.form_data
    # Filter on a state value that cannot match any rows.
    form_data.update({
        'filters': [{'col': 'state', 'op': 'in', 'val': ['N/A']}],
    })

    payload = self.get_json_resp(
        '/superset/explore_json/',
        {'form_data': json.dumps(form_data)},
    )
    self.assertEqual(payload['status'], utils.QueryStatus.SUCCESS)
    self.assertEqual(payload['error'], 'No data')
|
2018-02-27 18:11:01 -05:00
|
|
|
|
|
|
|
def test_slice_payload_invalid_query(self):
    """explore_json surfaces FAILED status and a stacktrace for a bad groupby."""
    self.login(username='admin')
    girls = self.get_slice('Girls', db.session)
    form_data = girls.form_data
    form_data.update({'groupby': ['N/A']})

    payload = self.get_json_resp(
        '/superset/explore_json/',
        {'form_data': json.dumps(form_data)},
    )
    self.assertEqual(payload['status'], utils.QueryStatus.FAILED)
    assert 'KeyError' in payload['stacktrace']
|
|
|
|
|
2018-03-07 00:19:13 -05:00
|
|
|
def test_slice_payload_viz_markdown(self):
    """Markdown visualizations respond with neither a status nor an error."""
    self.login(username='admin')
    markdown_slice = self.get_slice('Title', db.session)

    endpoint = markdown_slice.get_explore_url(base_url='/superset/explore_json')
    payload = self.get_json_resp(endpoint)
    self.assertIsNone(payload['status'])
    self.assertIsNone(payload['error'])
|
|
|
|
|
2018-09-20 14:21:11 -04:00
|
|
|
@mock.patch('superset.security.SupersetSecurityManager.schemas_accessible_by_user')
@mock.patch('superset.security.SupersetSecurityManager.database_access')
@mock.patch('superset.security.SupersetSecurityManager.all_datasource_access')
def test_schemas_access_for_csv_upload_endpoint(self,
                                                mock_all_datasource_access,
                                                mock_database_access,
                                                mock_schemas_accessible):
    """Only schemas the user can access are offered for CSV upload."""
    # Deny the broad permissions so the schema whitelist decides.
    mock_all_datasource_access.return_value = False
    mock_database_access.return_value = False
    mock_schemas_accessible.return_value = ['this_schema_is_allowed_too']
    database_name = 'fake_db_100'
    db_id = 100
    # Two schemas allowed for upload, but only one is accessible.
    extra = """{
        "schemas_allowed_for_csv_upload":
        ["this_schema_is_allowed", "this_schema_is_allowed_too"]
    }"""

    self.login(username='admin')
    dbobj = self.get_or_create(
        cls=models.Database,
        criteria={'database_name': database_name},
        session=db.session,
        id=db_id,
        extra=extra)
    schemas = self.get_json_resp(
        url='/superset/schemas_access_for_csv_upload?db_id={db_id}'
        .format(db_id=dbobj.id))
    assert schemas == ['this_schema_is_allowed_too']
|
|
|
|
|
2018-10-08 13:32:40 -04:00
|
|
|
def test_select_star(self):
    """select_star renders a query that mentions the table's columns."""
    self.login(username='admin')
    page = self.get_resp('/superset/select_star/1/birth_names')
    self.assertIn('gender', page)
|
|
|
|
|
2017-09-27 20:51:04 -04:00
|
|
|
|
2015-09-26 18:55:33 -04:00
|
|
|
if __name__ == '__main__':
    # Allow running this test module directly (outside a test runner).
    unittest.main()
|