2016-11-10 02:08:22 -05:00
|
|
|
"""Unit tests for Superset"""
|
2016-10-19 12:17:08 -04:00
|
|
|
import json
|
2016-08-30 00:55:31 -04:00
|
|
|
import unittest
|
|
|
|
|
|
|
|
from flask_appbuilder.security.sqla import models as ab_models
|
2018-08-29 00:04:06 -04:00
|
|
|
from mock import Mock
|
|
|
|
import pandas as pd
|
2016-08-30 00:55:31 -04:00
|
|
|
|
2018-10-16 20:59:34 -04:00
|
|
|
from superset import app, db, security_manager
|
2017-11-07 23:23:40 -05:00
|
|
|
from superset.connectors.druid.models import DruidCluster, DruidDatasource
|
|
|
|
from superset.connectors.sqla.models import SqlaTable
|
2017-03-10 12:11:51 -05:00
|
|
|
from superset.models import core as models
|
2018-10-16 20:59:34 -04:00
|
|
|
from superset.utils.core import get_main_database
|
2016-08-30 00:55:31 -04:00
|
|
|
|
2017-11-14 00:06:51 -05:00
|
|
|
# Root directory of the Superset application, read from the Flask config.
# ``.get`` means this is ``None`` when the key is absent.
BASE_DIR = app.config.get('BASE_DIR')
|
2016-08-30 00:55:31 -04:00
|
|
|
|
|
|
|
|
2016-11-10 02:08:22 -05:00
|
|
|
class SupersetTestCase(unittest.TestCase):
|
2016-08-30 00:55:31 -04:00
|
|
|
|
|
|
|
def __init__(self, *args, **kwargs):
|
2016-11-10 02:08:22 -05:00
|
|
|
super(SupersetTestCase, self).__init__(*args, **kwargs)
|
2016-08-30 00:55:31 -04:00
|
|
|
self.client = app.test_client()
|
2016-09-22 12:53:14 -04:00
|
|
|
self.maxDiff = None
|
2016-11-17 14:58:33 -05:00
|
|
|
|
2018-10-16 20:59:34 -04:00
|
|
|
@classmethod
|
|
|
|
def create_druid_test_objects(cls):
|
2016-09-22 12:53:14 -04:00
|
|
|
# create druid cluster and druid datasources
|
|
|
|
session = db.session
|
2017-03-10 12:11:51 -05:00
|
|
|
cluster = (
|
|
|
|
session.query(DruidCluster)
|
2017-11-14 00:06:51 -05:00
|
|
|
.filter_by(cluster_name='druid_test')
|
2017-03-10 12:11:51 -05:00
|
|
|
.first()
|
|
|
|
)
|
2016-09-22 12:53:14 -04:00
|
|
|
if not cluster:
|
2017-11-14 00:06:51 -05:00
|
|
|
cluster = DruidCluster(cluster_name='druid_test')
|
2016-09-22 12:53:14 -04:00
|
|
|
session.add(cluster)
|
|
|
|
session.commit()
|
|
|
|
|
2017-03-10 12:11:51 -05:00
|
|
|
druid_datasource1 = DruidDatasource(
|
2016-09-22 12:53:14 -04:00
|
|
|
datasource_name='druid_ds_1',
|
2017-11-08 00:32:45 -05:00
|
|
|
cluster_name='druid_test',
|
2016-09-22 12:53:14 -04:00
|
|
|
)
|
|
|
|
session.add(druid_datasource1)
|
2017-03-10 12:11:51 -05:00
|
|
|
druid_datasource2 = DruidDatasource(
|
2016-09-22 12:53:14 -04:00
|
|
|
datasource_name='druid_ds_2',
|
2017-11-08 00:32:45 -05:00
|
|
|
cluster_name='druid_test',
|
2016-09-22 12:53:14 -04:00
|
|
|
)
|
|
|
|
session.add(druid_datasource2)
|
|
|
|
session.commit()
|
|
|
|
|
2017-01-13 22:30:17 -05:00
|
|
|
def get_table(self, table_id):
|
2018-08-06 18:30:13 -04:00
|
|
|
return (
|
|
|
|
db.session
|
|
|
|
.query(SqlaTable)
|
|
|
|
.filter_by(id=table_id)
|
|
|
|
.one()
|
|
|
|
)
|
2017-01-13 22:30:17 -05:00
|
|
|
|
2018-09-20 14:21:11 -04:00
|
|
|
def get_or_create(self, cls, criteria, session, **kwargs):
|
2016-10-07 19:24:39 -04:00
|
|
|
obj = session.query(cls).filter_by(**criteria).first()
|
|
|
|
if not obj:
|
|
|
|
obj = cls(**criteria)
|
2018-09-20 14:21:11 -04:00
|
|
|
obj.__dict__.update(**kwargs)
|
|
|
|
session.add(obj)
|
|
|
|
session.commit()
|
2016-10-07 19:24:39 -04:00
|
|
|
return obj
|
|
|
|
|
2016-08-30 00:55:31 -04:00
|
|
|
def login(self, username='admin', password='general'):
|
2016-11-17 14:58:33 -05:00
|
|
|
resp = self.get_resp(
|
2016-08-30 00:55:31 -04:00
|
|
|
'/login/',
|
2016-11-17 14:58:33 -05:00
|
|
|
data=dict(username=username, password=password))
|
2018-05-21 20:49:02 -04:00
|
|
|
self.assertNotIn('User confirmation needed', resp)
|
2016-08-30 00:55:31 -04:00
|
|
|
|
2016-10-07 19:24:39 -04:00
|
|
|
def get_slice(self, slice_name, session):
|
|
|
|
slc = (
|
|
|
|
session.query(models.Slice)
|
2017-11-10 15:06:22 -05:00
|
|
|
.filter_by(slice_name=slice_name)
|
|
|
|
.one()
|
2016-10-07 19:24:39 -04:00
|
|
|
)
|
|
|
|
session.expunge_all()
|
|
|
|
return slc
|
|
|
|
|
2016-10-20 18:30:09 -04:00
|
|
|
def get_table_by_name(self, name):
|
2018-08-06 18:30:13 -04:00
|
|
|
return db.session.query(SqlaTable).filter_by(table_name=name).one()
|
2016-10-20 18:30:09 -04:00
|
|
|
|
|
|
|
def get_druid_ds_by_name(self, name):
|
2017-03-10 12:11:51 -05:00
|
|
|
return db.session.query(DruidDatasource).filter_by(
|
2016-10-20 18:30:09 -04:00
|
|
|
datasource_name=name).first()
|
|
|
|
|
2018-08-29 00:04:06 -04:00
|
|
|
def get_datasource_mock(self):
|
|
|
|
datasource = Mock()
|
|
|
|
results = Mock()
|
|
|
|
results.query = Mock()
|
|
|
|
results.status = Mock()
|
|
|
|
results.error_message = None
|
|
|
|
results.df = pd.DataFrame()
|
|
|
|
datasource.type = 'table'
|
|
|
|
datasource.query = Mock(return_value=results)
|
|
|
|
mock_dttm_col = Mock()
|
|
|
|
datasource.get_col = Mock(return_value=mock_dttm_col)
|
|
|
|
datasource.query = Mock(return_value=results)
|
|
|
|
datasource.database = Mock()
|
|
|
|
datasource.database.db_engine_spec = Mock()
|
|
|
|
datasource.database.db_engine_spec.mutate_expression_label = lambda x: x
|
|
|
|
return datasource
|
|
|
|
|
2016-12-01 18:21:18 -05:00
|
|
|
def get_resp(
|
|
|
|
self, url, data=None, follow_redirects=True, raise_on_error=True):
|
2016-10-02 21:03:19 -04:00
|
|
|
"""Shortcut to get the parsed results while following redirects"""
|
2016-11-17 14:58:33 -05:00
|
|
|
if data:
|
|
|
|
resp = self.client.post(
|
|
|
|
url, data=data, follow_redirects=follow_redirects)
|
|
|
|
else:
|
|
|
|
resp = self.client.get(url, follow_redirects=follow_redirects)
|
2016-12-01 18:21:18 -05:00
|
|
|
if raise_on_error and resp.status_code > 400:
|
|
|
|
raise Exception(
|
2017-11-14 00:06:51 -05:00
|
|
|
'http request failed with code {}'.format(resp.status_code))
|
2016-12-01 18:21:18 -05:00
|
|
|
return resp.data.decode('utf-8')
|
2016-11-17 14:58:33 -05:00
|
|
|
|
2016-12-01 18:21:18 -05:00
|
|
|
def get_json_resp(
|
|
|
|
self, url, data=None, follow_redirects=True, raise_on_error=True):
|
2016-10-19 12:17:08 -04:00
|
|
|
"""Shortcut to get the parsed results while following redirects"""
|
2016-12-01 18:21:18 -05:00
|
|
|
resp = self.get_resp(url, data, follow_redirects, raise_on_error)
|
2016-10-19 12:17:08 -04:00
|
|
|
return json.loads(resp)
|
|
|
|
|
2016-09-22 12:53:14 -04:00
|
|
|
def get_access_requests(self, username, ds_type, ds_id):
|
2016-11-30 17:05:09 -05:00
|
|
|
DAR = models.DatasourceAccessRequest
|
|
|
|
return (
|
|
|
|
db.session.query(DAR)
|
2017-11-10 15:06:22 -05:00
|
|
|
.filter(
|
2018-03-27 19:46:02 -04:00
|
|
|
DAR.created_by == security_manager.find_user(username=username),
|
2016-11-30 17:05:09 -05:00
|
|
|
DAR.datasource_type == ds_type,
|
|
|
|
DAR.datasource_id == ds_id,
|
2017-11-10 15:06:22 -05:00
|
|
|
)
|
|
|
|
.first()
|
2016-11-30 17:05:09 -05:00
|
|
|
)
|
2016-09-22 12:53:14 -04:00
|
|
|
|
2016-08-30 00:55:31 -04:00
|
|
|
    def logout(self):
        """Log the test client out by hitting the logout endpoint."""
        self.client.get('/logout/', follow_redirects=True)
|
|
|
|
|
2016-11-17 14:58:33 -05:00
|
|
|
def grant_public_access_to_table(self, table):
|
2018-03-27 19:46:02 -04:00
|
|
|
public_role = security_manager.find_role('Public')
|
2016-08-30 00:55:31 -04:00
|
|
|
perms = db.session.query(ab_models.PermissionView).all()
|
|
|
|
for perm in perms:
|
|
|
|
if (perm.permission.name == 'datasource_access' and
|
2016-11-17 14:58:33 -05:00
|
|
|
perm.view_menu and table.perm in perm.view_menu.name):
|
2018-03-27 19:46:02 -04:00
|
|
|
security_manager.add_permission_role(public_role, perm)
|
2016-08-30 00:55:31 -04:00
|
|
|
|
2016-11-17 14:58:33 -05:00
|
|
|
def revoke_public_access_to_table(self, table):
|
2018-03-27 19:46:02 -04:00
|
|
|
public_role = security_manager.find_role('Public')
|
2016-08-30 00:55:31 -04:00
|
|
|
perms = db.session.query(ab_models.PermissionView).all()
|
|
|
|
for perm in perms:
|
|
|
|
if (perm.permission.name == 'datasource_access' and
|
2016-11-17 14:58:33 -05:00
|
|
|
perm.view_menu and table.perm in perm.view_menu.name):
|
2018-03-27 19:46:02 -04:00
|
|
|
security_manager.del_permission_role(public_role, perm)
|
2016-11-01 23:48:31 -04:00
|
|
|
|
2018-11-07 18:57:44 -05:00
|
|
|
def run_sql(self, sql, client_id=None, user_name=None, raise_on_error=False,
|
|
|
|
query_limit=None):
|
2016-11-17 14:58:33 -05:00
|
|
|
if user_name:
|
|
|
|
self.logout()
|
|
|
|
self.login(username=(user_name if user_name else 'admin'))
|
2018-09-06 17:55:48 -04:00
|
|
|
dbid = get_main_database(db.session).id
|
2016-11-17 14:58:33 -05:00
|
|
|
resp = self.get_json_resp(
|
2016-11-10 02:08:22 -05:00
|
|
|
'/superset/sql_json/',
|
2016-12-01 18:21:18 -05:00
|
|
|
raise_on_error=False,
|
2016-11-01 23:48:31 -04:00
|
|
|
data=dict(database_id=dbid, sql=sql, select_as_create_as=False,
|
2018-11-07 18:57:44 -05:00
|
|
|
client_id=client_id, queryLimit=query_limit),
|
2016-11-01 23:48:31 -04:00
|
|
|
)
|
2016-12-01 22:53:23 -05:00
|
|
|
if raise_on_error and 'error' in resp:
|
2017-11-14 00:06:51 -05:00
|
|
|
raise Exception('run_sql failed')
|
2016-11-17 14:58:33 -05:00
|
|
|
return resp
|