# -*- coding: utf-8 -*-
"""Unit tests for Superset"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from datetime import datetime
import json
import unittest

from mock import Mock, patch

from superset import db, security_manager
from superset.connectors.druid.models import (
    DruidCluster, DruidColumn, DruidDatasource, DruidMetric,
)
from .base_tests import SupersetTestCase
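

# Mock objects are not picklable; PickableMock overrides __reduce__ so that
# mocks attached to model objects (e.g. cluster.get_datasources below) can be
# pickled or deep-copied without blowing up.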
class PickableMock(Mock):
    def __reduce__(self):
        return (Mock, ())
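

# Canned response for PyDruid's segment_metadata() call: a single segment with
# a time column, two string dimensions (dim1, dim2) and one numeric metric
# (metric1) aggregated with longSum.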
SEGMENT_METADATA = [{
    'id': 'some_id',
    'intervals': ['2013-05-13T00:00:00.000Z/2013-05-14T00:00:00.000Z'],
    'columns': {
        '__time': {
            'type': 'LONG', 'hasMultipleValues': False,
            'size': 407240380, 'cardinality': None, 'errorMessage': None,
        },
        'dim1': {
            'type': 'STRING', 'hasMultipleValues': False,
            'size': 100000, 'cardinality': 1944, 'errorMessage': None,
        },
        'dim2': {
            'type': 'STRING', 'hasMultipleValues': True,
            'size': 100000, 'cardinality': 1504, 'errorMessage': None,
        },
        'metric1': {
            'type': 'FLOAT', 'hasMultipleValues': False,
            'size': 100000, 'cardinality': None, 'errorMessage': None,
        },
    },
    'aggregators': {
        'metric1': {
            'type': 'longSum',
            'name': 'metric1',
            'fieldName': 'metric1',
        },
    },
    'size': 300000,
    'numRows': 5000000,
}]
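

# Canned Druid groupby result set, used in test_client() to build the fake
# DataFrame returned by the mocked export_pandas().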
GB_RESULT_SET = [
    {
        'version': 'v1',
        'timestamp': '2012-01-01T00:00:00.000Z',
        'event': {
            'dim1': 'Canada',
            'dim2': 'boy',
            'metric1': 12345678,
        },
    },
    {
        'version': 'v1',
        'timestamp': '2012-01-01T00:00:00.000Z',
        'event': {
            'dim1': 'USA',
            'dim2': 'girl',
            'metric1': 12345678 / 2,
        },
    },
]
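

# Pin the version reported by the cluster so no live coordinator is needed.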
DruidCluster.get_druid_version = lambda _: '0.9.1'


class DruidTests(SupersetTestCase):

    """Testing interactions with Druid"""

    def __init__(self, *args, **kwargs):
        super(DruidTests, self).__init__(*args, **kwargs)

    def get_test_cluster_obj(self):
        return DruidCluster(
            cluster_name='test_cluster',
            coordinator_host='localhost',
            coordinator_endpoint='druid/coordinator/v1/metadata',
            coordinator_port=7979,
            broker_host='localhost',
            broker_port=7980,
            broker_endpoint='druid/v2',
            metadata_last_refreshed=datetime.now())
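
    # Rebuild the `test_cluster` record from scratch with PyDruid fully mocked
    # out, so that refresh_datasources() can run without a real Druid broker.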
    def get_cluster(self, PyDruid):
        instance = PyDruid.return_value
        instance.time_boundary.return_value = [
            {'result': {'maxTime': '2016-01-01'}}]
        instance.segment_metadata.return_value = SEGMENT_METADATA

        cluster = (
            db.session
            .query(DruidCluster)
            .filter_by(cluster_name='test_cluster')
            .first()
        )
        if cluster:
            db.session.delete(cluster)
            db.session.commit()

        cluster = self.get_test_cluster_obj()

        db.session.add(cluster)
        cluster.get_datasources = PickableMock(return_value=['test_datasource'])

        return cluster

    @patch('superset.connectors.druid.models.PyDruid')
    def test_client(self, PyDruid):
        self.login(username='admin')
        cluster = self.get_cluster(PyDruid)
        cluster.refresh_datasources()
        cluster.refresh_datasources(merge_flag=True)
        datasource_id = cluster.datasources[0].id
        db.session.commit()

        nres = [
            list(v['event'].items()) + [('timestamp', v['timestamp'])]
            for v in GB_RESULT_SET]
        nres = [dict(v) for v in nres]
        import pandas as pd
        df = pd.DataFrame(nres)
        instance = PyDruid.return_value
        instance.export_pandas.return_value = df
        instance.query_dict = {}
        instance.query_builder.last_query.query_dict = {}

        resp = self.get_resp(
            '/superset/explore/druid/{}/'.format(datasource_id))
        self.assertIn('test_datasource', resp)
        form_data = {
            'viz_type': 'table',
            'granularity': 'one+day',
            'druid_time_origin': '',
            'since': '7+days+ago',
            'until': 'now',
            'row_limit': 5000,
            'include_search': 'false',
            'metrics': ['count'],
            'groupby': ['dim1'],
            'force': 'true',
        }
        # One groupby
        url = ('/superset/explore_json/druid/{}/'.format(datasource_id))
        resp = self.get_json_resp(url, {'form_data': json.dumps(form_data)})
        self.assertEqual('Canada', resp['data']['records'][0]['dim1'])

        form_data = {
            'viz_type': 'table',
            'granularity': 'one+day',
            'druid_time_origin': '',
            'since': '7+days+ago',
            'until': 'now',
            'row_limit': 5000,
            'include_search': 'false',
            'metrics': ['count'],
            'groupby': ['dim1', 'dim2'],
            'force': 'true',
        }
        # Two groupbys
        url = ('/superset/explore_json/druid/{}/'.format(datasource_id))
        resp = self.get_json_resp(url, {'form_data': json.dumps(form_data)})
        self.assertEqual('Canada', resp['data']['records'][0]['dim1'])
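
    # /superset/sync_druid/ takes a JSON payload describing a datasource
    # (dimensions + metrics_spec) and creates or updates the matching
    # DruidDatasource, DruidColumn and DruidMetric records.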
    def test_druid_sync_from_config(self):
        CLUSTER_NAME = 'new_druid'
        self.login()
        cluster = self.get_or_create(
            DruidCluster,
            {'cluster_name': CLUSTER_NAME},
            db.session)

        db.session.merge(cluster)
        db.session.commit()

        ds = (
            db.session.query(DruidDatasource)
            .filter_by(datasource_name='test_click')
            .first()
        )
        if ds:
            db.session.delete(ds)
        db.session.commit()

        cfg = {
            'user': 'admin',
            'cluster': CLUSTER_NAME,
            'config': {
                'name': 'test_click',
                'dimensions': ['affiliate_id', 'campaign', 'first_seen'],
                'metrics_spec': [{'type': 'count', 'name': 'count'},
                                 {'type': 'sum', 'name': 'sum'}],
                'batch_ingestion': {
                    'sql': "SELECT * FROM clicks WHERE d='{{ ds }}'",
                    'ts_column': 'd',
                    'sources': [{
                        'table': 'clicks',
                        'partition': "d='{{ ds }}'",
                    }],
                },
            },
        }

        def check():
            resp = self.client.post('/superset/sync_druid/', data=json.dumps(cfg))
            druid_ds = (
                db.session
                .query(DruidDatasource)
                .filter_by(datasource_name='test_click')
                .one()
            )
            col_names = set([c.column_name for c in druid_ds.columns])
            assert {'affiliate_id', 'campaign', 'first_seen'} == col_names
            metric_names = {m.metric_name for m in druid_ds.metrics}
            assert {'count', 'sum'} == metric_names
            assert resp.status_code == 201

        check()
        # checking twice to make sure a second sync yields the same results
        check()

        # The datasource now exists; add new metrics and dimensions.
        cfg = {
            'user': 'admin',
            'cluster': CLUSTER_NAME,
            'config': {
                'name': 'test_click',
                'dimensions': ['affiliate_id', 'second_seen'],
                'metrics_spec': [
                    {'type': 'bla', 'name': 'sum'},
                    {'type': 'unique', 'name': 'unique'},
                ],
            },
        }
        resp = self.client.post('/superset/sync_druid/', data=json.dumps(cfg))
        druid_ds = db.session.query(DruidDatasource).filter_by(
            datasource_name='test_click').one()
        # Columns and metrics are not deleted when the config changes, since
        # users may have defined their own dimensions / metrics and want to
        # keep them.
        assert set([c.column_name for c in druid_ds.columns]) == set(
            ['affiliate_id', 'campaign', 'first_seen', 'second_seen'])
        assert set([m.metric_name for m in druid_ds.metrics]) == set(
            ['count', 'sum', 'unique'])
        # The metric type is not overridden; 'sum' stays instead of 'bla'.
        assert set([m.metric_type for m in druid_ds.metrics]) == set(
            ['longSum', 'sum', 'unique'])
        assert resp.status_code == 201

    def test_filter_druid_datasource(self):
        CLUSTER_NAME = 'new_druid'
        cluster = self.get_or_create(
            DruidCluster,
            {'cluster_name': CLUSTER_NAME},
            db.session)
        db.session.merge(cluster)

        gamma_ds = self.get_or_create(
            DruidDatasource, {'datasource_name': 'datasource_for_gamma'},
            db.session)
        gamma_ds.cluster = cluster
        db.session.merge(gamma_ds)

        no_gamma_ds = self.get_or_create(
            DruidDatasource, {'datasource_name': 'datasource_not_for_gamma'},
            db.session)
        no_gamma_ds.cluster = cluster
        db.session.merge(no_gamma_ds)
        db.session.commit()

        security_manager.merge_perm('datasource_access', gamma_ds.perm)
        security_manager.merge_perm('datasource_access', no_gamma_ds.perm)

        perm = security_manager.find_permission_view_menu(
            'datasource_access', gamma_ds.get_perm())
        security_manager.add_permission_role(security_manager.find_role('Gamma'), perm)
        security_manager.get_session.commit()

        self.login(username='gamma')
        url = '/druiddatasourcemodelview/list/'
        resp = self.get_resp(url)
        self.assertIn('datasource_for_gamma', resp)
        self.assertNotIn('datasource_not_for_gamma', resp)
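
    # Refreshing a cluster should create a `datasource_access` permission /
    # view menu pair for each datasource it discovers.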
    @patch('superset.connectors.druid.models.PyDruid')
    def test_sync_druid_perm(self, PyDruid):
        self.login(username='admin')
        instance = PyDruid.return_value
        instance.time_boundary.return_value = [
            {'result': {'maxTime': '2016-01-01'}}]
        instance.segment_metadata.return_value = SEGMENT_METADATA

        cluster = (
            db.session
            .query(DruidCluster)
            .filter_by(cluster_name='test_cluster')
            .first()
        )
        if cluster:
            db.session.delete(cluster)
            db.session.commit()

        cluster = DruidCluster(
            cluster_name='test_cluster',
            coordinator_host='localhost',
            coordinator_port=7979,
            broker_host='localhost',
            broker_port=7980,
            metadata_last_refreshed=datetime.now())

        db.session.add(cluster)
        cluster.get_datasources = PickableMock(
            return_value=['test_datasource'],
        )

        cluster.refresh_datasources()
        cluster.datasources[0].merge_flag = True
        metadata = cluster.datasources[0].latest_metadata()
        self.assertEqual(len(metadata), 4)
        db.session.commit()

        view_menu_name = cluster.datasources[0].get_perm()
        view_menu = security_manager.find_view_menu(view_menu_name)
        permission = security_manager.find_permission('datasource_access')

        pv = security_manager.get_session.query(
            security_manager.permissionview_model).filter_by(
            permission=permission, view_menu=view_menu).first()
        assert pv is not None
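
    # A refresh should register every column found in the segment metadata and
    # auto-generate min/max/sum metrics for the numeric metric1 column.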
    @patch('superset.connectors.druid.models.PyDruid')
    def test_refresh_metadata(self, PyDruid):
        self.login(username='admin')
        cluster = self.get_cluster(PyDruid)
        cluster.refresh_datasources()
        datasource = cluster.datasources[0]

        cols = (
            db.session.query(DruidColumn)
            .filter(DruidColumn.datasource_id == datasource.id)
        )

        for col in cols:
            self.assertIn(
                col.column_name,
                SEGMENT_METADATA[0]['columns'].keys(),
            )

        metrics = (
            db.session.query(DruidMetric)
            .filter(DruidMetric.datasource_id == datasource.id)
            .filter(DruidMetric.metric_name.like('%__metric1'))
        )

        self.assertEqual(
            {metric.metric_name for metric in metrics},
            {'max__metric1', 'min__metric1', 'sum__metric1'},
        )

        for metric in metrics:
            agg, _ = metric.metric_name.split('__')

            self.assertEqual(
                json.loads(metric.json)['type'],
                'double{}'.format(agg.capitalize()),
            )
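
    # If Druid later reports a different type for a column (FLOAT -> LONG
    # here), a refresh should update the stored column type and switch the
    # generated aggregators from double* to long*.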
    @patch('superset.connectors.druid.models.PyDruid')
    def test_refresh_metadata_augment_type(self, PyDruid):
        self.login(username='admin')
        cluster = self.get_cluster(PyDruid)
        cluster.refresh_datasources()

        metadata = SEGMENT_METADATA[:]
        metadata[0]['columns']['metric1']['type'] = 'LONG'
        instance = PyDruid.return_value
        instance.segment_metadata.return_value = metadata
        cluster.refresh_datasources()
        datasource = cluster.datasources[0]

        column = (
            db.session.query(DruidColumn)
            .filter(DruidColumn.datasource_id == datasource.id)
            .filter(DruidColumn.column_name == 'metric1')
        ).one()

        self.assertEqual(column.type, 'LONG')

        metrics = (
            db.session.query(DruidMetric)
            .filter(DruidMetric.datasource_id == datasource.id)
            .filter(DruidMetric.metric_name.like('%__metric1'))
        )

        for metric in metrics:
            agg, _ = metric.metric_name.split('__')

            self.assertEqual(
                metric.json_obj['type'],
                'long{}'.format(agg.capitalize()),
            )

    @patch('superset.connectors.druid.models.PyDruid')
    def test_refresh_metadata_augment_verbose_name(self, PyDruid):
        self.login(username='admin')
        cluster = self.get_cluster(PyDruid)
        cluster.refresh_datasources()
        datasource = cluster.datasources[0]

        metrics = (
            db.session.query(DruidMetric)
            .filter(DruidMetric.datasource_id == datasource.id)
            .filter(DruidMetric.metric_name.like('%__metric1'))
        )

        for metric in metrics:
            metric.verbose_name = metric.metric_name

        db.session.commit()

        # The verbose name should not change during a refresh.
        cluster.refresh_datasources()
        datasource = cluster.datasources[0]

        metrics = (
            db.session.query(DruidMetric)
            .filter(DruidMetric.datasource_id == datasource.id)
            .filter(DruidMetric.metric_name.like('%__metric1'))
        )

        for metric in metrics:
            self.assertEqual(metric.verbose_name, metric.metric_name)

    def test_urls(self):
        cluster = self.get_test_cluster_obj()
        self.assertEqual(
            cluster.get_base_url('localhost', '9999'), 'http://localhost:9999')
        self.assertEqual(
            cluster.get_base_url('http://localhost', '9999'),
            'http://localhost:9999')
        self.assertEqual(
            cluster.get_base_url('https://localhost', '9999'),
            'https://localhost:9999')

        self.assertEqual(
            cluster.get_base_broker_url(),
            'http://localhost:7980/druid/v2')


if __name__ == '__main__':
    unittest.main()