superset/panoramix/viz.py

1281 lines
37 KiB
Python
Raw Normal View History

2015-09-27 21:02:00 -04:00
from collections import OrderedDict, defaultdict
from datetime import datetime, timedelta
2015-09-20 13:45:11 -04:00
import json
2015-09-15 15:33:26 -04:00
import uuid
2015-09-17 21:06:03 -04:00
from flask import flash, request, Markup
from markdown import markdown
from pandas.io.json import dumps
2015-09-30 00:41:49 -04:00
from werkzeug.datastructures import ImmutableMultiDict
2015-09-17 21:06:03 -04:00
from werkzeug.urls import Href
2015-09-05 01:14:07 -04:00
import numpy as np
2015-09-17 21:06:03 -04:00
import pandas as pd
2015-07-06 11:56:41 -04:00
2015-09-25 18:43:50 -04:00
from panoramix import app, utils
2015-09-30 00:41:49 -04:00
from panoramix.forms import FormFactory
2015-07-06 11:56:41 -04:00
2015-10-20 14:52:40 -04:00
from six import string_types
2015-09-25 18:43:50 -04:00
config = app.config
2015-08-03 16:37:56 -04:00
2015-07-06 11:56:41 -04:00
class BaseViz(object):

    """Base class for all visualization types.

    Binds a datasource to the submitted form data, builds and validates
    the corresponding WTForm, and exposes the hooks subclasses override:
    ``query_obj`` (query specification), ``get_df`` (fetch + massage a
    DataFrame) and ``get_json_data`` (widget-specific serialization).
    """

    viz_type = None
    verbose_name = "Base Viz"
    is_timeseries = False
    # Fieldsets drive the explore form layout; a nested tuple renders its
    # fields side by side on a single row.
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'metrics', 'groupby',
            )
        },)
    js_files = []
    css_files = []
    form_overrides = {}

    def __init__(self, datasource, form_data):
        self.orig_form_data = form_data
        self.datasource = datasource
        self.request = request
        self.viz_type = form_data.get("viz_type")

        # TODO refactor all form related logic out of here and into forms.py
        ff = FormFactory(self)
        form_class = ff.get_form()
        defaults = form_class().data.copy()
        previous_viz_type = form_data.get('previous_viz_type')
        if isinstance(form_data, ImmutableMultiDict):
            form = form_class(form_data)
        else:
            form = form_class(**form_data)
        data = form.data.copy()
        if not form.validate():
            for k, v in form.errors.items():
                if not data.get('json') and not data.get('async'):
                    flash("{}: {}".format(k, " ".join(v)), 'danger')
        if previous_viz_type != self.viz_type:
            # When switching viz types only carry over the fields that
            # were explicitly submitted.
            data = {
                k: form.data[k]
                for k in form_data.keys()
                if k in form.data}
        defaults.update(data)
        self.form_data = defaults
        self.query = ""

        self.form_data['previous_viz_type'] = self.viz_type
        self.token = self.form_data.get(
            'token', 'token_' + uuid.uuid4().hex[:8])

        self.metrics = self.form_data.get('metrics') or []
        self.groupby = self.form_data.get('groupby') or []
        self.reassignments()

    def get_form_override(self, fieldname, attr):
        """Return a subclass-provided form field override, if any."""
        if (
                fieldname in self.form_overrides and
                attr in self.form_overrides[fieldname]):
            s = self.form_overrides[fieldname][attr]
            if attr == 'label':
                s = '<label for="{fieldname}">{s}</label>'.format(**locals())
                s = Markup(s)
            return s

    def fieldsetizer(self):
        """
        Makes form_fields support either a list approach or a fieldsets
        approach
        """
        return self.fieldsets

    @classmethod
    def flat_form_fields(cls):
        """Return the set of all field names across every fieldset."""
        fields = set()
        for fieldset in cls.fieldsets:
            for obj in fieldset['fields']:
                if isinstance(obj, (tuple, list)):
                    fields |= {a for a in obj}
                elif obj:
                    fields.add(obj)
        return fields

    def reassignments(self):
        """Hook for subclasses to remap form fields before querying."""
        pass

    def get_url(self, **kwargs):
        """Build an explore URL from the original form data + overrides."""
        d = self.orig_form_data.copy()
        if 'action' in d:
            del d['action']
        d.update(kwargs)
        # Remove unchecked checkboxes because HTML is weird like that.
        # Iterate over a snapshot of the keys: deleting from a dict while
        # iterating its live key view raises RuntimeError on Python 3.
        for key in list(d.keys()):
            if d[key] == False:  # noqa: E712 `== False` kept: also drops 0
                del d[key]
        href = Href(
            '/panoramix/explore/{self.datasource.type}/'
            '{self.datasource.id}/'.format(**locals()))
        return href(d)

    def get_df(self, query_obj=None):
        """Run the query and return the result as a DataFrame.

        :raises Exception: when the query returns no data
        """
        if not query_obj:
            query_obj = self.query_obj()

        self.error_msg = ""
        self.results = None

        self.results = self.datasource.query(**query_obj)
        self.query = self.results.query
        df = self.results.df
        if df is None or df.empty:
            raise Exception("No data, review your incantations!")
        else:
            if 'timestamp' in df.columns:
                df.timestamp = pd.to_datetime(df.timestamp, utc=False)
                if self.datasource.offset:
                    # Apply the datasource's timezone offset (in hours).
                    df.timestamp += timedelta(hours=self.datasource.offset)
        df = df.fillna(0)
        return df

    @property
    def form(self):
        return self.form_class(**self.form_data)

    @property
    def form_class(self):
        return FormFactory(self).get_form()

    def query_filters(self):
        """Collect (col, op, value) filters from the form and dashboard."""
        form_data = self.form_data
        # Building filters: up to 9 numbered filter fields from the form.
        filters = []
        for i in range(1, 10):
            col = form_data.get("flt_col_" + str(i))
            op = form_data.get("flt_op_" + str(i))
            eq = form_data.get("flt_eq_" + str(i))
            if col and op and eq:
                filters.append((col, op, eq))

        # Extra filters (coming from dashboard)
        extra_filters = form_data.get('extra_filters', [])
        if extra_filters:
            extra_filters = json.loads(extra_filters)
            for slice_id, slice_filters in extra_filters.items():
                if slice_filters:
                    for col, vals in slice_filters:
                        if col and vals:
                            filters += [(col, 'in', ",".join(vals))]
        return filters

    def query_obj(self):
        """
        Building a query object
        """
        form_data = self.form_data
        groupby = form_data.get("groupby") or []
        metrics = form_data.get("metrics") or ['count']
        granularity = form_data.get("granularity")
        limit = int(form_data.get("limit", 0))
        row_limit = int(
            form_data.get("row_limit", config.get("ROW_LIMIT")))
        since = form_data.get("since", "1 year ago")
        from_dttm = utils.parse_human_datetime(since)
        if from_dttm > datetime.now():
            # Mirror a future date into the past ("next week" -> "a week ago")
            from_dttm = datetime.now() - (from_dttm - datetime.now())
        until = form_data.get("until", "now")
        to_dttm = utils.parse_human_datetime(until)
        if from_dttm > to_dttm:
            flash("The date range doesn't seem right.", "danger")
            from_dttm = to_dttm  # Making them identical to not raise

        # extras are used to query elements specific to a datasource type
        # for instance the extra where clause that applies only to Tables
        extras = {
            'where': form_data.get("where", ''),
            'having': form_data.get("having", ''),
        }
        d = {
            'granularity': granularity,
            'from_dttm': from_dttm,
            'to_dttm': to_dttm,
            'is_timeseries': self.is_timeseries,
            'groupby': groupby,
            'metrics': metrics,
            'row_limit': row_limit,
            'filter': self.query_filters(),
            'timeseries_limit': limit,
            'extras': extras,
        }
        return d

    def get_json(self):
        """Full payload served by the json endpoint."""
        payload = {
            'data': json.loads(self.get_json_data()),
            'query': self.query,
            'form_data': self.form_data,
            'json_endpoint': self.json_endpoint,
            'csv_endpoint': self.csv_endpoint,
            'standalone_endpoint': self.standalone_endpoint,
        }
        return json.dumps(payload)

    def get_csv(self):
        df = self.get_df()
        return df.to_csv(index=False)

    def get_json_data(self):
        """Subclasses override this with their widget-specific payload."""
        return json.dumps([])

    @property
    def json_endpoint(self):
        return self.get_url(json="true")

    @property
    def csv_endpoint(self):
        return self.get_url(csv="true")

    @property
    def standalone_endpoint(self):
        return self.get_url(standalone="true")

    @property
    def data(self):
        """Dict of metadata the frontend needs to render the slice."""
        content = {
            'viz_name': self.viz_type,
            'json_endpoint': self.json_endpoint,
            'csv_endpoint': self.csv_endpoint,
            'standalone_endpoint': self.standalone_endpoint,
            'token': self.token,
            'form_data': self.form_data,
        }
        return content

    @property
    def json_data(self):
        return dumps(self.data)
2015-07-06 11:56:41 -04:00
class TableViz(BaseViz):

    """Renders the query results as a plain data table."""

    viz_type = "table"
    verbose_name = "Table View"
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'row_limit',
                ('include_search', None),
            )
        },
        {
            'label': "GROUP BY",
            'fields': (
                'groupby',
                'metrics',
            )
        },
        {
            'label': "NOT GROUPED BY",
            'fields': (
                'all_columns',
            )
        },)
    css_files = [
        'lib/dataTables/dataTables.bootstrap.css',
        'widgets/viz_table.css',
    ]
    is_timeseries = False
    js_files = [
        'lib/dataTables/jquery.dataTables.min.js',
        'lib/dataTables/dataTables.bootstrap.js',
        'widgets/viz_table.js',
    ]

    def query_obj(self):
        """Build the query, rejecting mixed grouped/non-grouped requests."""
        query = super(TableViz, self).query_obj()
        form_data = self.form_data
        all_columns = form_data.get('all_columns')
        if all_columns and (
                form_data.get('groupby') or form_data.get('metrics')):
            raise Exception(
                "Choose either fields to [Group By] and [Metrics] or "
                "[Columns], not both")
        if all_columns:
            # Raw column mode: no aggregation at all.
            query['columns'] = all_columns
            query['groupby'] = []
        return query

    def get_df(self):
        """Fetch the data, dropping timestamp when not time-grained."""
        df = super(TableViz, self).get_df()
        not_time_grained = self.form_data.get("granularity") == "all"
        if not_time_grained and 'timestamp' in df:
            del df['timestamp']
        return df

    def get_json_data(self):
        """Serialize rows and column names for the dataTables widget."""
        df = self.get_df()
        payload = {
            'records': df.to_dict(orient="records"),
            'columns': list(df.columns),
        }
        return json.dumps(payload, default=utils.json_iso_dttm_ser)
2015-12-18 17:42:44 -05:00
2015-07-06 11:56:41 -04:00
2015-10-16 13:03:18 -04:00
class PivotTableViz(BaseViz):

    """Pivot table backed by pandas ``pivot_table``, rendered as HTML."""

    viz_type = "pivot_table"
    verbose_name = "Pivot Table"
    css_files = [
        'lib/dataTables/dataTables.bootstrap.css',
        'widgets/viz_pivot_table.css']
    is_timeseries = False
    js_files = [
        'lib/dataTables/jquery.dataTables.min.js',
        'lib/dataTables/dataTables.bootstrap.js',
        'widgets/viz_pivot_table.js']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'groupby',
                'columns',
                'metrics',
                'pandas_aggfunc',
            )
        },)

    def query_obj(self):
        """Validate the pivot fields and group by rows + columns."""
        query = super(PivotTableViz, self).query_obj()
        groupby = self.form_data.get('groupby') or []
        columns = self.form_data.get('columns') or []
        metrics = self.form_data.get('metrics')
        if not groupby:
            raise Exception("Please choose at least one \"Group by\" field ")
        if not metrics:
            raise Exception("Please choose at least one metric")
        if set(groupby) & set(columns):
            raise Exception("groupby and columns can't overlap")
        # The datasource groups by both dimensions; pivoting happens later.
        query['groupby'] = list(set(groupby) | set(columns))
        return query

    def get_df(self):
        """Fetch the data and pivot it, with margin totals."""
        form_data = self.form_data
        df = super(PivotTableViz, self).get_df()
        if form_data.get("granularity") == "all" and 'timestamp' in df:
            del df['timestamp']
        return df.pivot_table(
            index=form_data.get('groupby'),
            columns=form_data.get('columns'),
            values=form_data.get('metrics'),
            aggfunc=form_data.get('pandas_aggfunc'),
            margins=True,
        )

    def get_json_data(self):
        """Return the pivoted frame as a bootstrap-styled HTML table."""
        table_classes = (
            "dataframe table table-striped table-bordered "
            "table-condensed table-hover")
        return dumps(self.get_df().to_html(
            na_rep='',
            classes=table_classes))
2015-10-16 13:03:18 -04:00
class MarkupViz(BaseViz):

    """Free-form widget rendering user supplied markdown or raw HTML."""

    viz_type = "markup"
    verbose_name = "Markup Widget"
    js_files = ['widgets/viz_markup.js']
    fieldsets = (
        {
            'label': None,
            'fields': ('markup_type', 'code')
        },)
    is_timeseries = False

    def rendered(self):
        """Render ``code`` according to ``markup_type``."""
        code = self.form_data.get("code", '')
        markup_type = self.form_data.get("markup_type")
        if markup_type == "html":
            return code
        if markup_type == "markdown":
            return markdown(code)
        # Unknown markup types render as nothing.
        return None

    def get_json_data(self):
        return dumps({'html': self.rendered()})
2015-09-22 20:58:44 -04:00
class WordCloudViz(BaseViz):

    """Word cloud of a series.

    Integration with the nice library at:
    https://github.com/jasondavies/d3-cloud
    """

    viz_type = "word_cloud"
    verbose_name = "Word Cloud"
    is_timeseries = False
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'series', 'metric', 'limit',
                ('size_from', 'size_to'),
                'rotation',
            )
        },)
    js_files = [
        'lib/d3.layout.cloud.js',
        'widgets/viz_wordcloud.js',
    ]

    def query_obj(self):
        """Group by the selected series, aggregating the single metric."""
        query = super(WordCloudViz, self).query_obj()
        form_data = self.form_data
        query['metrics'] = [form_data.get('metric')]
        query['groupby'] = [form_data.get('series')]
        return query

    def get_json_data(self):
        """Emit ``[{text, size}, ...]`` records for the d3-cloud widget."""
        series_col = self.form_data.get('series')
        metric_col = self.form_data.get('metric')
        # Ordering the columns, then labeling them for a uniform schema.
        df = self.get_df()[[series_col, metric_col]]
        df.columns = ['text', 'size']
        return df.to_json(orient="records")
2015-09-22 20:58:44 -04:00
2015-09-26 18:53:37 -04:00
class NVD3Viz(BaseViz):

    """Common base for all NVD3-powered visualizations."""

    viz_type = None
    verbose_name = "Base NVD3 Viz"
    is_timeseries = False
    js_files = [
        'lib/nvd3/nv.d3.min.js',
        'widgets/viz_nvd3.js',
    ]
    css_files = [
        'lib/nvd3/nv.d3.css',
        'widgets/viz_nvd3.css',
    ]
2015-09-26 18:53:37 -04:00
2015-09-27 21:02:00 -04:00
class BubbleViz(NVD3Viz):

    """NVD3 scatter/bubble chart: x, y and bubble size per entity."""

    viz_type = "bubble"
    verbose_name = "Bubble Chart"
    is_timeseries = False
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'series', 'entity',
                'x', 'y',
                'size', 'limit',
            )
        },
        {
            'label': 'Chart Options',
            'fields': (
                ('x_log_scale', 'y_log_scale'),
                ('show_legend', None),
                'max_bubble_size',
            )
        },)

    def query_obj(self):
        """Require x/y/size metrics plus series and entity dimensions."""
        form_data = self.form_data
        query = super(BubbleViz, self).query_obj()
        # Set literal dedupes the case where series == entity.
        query['groupby'] = list({
            form_data.get('series'),
            form_data.get('entity')
        })
        self.x_metric = form_data.get('x')
        self.y_metric = form_data.get('y')
        self.z_metric = form_data.get('size')
        self.entity = form_data.get('entity')
        self.series = form_data.get('series')

        query['metrics'] = [
            self.z_metric,
            self.x_metric,
            self.y_metric,
        ]
        required = query['metrics'] + [self.entity, self.series]
        if not all(required):
            raise Exception("Pick a metric for x, y and size")
        return query

    def get_df(self):
        """Decorate the frame with the columns the nvd3 widget expects."""
        df = super(BubbleViz, self).get_df().fillna(0)
        df['x'] = df[[self.x_metric]]
        df['y'] = df[[self.y_metric]]
        df['size'] = df[[self.z_metric]]
        df['shape'] = 'circle'
        df['group'] = df[[self.series]]
        return df

    def get_json_data(self):
        """Bucket rows per series and emit nvd3 key/values pairs."""
        grouped = defaultdict(list)
        for row in self.get_df().to_dict(orient='records'):
            grouped[row['group']].append(row)
        chart_data = [
            {'key': series_name, 'values': rows}
            for series_name, rows in grouped.items()
        ]
        return dumps(chart_data)
2015-08-13 21:08:04 -04:00
2015-09-20 13:45:11 -04:00
class BigNumberViz(BaseViz):

    """A single big number with a trend line and period comparison."""

    viz_type = "big_number"
    verbose_name = "Big Number"
    is_timeseries = True
    js_files = [
        'widgets/viz_bignumber.js',
    ]
    css_files = [
        'widgets/viz_bignumber.css',
    ]
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'metric',
                'compare_lag',
                'compare_suffix',
                'y_axis_format',
            )
        },)
    form_overrides = {
        'y_axis_format': {
            'label': 'Number format',
        }
    }

    def reassignments(self):
        """Fall back on the legacy ``metrics`` field when ``metric`` is unset."""
        if not self.form_data.get('metric'):
            self.form_data['metric'] = self.orig_form_data.get('metrics')

    def query_obj(self):
        """Query exactly one metric, failing loudly when none is picked."""
        query = super(BigNumberViz, self).query_obj()
        metric = self.form_data.get('metric')
        if not metric:
            raise Exception("Pick a metric!")
        query['metrics'] = [self.form_data.get('metric')]
        self.form_data['metric'] = metric
        return query

    def get_json_data(self):
        """Serialize the sorted series plus comparison settings."""
        form_data = self.form_data
        df = self.get_df()
        # Sort on the first (timestamp) column so the trend is ordered.
        df = df.sort(columns=df.columns[0])
        lag = form_data.get("compare_lag", "")
        lag = int(lag) if lag and lag.isdigit() else 0
        payload = {
            'data': df.values.tolist(),
            'compare_lag': lag,
            'compare_suffix': form_data.get('compare_suffix', ''),
        }
        return dumps(payload)
2015-09-20 13:45:11 -04:00
2015-08-13 21:08:04 -04:00
2015-09-26 18:53:37 -04:00
class NVD3TimeSeriesViz(NVD3Viz):

    """Time series line chart with advanced analytics post-processing.

    ``get_df`` applies, in order: pivot, resample, series sort,
    contribution normalization, period-over-period comparison, and
    rolling-window aggregation. Subclasses (bar, area, compare) reuse
    this pipeline and mostly restyle the fieldsets.
    """

    viz_type = "line"
    verbose_name = "Time Series - Line Chart"
    sort_series = False
    is_timeseries = True
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity', ('since', 'until'),
                'metrics',
                'groupby', 'limit',
            ),
        }, {
            'label': 'Chart Options',
            'fields': (
                ('show_brush', 'show_legend'),
                ('rich_tooltip', 'y_axis_zero'),
                ('y_log_scale', 'contribution'),
                ('x_axis_format', 'y_axis_format'),
                ('line_interpolation', 'x_axis_showminmax'),
            ),
        }, {
            'label': 'Advanced Analytics',
            'description': (
                "This section contains options "
                "that allow for advanced analytical post processing "
                "of query results"),
            'fields': (
                ('rolling_type', 'rolling_periods'),
                'time_compare',
                'num_period_compare',
                None,
                ('resample_how', 'resample_rule',), 'resample_fillmethod'
            ),
        },
    )

    def get_df(self, query_obj=None):
        """Fetch and post-process the timeseries frame.

        :param query_obj: optional pre-built query dict (used by the
            time-comparison pass in ``get_json_data``)
        :raises Exception: when granularity is "all" (no time axis)
        """
        form_data = self.form_data
        df = super(NVD3TimeSeriesViz, self).get_df(query_obj)
        df = df.fillna(0)
        if form_data.get("granularity") == "all":
            raise Exception("Pick a time granularity for your time series")

        # One column per (metric, groupby-combination), indexed by time.
        df = df.pivot_table(
            index="timestamp",
            columns=form_data.get('groupby'),
            values=form_data.get('metrics'))

        # Optional pandas resampling; empty fill method means "leave NaN",
        # which is then zero-filled below.
        fm = form_data.get("resample_fillmethod")
        if not fm:
            fm = None
        how = form_data.get("resample_how")
        rule = form_data.get("resample_rule")
        if how and rule:
            df = df.resample(rule, how=how, fill_method=fm)
            if not fm:
                df = df.fillna(0)

        if self.sort_series:
            # Order columns by descending series total.
            dfs = df.sum()
            dfs.sort(ascending=False)
            df = df[dfs.index]

        if form_data.get("contribution"):
            # Normalize each row so series show their share of the total.
            dft = df.T
            df = (dft / dft.sum()).T

        num_period_compare = form_data.get("num_period_compare")
        if num_period_compare:
            # Ratio against the value num_period_compare periods earlier;
            # the first periods have no basis and are dropped.
            num_period_compare = int(num_period_compare)
            df = (df / df.shift(num_period_compare)) - 1
            df = df[num_period_compare:]

        rolling_periods = form_data.get("rolling_periods")
        rolling_type = form_data.get("rolling_type")
        if rolling_type in ('mean', 'std', 'sum') and rolling_periods:
            if rolling_type == 'mean':
                df = pd.rolling_mean(df, int(rolling_periods), min_periods=0)
            elif rolling_type == 'std':
                df = pd.rolling_std(df, int(rolling_periods), min_periods=0)
            elif rolling_type == 'sum':
                df = pd.rolling_sum(df, int(rolling_periods), min_periods=0)
        elif rolling_type == 'cumsum':
            # Cumulative sum needs no window length.
            df = df.cumsum()
        return df

    def to_series(self, df, classed='', title_suffix=''):
        """Convert the pivoted frame into nvd3 series dicts.

        :param classed: CSS class attached to each series (e.g. 'dashed')
        :param title_suffix: appended to every series name
        """
        series = df.to_dict('series')
        chart_data = []
        for name in df.T.index.tolist():
            ys = series[name]
            # Skip non-numeric columns.
            if df[name].dtype.kind not in "biufc":
                continue
            df['timestamp'] = pd.to_datetime(df.index, utc=False)
            if isinstance(name, string_types):
                series_title = name
            else:
                # Tuple column label: stringify and join; with a single
                # metric the metric name (first element) is dropped.
                name = ["{}".format(s) for s in name]
                if len(self.form_data.get('metrics')) > 1:
                    series_title = ", ".join(name)
                else:
                    series_title = ", ".join(name[1:])
            if title_suffix:
                series_title += title_suffix
            d = {
                "key": series_title,
                "classed": classed,
                "values": [
                    {'x': ds, 'y': ys[ds]}
                    for i, ds in enumerate(df.timestamp)]
            }
            chart_data.append(d)
        return chart_data

    def get_json_data(self):
        """Serialize the chart; optionally overlay a time-shifted copy."""
        df = self.get_df()
        chart_data = self.to_series(df)

        time_compare = self.form_data.get('time_compare')
        if time_compare:
            # Re-run the query shifted back by the requested delta and
            # overlay it as dashed series.
            query_object = self.query_obj()
            delta = utils.parse_human_timedelta(time_compare)
            query_object['inner_from_dttm'] = query_object['from_dttm']
            query_object['inner_to_dttm'] = query_object['to_dttm']
            query_object['from_dttm'] -= delta
            query_object['to_dttm'] -= delta

            df2 = self.get_df(query_object)
            df2.index += delta
            chart_data += self.to_series(
                df2, classed='dashed', title_suffix="---")
            chart_data = sorted(chart_data, key=lambda x: x['key'])
        return dumps(chart_data)
2015-09-26 18:53:37 -04:00
class NVD3TimeSeriesBarViz(NVD3TimeSeriesViz):

    """Time series rendered as (optionally stacked) vertical bars."""

    viz_type = "bar"
    sort_series = True
    verbose_name = "Time Series - Bar Chart"
    # Reuse the parent's data and advanced-analytics fieldsets, swapping
    # in bar-specific chart options.
    fieldsets = [
        NVD3TimeSeriesViz.fieldsets[0],
        {
            'label': 'Chart Options',
            'fields': (
                ('show_brush', 'show_legend'),
                ('rich_tooltip', 'y_axis_zero'),
                ('y_log_scale', 'contribution'),
                ('x_axis_format', 'y_axis_format'),
                ('line_interpolation', 'bar_stacked'),
                ('x_axis_showminmax', None),
            ),
        },
        NVD3TimeSeriesViz.fieldsets[2],
    ]
2015-09-26 18:53:37 -04:00
2015-09-27 21:21:50 -04:00
class NVD3CompareTimeSeriesViz(NVD3TimeSeriesViz):

    """Time series line chart plotting percent change over time."""

    verbose_name = "Time Series - Percent Change"
    viz_type = 'compare'
2015-09-27 11:52:26 -04:00
class NVD3TimeSeriesStackedViz(NVD3TimeSeriesViz):

    """Time series rendered as a stacked area chart."""

    viz_type = "area"
    verbose_name = "Time Series - Stacked"
    sort_series = True
    # Reuse the parent's data and advanced-analytics fieldsets, swapping
    # in stacked-area chart options.
    fieldsets = [
        NVD3TimeSeriesViz.fieldsets[0],
        {
            'label': 'Chart Options',
            'fields': (
                ('show_brush', 'show_legend'),
                ('rich_tooltip', 'y_axis_zero'),
                ('y_log_scale', 'contribution'),
                ('x_axis_format', 'y_axis_format'),
                # Was ('x_axis_showminmax') — a bare parenthesized string,
                # missing the comma that makes it a one-row tuple; fixed to
                # match NVD3TimeSeriesBarViz's row layout.
                ('x_axis_showminmax', None),
                ('line_interpolation', 'stacked_style'),
            ),
        },
        NVD3TimeSeriesViz.fieldsets[2],
    ]
2015-09-27 11:52:26 -04:00
class DistributionPieViz(NVD3Viz):

    """NVD3 pie chart of a single metric across groups."""

    viz_type = "pie"
    verbose_name = "Distribution - NVD3 - Pie Chart"
    is_timeseries = False
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'metrics', 'groupby',
                'limit',
                ('donut', 'show_legend'),
            )
        },)

    def query_obj(self):
        query = super(DistributionPieViz, self).query_obj()
        # A pie has no time axis regardless of the base class default.
        query['is_timeseries'] = False
        return query

    def get_df(self):
        """Aggregate the first metric per group, largest slice first."""
        df = super(DistributionPieViz, self).get_df()
        main_metric = self.metrics[0]
        df = df.pivot_table(
            index=self.groupby,
            values=[main_metric])
        return df.sort(main_metric, ascending=False)

    def get_json_data(self):
        """Emit ``[{x, y}, ...]`` records for the pie widget."""
        df = self.get_df().reset_index()
        df.columns = ['x', 'y']
        return dumps(df.to_dict(orient="records"))
2015-09-15 12:17:59 -04:00
class DistributionBarViz(DistributionPieViz):

    """Distribution of one or more metrics as a (stackable) bar chart."""

    viz_type = "dist_bar"
    verbose_name = "Distribution - Bar Chart"
    is_timeseries = False
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'metrics', 'groupby',
                'limit',
                ('show_legend', 'bar_stacked'),
            )
        },)

    def get_df(self):
        # Deliberately reach past DistributionPieViz in the MRO:
        # super(DistributionPieViz, self) resolves to the class after it,
        # skipping the single-metric pie pivot in favor of our own.
        df = super(DistributionPieViz, self).get_df()
        df = df.pivot_table(
            index=self.groupby,
            values=self.metrics)
        return df.sort(self.metrics[0], ascending=False)

    def get_json_data(self):
        """Emit one nvd3 series per numeric metric column."""
        df = self.get_df()
        chart_data = []
        for name, ys in df.to_dict('series').items():
            # Skip non-numeric columns.
            if df[name].dtype.kind not in "biufc":
                continue
            df['timestamp'] = pd.to_datetime(df.index, utc=False)
            if isinstance(name, string_types):
                series_title = name
            else:
                # With a single metric, drop the metric name (first element)
                # from the tuple label.
                labels = name if len(self.metrics) > 1 else name[1:]
                series_title = ", ".join(labels)
            chart_data.append({
                "key": series_title,
                "values": [
                    {'x': ds, 'y': ys[i]}
                    for i, ds in enumerate(df.timestamp)]
            })
        return dumps(chart_data)
2015-09-27 11:52:26 -04:00
2015-07-14 16:26:35 -04:00
2015-12-09 16:57:14 -05:00
class SunburstViz(BaseViz):

    """Multi-level donut ("sunburst") over a hierarchy of groupby columns."""

    viz_type = "sunburst"
    verbose_name = "Sunburst"
    is_timeseries = False
    js_files = [
        'widgets/viz_sunburst.js']
    css_files = ['widgets/viz_sunburst.css']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'groupby',
                'metric', 'secondary_metric',
                'row_limit',
            )
        },)
    form_overrides = {
        'metric': {
            'label': 'Primary Metric',
            'description': (
                "The primary metric is used to "
                "define the arc segment sizes"),
        },
        'secondary_metric': {
            'label': 'Secondary Metric',
            'description': (
                "This secondary metric is used to "
                "define the color as a ratio against the primary metric"),
        },
        'groupby': {
            'label': 'Hierarchy',
            'description': "This defines the level of the hierarchy",
        },
    }

    def get_df(self):
        df = super(SunburstViz, self).get_df()
        return df

    def get_json_data(self):
        """Serialize rows as ``[hierarchy..., m1, m2]`` value arrays.

        When primary and secondary metrics are the same, the metric column
        is duplicated so the widget always receives both m1 and m2.
        """
        df = self.get_df()
        # Copy the list: the previous `cols += [...]` mutated the list
        # object stored in form_data['groupby'] in place, corrupting the
        # form data for any subsequent call.
        cols = list(self.form_data.get('groupby'))
        metric = self.form_data.get('metric')
        secondary_metric = self.form_data.get('secondary_metric')
        if metric == secondary_metric:
            # if m1 == m2 duplicate the metric column
            ndf = df[cols]
            ndf['m1'] = df[metric]
            ndf['m2'] = df[metric]
        else:
            cols += [
                self.form_data['metric'], self.form_data['secondary_metric']]
            ndf = df[cols]
        return ndf.to_json(orient="values")

    def query_obj(self):
        qry = super(SunburstViz, self).query_obj()
        qry['metrics'] = [
            self.form_data['metric'], self.form_data['secondary_metric']]
        return qry
2015-12-10 01:37:08 -05:00
class SankeyViz(BaseViz):

    """Sankey diagram of flows between a source and a target column."""

    viz_type = "sankey"
    verbose_name = "Sankey"
    is_timeseries = False
    js_files = [
        'lib/d3-sankey.js',
        'widgets/viz_sankey.js']
    css_files = ['widgets/viz_sankey.css']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'groupby',
                'metric',
                'row_limit',
            )
        },)
    form_overrides = {
        'groupby': {
            'label': 'Source / Target',
            'description': "Choose a source and a target",
        },
    }

    def query_obj(self):
        """Require exactly two groupby columns (source and target)."""
        query = super(SankeyViz, self).query_obj()
        if len(query['groupby']) != 2:
            raise Exception("Pick exactly 2 columns as [Source / Target]")
        query['metrics'] = [
            self.form_data['metric']]
        return query

    def get_json_data(self):
        """Emit ``[{source, target, value}, ...]`` link records."""
        df = self.get_df()
        df.columns = ['source', 'target', 'value']
        return dumps(df.to_dict(orient='records'))
2015-12-15 16:59:31 -05:00
class DirectedForceViz(BaseViz):

    """Force-directed graph of links between a source and a target column."""

    viz_type = "directed_force"
    verbose_name = "Directed Force Layout"
    is_timeseries = False
    js_files = [
        'widgets/viz_directed_force.js']
    css_files = ['widgets/viz_directed_force.css']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'groupby',
                'metric',
                'row_limit',
            )
        },
        {
            'label': 'Force Layout',
            'fields': (
                'link_length',
                'charge',
            )
        },)
    form_overrides = {
        'groupby': {
            'label': 'Source / Target',
            'description': "Choose a source and a target",
        },
    }

    def query_obj(self):
        """Require exactly two groupby columns (source and target)."""
        query = super(DirectedForceViz, self).query_obj()
        if len(self.form_data['groupby']) != 2:
            raise Exception("Pick exactly 2 columns to 'Group By'")
        query['metrics'] = [self.form_data['metric']]
        return query

    def get_json_data(self):
        """Emit ``[{source, target, value}, ...]`` link records."""
        df = self.get_df()
        df.columns = ['source', 'target', 'value']
        return dumps(df.to_dict(orient='records'))
2015-12-16 14:28:27 -05:00
class WorldMapViz(BaseViz):

    """Choropleth world map, optionally overlaid with metric-sized bubbles."""

    viz_type = "world_map"
    verbose_name = "World Map"
    is_timeseries = False
    js_files = [
        'lib/topojson.min.js',
        'lib/datamaps.all.js',
        'widgets/viz_world_map.js']
    css_files = ['widgets/viz_world_map.css']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'entity',
                'country_fieldtype',
                'metric',
            )
        },
        {
            'label': 'Bubbles',
            'fields': (
                ('show_bubbles', None),
                'secondary_metric',
                'max_bubble_size',
            )
        })
    form_overrides = {
        'entity': {
            'label': 'Country Field',
            'description': "3 letter code of the country",
        },
        'metric': {
            'label': 'Metric for color',
            'description': ("Metric that defines the color of the country"),
        },
        'secondary_metric': {
            'label': 'Bubble size',
            'description': ("Metric that defines the size of the bubble"),
        },
    }

    def query_obj(self):
        """Query the color and bubble metrics grouped by the country column."""
        qry = super(WorldMapViz, self).query_obj()
        qry['metrics'] = [
            self.form_data['metric'], self.form_data['secondary_metric']]
        qry['groupby'] = [self.form_data['entity']]
        return qry

    def get_json_data(self):
        """Return records of [country, m1, m2] plus lat/lng for known countries."""
        from panoramix.data import countries
        df = self.get_df()
        cols = [self.form_data.get('entity')]
        metric = self.form_data.get('metric')
        secondary_metric = self.form_data.get('secondary_metric')
        if metric == secondary_metric:
            # Same metric picked twice: duplicate it so the frame always
            # carries exactly [country, m1, m2].
            # .copy() avoids pandas chained assignment on a slice view.
            ndf = df[cols].copy()
            ndf['m1'] = df[metric]
            ndf['m2'] = df[metric]
        else:
            cols += [metric, secondary_metric]
            ndf = df[cols]
        df = ndf
        df.columns = ['country', 'm1', 'm2']
        d = df.to_dict(orient='records')
        for row in d:
            country = countries.get(
                self.form_data.get('country_fieldtype'), row['country'])
            if country:
                row['country'] = country['cca3']
                row['latitude'] = country['lat']
                row['longitude'] = country['lng']
                row['name'] = country['name']
            else:
                # Unknown country: placeholder code not present in the map data
                row['country'] = "XXX"
        return dumps(d)
2015-12-26 14:54:04 -05:00
class FilterBoxViz(BaseViz):

    """A dashboard widget exposing one select filter per chosen field."""

    viz_type = "filter_box"
    verbose_name = "Filters"
    is_timeseries = False
    js_files = [
        'widgets/viz_filter_box.js']
    css_files = [
        'widgets/viz_filter_box.css']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'groupby',
                'metric',
            )
        },)
    form_overrides = {
        'groupby': {
            'label': 'Filter fields',
            'description': "The fields you want to filter on",
        },
    }

    def query_obj(self):
        """Require at least one filter field; query a single metric."""
        qry = super(FilterBoxViz, self).query_obj()
        groupby = self.form_data['groupby']
        if len(groupby) < 1:
            raise Exception("Pick at least one filter field")
        qry['metrics'] = [
            self.form_data['metric']]
        return qry

    def get_df(self):
        """Run one query per filter field.

        NOTE: unlike other viz types this returns a dict
        {field: [option dicts]}, not a DataFrame.
        """
        qry = self.query_obj()
        filters = list(qry['groupby'])
        d = {}
        for flt in filters:
            # Re-issue the query grouped by this single field
            qry['groupby'] = [flt]
            df = super(FilterBoxViz, self).get_df(qry)
            d[flt] = [
                {'id': row[0],
                 'text': row[0],
                 'filter': flt,
                 'metric': row[1]}
                for row in df.itertuples(index=False)]
        return d

    def get_json_data(self):
        d = self.get_df()
        return dumps(d)
2016-01-19 03:38:15 -05:00
class IFrameViz(BaseViz):

    """Embeds an arbitrary URL inside the dashboard as an iframe."""

    viz_type = "iframe"
    verbose_name = "iFrame"
    is_timeseries = False
    js_files = ['widgets/viz_iframe.js']
    fieldsets = ({
        'label': None,
        'fields': ('url',)
    },)
2016-01-21 11:20:53 -05:00
class ParallelCoordinatesViz(BaseViz):

    """Parallel coordinates plot of several metrics for one series column."""

    viz_type = "para"
    verbose_name = "Parallel Coordinates"
    is_timeseries = False
    js_files = [
        'lib/para/d3.parcoords.js',
        'lib/para/divgrid.js',
        'widgets/viz_para.js']
    css_files = ['lib/para/d3.parcoords.css']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'series',
                'metrics',
                'secondary_metric',
                'limit',
                ('show_datatable', None),
            )
        },)

    def query_obj(self):
        """Group by the series column; ensure the secondary metric is queried."""
        query = super(ParallelCoordinatesViz, self).query_obj()
        form = self.form_data
        query['metrics'] = form.get('metrics')
        secondary = form.get('secondary_metric')
        if secondary not in query['metrics']:
            # NOTE(review): this appends to the same list object stored in
            # form_data['metrics'], so get_json_data below also sees the
            # secondary metric — confirm this aliasing is intended.
            query['metrics'].append(secondary)
        query['groupby'] = [form.get('series')]
        return query

    def get_json_data(self):
        """Serialize the series column plus all metric columns as records."""
        frame = self.get_df()
        columns = [self.form_data.get('series')] + self.form_data.get('metrics')
        return frame[columns].to_json(orient="records")
2016-01-25 01:52:27 -05:00
class HeatmapViz(BaseViz):

    """Heatmap of a metric over two categorical axes (x and y columns)."""

    viz_type = "heatmap"
    verbose_name = "Heatmap"
    is_timeseries = False
    js_files = ['lib/d3.tip.js', 'widgets/viz_heatmap.js']
    css_files = ['lib/d3.tip.css', 'widgets/viz_heatmap.css']
    fieldsets = (
        {
            'label': None,
            'fields': (
                'granularity',
                ('since', 'until'),
                'all_columns_x',
                'all_columns_y',
                'metric',
            )
        },
        {
            'label': 'Heatmap Options',
            'fields': (
                'linear_color_scheme',
                ('xscale_interval', 'yscale_interval'),
                'canvas_image_rendering',
                'normalize_across',
            )
        },)

    def query_obj(self):
        """Query the single metric grouped by the x and y columns."""
        d = super(HeatmapViz, self).query_obj()
        fd = self.form_data
        d['metrics'] = [fd.get('metric')]
        d['groupby'] = [fd.get('all_columns_x'), fd.get('all_columns_y')]
        return d

    def get_json_data(self):
        """Return [x, y, v, perc] records; perc is min-max normalized v.

        Normalization runs per-x, per-y or over the whole heatmap depending
        on the `normalize_across` form setting.
        """
        df = self.get_df()
        fd = self.form_data
        x = fd.get('all_columns_x')
        y = fd.get('all_columns_y')
        v = fd.get('metric')
        if x == y:
            df.columns = ['x', 'y', 'v']
        else:
            df = df[[x, y, v]]
            df.columns = ['x', 'y', 'v']
        norm = fd.get('normalize_across')
        overall = False
        if norm == 'heatmap':
            overall = True
        else:
            gb = df.groupby(norm, group_keys=False)
            if len(gb) <= 1:
                # A single group normalizes the same as the whole heatmap
                overall = True
            else:
                df['perc'] = (
                    gb.apply(
                        lambda x: (x.v - x.v.min()) / (x.v.max() - x.v.min()))
                )
        if overall:
            v = df.v
            min_ = v.min()
            df['perc'] = (v - min_) / (v.max() - min_)
        return df.to_json(orient="records")
2016-01-21 11:20:53 -05:00
2015-09-30 16:58:32 -04:00
# All available visualization classes, in the order they are presented
# in the UI.
viz_types_list = [
    TableViz,
    PivotTableViz,
    NVD3TimeSeriesViz,
    NVD3CompareTimeSeriesViz,
    NVD3TimeSeriesStackedViz,
    NVD3TimeSeriesBarViz,
    DistributionBarViz,
    DistributionPieViz,
    BubbleViz,
    MarkupViz,
    WordCloudViz,
    BigNumberViz,
    SunburstViz,
    DirectedForceViz,
    SankeyViz,
    WorldMapViz,
    FilterBoxViz,
    IFrameViz,
    ParallelCoordinatesViz,
    HeatmapViz,
]

# Registry mapping each viz_type key to its class, preserving display order
viz_types = OrderedDict([(v.viz_type, v) for v in viz_types_list])