# panoramix/views.py
from datetime import datetime
2015-07-20 19:29:16 -04:00
import json
2015-09-05 12:23:46 -04:00
import logging
2015-12-04 14:52:32 -05:00
import re
import traceback
2015-07-16 20:55:36 -04:00
from flask import request, redirect, flash, Response, render_template, Markup
2015-07-15 13:12:32 -04:00
from flask.ext.appbuilder import ModelView, CompactCRUDMixin, BaseView, expose
2015-09-24 00:21:48 -04:00
from flask.ext.appbuilder.actions import action
from flask.ext.appbuilder.models.sqla.interface import SQLAInterface
2015-09-05 12:23:46 -04:00
from flask.ext.appbuilder.security.decorators import has_access
2015-07-27 01:25:32 -04:00
from pydruid.client import doublesum
2015-09-24 00:21:48 -04:00
from sqlalchemy import create_engine
import sqlalchemy as sqla
from wtforms.validators import ValidationError
2016-02-04 02:09:52 -05:00
import pandas as pd
from sqlalchemy import select, text
from sqlalchemy.sql.expression import TextAsFrom
from panoramix import appbuilder, db, models, viz, utils, app, sm, ascii_art
2015-09-25 18:43:50 -04:00
config = app.config
2015-09-09 13:37:59 -04:00
def validate_json(form, field):
    """WTForms validator ensuring the field contains parseable JSON.

    :param form: enclosing form (unused; required by the validator API)
    :param field: field whose ``data`` attribute is validated
    :raises ValidationError: when the field's data is not valid JSON
    """
    try:
        json.loads(field.data)
    except (TypeError, ValueError) as e:
        # json.loads raises ValueError on malformed JSON and TypeError on
        # non-string input; catching only those instead of the previous
        # blanket ``except Exception`` avoids masking unrelated bugs.
        logging.exception(e)
        raise ValidationError("Json isn't valid")
2015-07-15 13:12:32 -04:00
2015-09-09 13:37:59 -04:00
2015-07-29 17:48:57 -04:00
class DeleteMixin(object):
    """Mixin adding a bulk-delete list action to FAB model views."""

    @action(
        "muldelete", "Delete", "Delete all Really?", "fa-trash", single=False)
    def muldelete(self, items):
        # Delete every selected row, then bounce back to the list view.
        self.datamodel.delete_all(items)
        self.update_redirect()
        return redirect(self.get_redirect())
2015-09-25 19:15:13 -04:00
class PanoramixModelView(ModelView):
    """Base model view for the app; bumps pagination for long metadata lists."""
    page_size = 500
2015-09-25 19:15:13 -04:00
class TableColumnInlineView(CompactCRUDMixin, PanoramixModelView):
    """CRUD view for SQL table columns, shown inline on the table view."""
    datamodel = SQLAInterface(models.TableColumn)
    can_delete = False
    edit_columns = [
        'column_name', 'description', 'groupby', 'filterable', 'table',
        'count_distinct', 'sum', 'min', 'max', 'expression', 'is_dttm']
    add_columns = edit_columns
    list_columns = [
        'column_name', 'type', 'groupby', 'filterable', 'count_distinct',
        'sum', 'min', 'max', 'is_dttm']
    page_size = 500
    description_columns = {
        'is_dttm': (
            "Whether to make this column available as a "
            "[Time Granularity] option, column has to be DATETIME or "
            "DATETIME-like"),
    }
appbuilder.add_view_no_menu(TableColumnInlineView)
# Menu entry pointing at the featured_datasets endpoint defined below.
appbuilder.add_link(
    "Featured Datasets",
    href='/panoramix/featured_datasets',
    category='Sources',
    category_icon='fa-table',
    icon="fa-star")
appbuilder.add_separator("Sources")
2015-08-03 11:34:58 -04:00
2016-02-10 09:56:35 -05:00
class DruidColumnInlineView(CompactCRUDMixin, PanoramixModelView):
    """CRUD view for Druid columns, shown inline on the datasource view."""
    datamodel = SQLAInterface(models.DruidColumn)
    edit_columns = [
        'column_name', 'description', 'datasource', 'groupby',
        'count_distinct', 'sum', 'min', 'max']
    list_columns = [
        'column_name', 'type', 'groupby', 'filterable', 'count_distinct',
        'sum', 'min', 'max']
    can_delete = False
    page_size = 500

    def post_update(self, col):
        # Re-derive the column's metrics whenever its aggregation flags change.
        col.generate_metrics()

appbuilder.add_view_no_menu(DruidColumnInlineView)
2015-07-15 13:12:32 -04:00
2015-09-09 13:37:59 -04:00
2015-09-25 19:15:13 -04:00
class SqlMetricInlineView(CompactCRUDMixin, PanoramixModelView):
    """CRUD view for SQL metrics, shown inline on the table view."""
    datamodel = SQLAInterface(models.SqlMetric)
    list_columns = ['metric_name', 'verbose_name', 'metric_type']
    edit_columns = [
        'metric_name', 'description', 'verbose_name', 'metric_type',
        'expression', 'table']
    add_columns = edit_columns
    page_size = 500
appbuilder.add_view_no_menu(SqlMetricInlineView)
2015-07-15 20:38:03 -04:00
2016-02-10 10:18:59 -05:00
class DruidMetricInlineView(CompactCRUDMixin, PanoramixModelView):
    """CRUD view for Druid metrics, shown inline on the datasource view."""
    datamodel = SQLAInterface(models.DruidMetric)
    list_columns = ['metric_name', 'verbose_name', 'metric_type']
    edit_columns = [
        'metric_name', 'description', 'verbose_name', 'metric_type',
        'datasource', 'json']
    add_columns = [
        'metric_name', 'verbose_name', 'metric_type', 'datasource', 'json']
    page_size = 500
    # The metric definition is raw JSON; reject invalid JSON at form time.
    validators_columns = {
        'json': [validate_json],
    }
appbuilder.add_view_no_menu(DruidMetricInlineView)
2015-07-15 13:12:32 -04:00
2015-09-25 19:15:13 -04:00
class DatabaseView(PanoramixModelView, DeleteMixin):
    """CRUD view for database connection entries."""
    datamodel = SQLAInterface(models.Database)
    list_columns = ['database_name', 'sql_link', 'created_by', 'changed_on_']
    add_columns = ['database_name', 'sqlalchemy_uri']
    search_exclude_columns = ('password',)
    edit_columns = add_columns
    add_template = "panoramix/models/database/add.html"
    edit_template = "panoramix/models/database/edit.html"
    base_order = ('changed_on','desc')
    description_columns = {
        'sqlalchemy_uri': (
            "Refer to the SqlAlchemy docs for more information on how "
            "to structure your URI here: "
            "http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html")
    }

    def pre_add(self, db):
        # Keep the real password on the model's password field, then mask it
        # inside the stored URI so the UI never displays it.
        conn = sqla.engine.url.make_url(db.sqlalchemy_uri)
        db.password = conn.password
        conn.password = "X" * 10 if conn.password else None
        db.sqlalchemy_uri = str(conn)  # hides the password

    def pre_update(self, db):
        self.pre_add(db)

appbuilder.add_view(
    DatabaseView,
    "Databases",
    icon="fa-database",
    category="Sources",
    category_icon='fa-database',)
class TableModelView(PanoramixModelView, DeleteMixin):
    """CRUD view for SQL-backed datasources (SqlaTable)."""
    datamodel = SQLAInterface(models.SqlaTable)
    list_columns = [
        'table_link', 'database', 'sql_link', 'changed_by_', 'changed_on_']
    add_columns = ['table_name', 'database', 'default_endpoint', 'offset']
    edit_columns = [
        'table_name', 'is_featured', 'database', 'description', 'owner',
        'main_dttm_col', 'default_endpoint', 'offset']
    related_views = [TableColumnInlineView, SqlMetricInlineView]
    base_order = ('changed_on','desc')
    description_columns = {
        'offset': "Timezone offset (in hours) for this datasource",
        'description': Markup("Supports <a href='https://daringfireball.net/projects/markdown/'>markdown</a>"),
    }

    def post_add(self, table):
        # Metadata fetching is best effort: warn the user instead of
        # failing the add when the table can't be introspected.
        try:
            table.fetch_metadata()
        except Exception as e:
            flash(
                "Table [{}] doesn't seem to exist, "
                "couldn't fetch metadata".format(table.table_name),
                "danger")
        # Ensure a matching datasource_access permission exists.
        utils.merge_perm(sm, 'datasource_access', table.perm)

    def post_update(self, table):
        self.post_add(table)

appbuilder.add_view(
    TableModelView,
    "Tables",
    category="Sources",
    icon='fa-table',)
appbuilder.add_separator("Sources")
2016-02-10 09:56:35 -05:00
class DruidClusterModelView(PanoramixModelView, DeleteMixin):
    """CRUD view for Druid cluster connection settings."""
    datamodel = SQLAInterface(models.DruidCluster)
    add_columns = [
        'cluster_name',
        'coordinator_host', 'coordinator_port', 'coordinator_endpoint',
        'broker_host', 'broker_port', 'broker_endpoint',
    ]
    edit_columns = add_columns
    list_columns = ['cluster_name', 'metadata_last_refreshed']
appbuilder.add_view(
    DruidClusterModelView,
    "Druid Clusters",
    icon="fa-cubes",
    category="Sources",
    category_icon='fa-database',)
2015-09-25 19:15:13 -04:00
class SliceModelView(PanoramixModelView, DeleteMixin):
    """CRUD view for slices (individual visualizations)."""
    datamodel = SQLAInterface(models.Slice)
    # Slices are created from the explore view's save action, not from here.
    can_add = False
    list_columns = [
        'slice_link', 'viz_type',
        'datasource_link', 'created_by_', 'changed_on_']
    edit_columns = [
        'slice_name', 'description', 'viz_type', 'druid_datasource',
        'table', 'dashboards', 'params']
    base_order = ('changed_on','desc')
    description_columns = {
        'description': Markup("The content here can be displayed as widget headers in the dashboard view. Supports <a href='https://daringfireball.net/projects/markdown/'>markdown</a>"),
    }
appbuilder.add_view(
    SliceModelView,
    "Slices",
    icon="fa-bar-chart",
    category="",
    category_icon='',)
2015-09-25 19:15:13 -04:00
class DashboardModelView(PanoramixModelView, DeleteMixin):
    """CRUD view for dashboards."""
    datamodel = SQLAInterface(models.Dashboard)
    list_columns = ['dashboard_link', 'created_by_', 'changed_on_']
    edit_columns = [
        'dashboard_title', 'slug', 'slices', 'position_json', 'css',
        'json_metadata']
    add_columns = edit_columns
    base_order = ('changed_on','desc')
    description_columns = {
        'position_json': (
            "This json object describes the positioning of the widgets in "
            "the dashboard. It is dynamically generated when adjusting "
            "the widgets size and positions by using drag & drop in "
            "the dashboard view"),
        'css': (
            "The css for individual dashboards can be altered here, or "
            "in the dashboard view where changes are immediatly "
            "visible"),
        'slug': "To get a readable URL for your dashboard",
    }

    def pre_add(self, obj):
        """Normalize the slug into a URL-safe value, or None when empty."""
        # Guard against a missing slug: .strip() on None raised before.
        obj.slug = (obj.slug or '').strip() or None
        if obj.slug:
            obj.slug = obj.slug.replace(" ", "-")
            # Keep word characters and the hyphens just inserted; the old
            # pattern r'\W+' stripped those hyphens right back out.
            obj.slug = re.sub(r'[^\w\-]+', '', obj.slug)

    def pre_update(self, obj):
        self.pre_add(obj)

appbuilder.add_view(
    DashboardModelView,
    "Dashboards",
    icon="fa-dashboard",
    category="",
    category_icon='',)
2015-12-04 14:52:32 -05:00
class LogModelView(PanoramixModelView):
    """Read-mostly view over the user action log."""
    datamodel = SQLAInterface(models.Log)
    list_columns = ('user', 'action', 'dttm')
    edit_columns = ('user', 'action', 'dttm', 'json')
    base_order = ('dttm','desc')
appbuilder.add_view(
    LogModelView,
    "Action Log",
    category="Security",
    icon="fa-list-ol")
2016-02-10 09:56:35 -05:00
class DruidDatasourceModelView(PanoramixModelView, DeleteMixin):
    """CRUD view for Druid datasources."""
    datamodel = SQLAInterface(models.DruidDatasource)
    list_columns = [
        'datasource_link', 'cluster', 'owner',
        'created_by', 'created_on',
        'changed_by_', 'changed_on',
        'offset']
    related_views = [DruidColumnInlineView, DruidMetricInlineView]
    edit_columns = [
        'datasource_name', 'cluster', 'description', 'owner',
        'is_featured', 'is_hidden', 'default_endpoint', 'offset']
    page_size = 500
    base_order = ('datasource_name', 'asc')
    description_columns = {
        'offset': "Timezone offset (in hours) for this datasource",
        'description': Markup("Supports <a href='https://daringfireball.net/projects/markdown/'>markdown</a>"),
    }

    def post_add(self, datasource):
        # Derive metrics and ensure a datasource_access permission exists.
        datasource.generate_metrics()
        utils.merge_perm(sm, 'datasource_access', datasource.perm)

    def post_update(self, datasource):
        self.post_add(datasource)

appbuilder.add_view(
    DruidDatasourceModelView,
    "Druid Datasources",
    category="Sources",
    icon="fa-cube")
2015-07-15 13:12:32 -04:00
2015-07-27 17:16:18 -04:00
@app.route('/health')
def health():
    """Plain availability-check endpoint."""
    return "OK"


@app.route('/ping')
def ping():
    """Plain availability-check endpoint (same response as /health)."""
    return "OK"
2016-01-14 00:44:05 -05:00
class R(BaseView):
    """Endpoints backing the URL shortener (/r/<id>)."""

    @utils.log_this
    @expose("/<url_id>")
    def index(self, url_id):
        """Redirect to the stored URL for ``url_id``, or home with a flash."""
        # The leftover debug ``print(url.url)`` was removed here; it wrote
        # every visited short-link target to stdout.
        url = db.session.query(models.Url).filter_by(id=url_id).first()
        if url:
            return redirect('/' + url.url)
        else:
            flash("URL to nowhere...", "danger")
            return redirect('/')

    @utils.log_this
    @expose("/shortner/", methods=['POST', 'GET'])
    def shortner(self):
        """Persist the posted URL and return its short link."""
        url = request.form.get('data')
        obj = models.Url(url=url)
        db.session.add(obj)
        db.session.commit()
        return("{request.headers[Host]}/r/{obj.id}".format(
            request=request, obj=obj))

appbuilder.add_view_no_menu(R)
2015-07-15 13:12:32 -04:00
class Panoramix(BaseView):
    """Main application views: explore, dashboards, ad-hoc SQL, admin helpers."""

    @has_access
    @expose("/explore/<datasource_type>/<datasource_id>/")
    @expose("/datasource/<datasource_type>/<datasource_id>/")  # Legacy url
    @utils.log_this
    def explore(self, datasource_type, datasource_id):
        """Render (or save/overwrite) a visualization for a datasource.

        Supports JSON and CSV payloads via the ``json``/``csv`` query args.
        """
        if datasource_type == "table":
            datasource = (
                db.session
                .query(models.SqlaTable)
                .filter_by(id=datasource_id)
                .first()
            )
        else:
            # Anything that isn't "table" is treated as a Druid datasource.
            datasource = (
                db.session
                .query(models.DruidDatasource)
                .filter_by(id=datasource_id)
                .first()
            )
        all_datasource_access = self.appbuilder.sm.has_access(
            'all_datasource_access', 'all_datasource_access')
        # NOTE(review): if the datasource row is missing, this attribute
        # access raises before the "deleted" check further down ever runs.
        datasource_access = self.appbuilder.sm.has_access(
            'datasource_access', datasource.perm)
        if not (all_datasource_access or datasource_access):
            flash(
                "You don't seem to have access to this datasource",
                "danger")
            return redirect('/slicemodelview/list/')
        action = request.args.get('action')
        if action in ('save', 'overwrite'):
            session = db.session()
            # TODO: use form processing from wtforms
            d = request.args.to_dict(flat=False)
            del d['action']
            del d['previous_viz_type']
            # These keys are multi-valued; everything else is scalar.
            as_list = ('metrics', 'groupby', 'columns')
            for k in d:
                v = d.get(k)
                if k in as_list and not isinstance(v, list):
                    d[k] = [v] if v else []
                if k not in as_list and isinstance(v, list):
                    d[k] = v[0]
            table_id = druid_datasource_id = None
            datasource_type = request.args.get('datasource_type')
            if datasource_type in ('datasource', 'druid'):
                druid_datasource_id = request.args.get('datasource_id')
            elif datasource_type == 'table':
                table_id = request.args.get('datasource_id')
            slice_name = request.args.get('slice_name')
            if action == "save":
                slc = models.Slice()
                msg = "Slice [{}] has been saved".format(slice_name)
            elif action == "overwrite":
                slc = (
                    session.query(models.Slice)
                    .filter_by(id=request.args.get("slice_id"))
                    .first()
                )
                msg = "Slice [{}] has been overwritten".format(slice_name)
            slc.params = json.dumps(d, indent=4, sort_keys=True)
            slc.datasource_name = request.args.get('datasource_name')
            slc.viz_type = request.args.get('viz_type')
            slc.druid_datasource_id = druid_datasource_id
            slc.table_id = table_id
            slc.datasource_type = datasource_type
            slc.slice_name = slice_name
            session.merge(slc)
            session.commit()
            flash(msg, "info")
            return redirect(slc.slice_url)
        if not datasource:
            # NOTE(review): no return here, so rendering proceeds with a
            # None datasource and fails below; a redirect is likely intended.
            flash("The datasource seem to have been deleted", "alert")
        viz_type = request.args.get("viz_type")
        if not viz_type and datasource.default_endpoint:
            return redirect(datasource.default_endpoint)
        if not viz_type:
            viz_type = "table"  # default visualization
        obj = viz.viz_types[viz_type](
            datasource,
            form_data=request.args)
        if request.args.get("csv") == "true":
            status = 200
            payload = obj.get_csv()
            return Response(
                payload,
                status=status,
                mimetype="application/csv")
        slice_id = request.args.get("slice_id")
        slc = None
        if slice_id:
            slc = (
                db.session.query(models.Slice)
                .filter_by(id=request.args.get("slice_id"))
                .first()
            )
        if request.args.get("json") == "true":
            status = 200
            if config.get("DEBUG"):
                # In debug mode let exceptions bubble up with a stacktrace.
                payload = obj.get_json()
            else:
                try:
                    payload = obj.get_json()
                except Exception as e:
                    logging.exception(e)
                    payload = str(e)
                    status = 500
            return Response(
                payload,
                status=status,
                mimetype="application/json")
        else:
            if config.get("DEBUG"):
                # NOTE(review): in DEBUG the template renders twice (here and
                # again below); this first pass surfaces template errors
                # outside the try block.
                resp = self.render_template(
                    "panoramix/viz.html", viz=obj, slice=slc)
            try:
                resp = self.render_template(
                    "panoramix/viz.html", viz=obj, slice=slc)
            except Exception as e:
                if config.get("DEBUG"):
                    raise(e)
                return Response(
                    str(e),
                    status=500,
                    mimetype="application/json")
            return resp

    @has_access
    @expose("/checkbox/<model_view>/<id_>/<attr>/<value>", methods=['GET'])
    def checkbox(self, model_view, id_, attr, value):
        """Toggle a boolean column flag from the inline checkbox widgets."""
        model = None
        if model_view == 'TableColumnInlineView':
            model = models.TableColumn
        elif model_view == 'DruidColumnInlineView':
            model = models.DruidColumn
        # NOTE(review): an unknown model_view leaves model as None and the
        # query below fails; consider returning a 400 instead.
        obj = db.session.query(model).filter_by(id=id_).first()
        if obj:
            setattr(obj, attr, value=='true')
            db.session.commit()
        return Response("OK", mimetype="application/json")

    @has_access
    @expose("/save_dash/<dashboard_id>/", methods=['GET', 'POST'])
    def save_dash(self, dashboard_id):
        """Persist dashboard layout, expanded slices and CSS posted by the UI."""
        data = json.loads(request.form.get('data'))
        positions = data['positions']
        slice_ids = [int(d['slice_id']) for d in positions]
        session = db.session()
        Dash = models.Dashboard
        dash = session.query(Dash).filter_by(id=dashboard_id).first()
        # Drop slices that were removed from the layout.
        dash.slices = [o for o in dash.slices if o.id in slice_ids]
        dash.position_json = json.dumps(data['positions'], indent=4)
        dash.json_metadata = json.dumps({
            'expanded_slices': data['expanded_slices'],
        }, indent=4)
        dash.css = data['css']
        session.merge(dash)
        session.commit()
        session.close()
        return "SUCCESS"

    @has_access
    @expose("/testconn", methods=["POST", "GET"])
    def testconn(self):
        """Test a database URI by connecting and listing its tables."""
        try:
            uri = request.form.get('uri')
            engine = create_engine(uri)
            # NOTE(review): the connection/engine is never explicitly closed
            # or disposed.
            engine.connect()
            return json.dumps(engine.table_names(), indent=4)
        except Exception as e:
            return Response(
                traceback.format_exc(),
                status=500,
                mimetype="application/json")

    @has_access
    @expose("/dashboard/<dashboard_id>/")
    def dashboard(self, dashboard_id):
        """Render a dashboard addressed by numeric id or by slug."""
        session = db.session()
        qry = session.query(models.Dashboard)
        if dashboard_id.isdigit():
            qry = qry.filter_by(id=int(dashboard_id))
        else:
            qry = qry.filter_by(slug=dashboard_id)
        templates = session.query(models.CssTemplate).all()
        dash = qry.first()

        # Hack to log the dashboard_id properly, even when getting a slug
        @utils.log_this
        def dashboard(**kwargs):
            pass
        dashboard(dashboard_id=dash.id)

        # Map slice_id -> position entry for the template.
        pos_dict = {}
        if dash.position_json:
            pos_dict = {
                int(o['slice_id']):o
                for o in json.loads(dash.position_json)}
        return self.render_template(
            "panoramix/dashboard.html", dashboard=dash,
            templates=templates,
            pos_dict=pos_dict)

    @has_access
    @expose("/sql/<database_id>/")
    @utils.log_this
    def sql(self, database_id):
        """Render the ad-hoc SQL editor for a given database."""
        mydb = db.session.query(
            models.Database).filter_by(id=database_id).first()
        engine = mydb.get_sqla_engine()
        tables = engine.table_names()
        table_name=request.args.get('table_name')
        return self.render_template(
            "panoramix/sql.html",
            tables=tables,
            table_name=table_name,
            db=mydb)

    @has_access
    @expose("/table/<database_id>/<table_name>/")
    @utils.log_this
    def table(self, database_id, table_name):
        """Return an HTML table describing the columns of ``table_name``."""
        mydb = db.session.query(
            models.Database).filter_by(id=database_id).first()
        cols = mydb.get_columns(table_name)
        df = pd.DataFrame([(c['name'], c['type']) for c in cols])
        df.columns = ['col', 'type']
        return self.render_template(
            "panoramix/ajah.html",
            content=df.to_html(
                index=False,
                na_rep='',
                classes=(
                    "dataframe table table-striped table-bordered "
                    "table-condensed sql_results")))

    @has_access
    @expose("/select_star/<database_id>/<table_name>/")
    @utils.log_this
    def select_star(self, database_id, table_name):
        """Generate a SELECT statement listing every column of a table."""
        mydb = db.session.query(
            models.Database).filter_by(id=database_id).first()
        t = mydb.get_table(table_name)
        fields = ", ".join(
            [c.name for c in t.columns] or "*")
        s = "SELECT\n{fields}\nFROM {table_name}".format(**locals())
        return self.render_template(
            "panoramix/ajah.html",
            content=s
        )

    @has_access
    @expose("/runsql/", methods=['POST', 'GET'])
    @utils.log_this
    def runsql(self):
        """Execute posted SQL (capped at ``limit`` rows), return HTML results."""
        session = db.session()
        limit = 1000
        data = json.loads(request.form.get('data'))
        sql = data.get('sql')
        database_id = data.get('database_id')
        mydb = session.query(models.Database).filter_by(id=database_id).first()
        content = ""
        if mydb:
            eng = mydb.get_sqla_engine()
            if limit:
                # Wrap the user's query as a subselect so LIMIT can apply.
                sql = sql.strip().strip(';')
                qry = (
                    select('*')
                    .select_from(TextAsFrom(text(sql), ['*']).alias('inner_qry'))
                    .limit(limit)
                )
                sql= str(qry.compile(eng, compile_kwargs={"literal_binds": True}))
            try:
                df = pd.read_sql_query(sql=sql, con=eng)
                content = df.to_html(
                    index=False,
                    na_rep='',
                    classes=(
                        "dataframe table table-striped table-bordered "
                        "table-condensed sql_results"))
            except Exception as e:
                # NOTE(review): e.message is Python-2-only; str(e) is portable.
                content = (
                    '<div class="alert alert-danger">'
                    "{}</div>"
                ).format(e.message)
        session.commit()
        return content

    @has_access
    @expose("/refresh_datasources/")
    def refresh_datasources(self):
        """Refresh datasource metadata from every registered Druid cluster."""
        session = db.session()
        for cluster in session.query(models.DruidCluster).all():
            try:
                cluster.refresh_datasources()
            except Exception as e:
                # NOTE(review): returning on the first failure skips the
                # remaining clusters and discards prior timestamp updates.
                flash(
                    "Error while processing cluster '{}'".format(cluster),
                    "alert")
                return redirect('/druidclustermodelview/list/')
            cluster.metadata_last_refreshed = datetime.now()
            flash(
                "Refreshed metadata from cluster "
                "[" + cluster.cluster_name + "]",
                'info')
        session.commit()
        # NOTE(review): this route looks stale — the view class is registered
        # as DruidDatasourceModelView; confirm the redirect target.
        return redirect("/datasourcemodelview/list/")

    @expose("/autocomplete/<datasource>/<column>/")
    def autocomplete(self, datasource, column):
        """Return sorted JSON values for a Druid column via a topN query."""
        client = utils.get_pydruid_client()
        top = client.topn(
            datasource=datasource,
            granularity='all',
            intervals='2013-10-04/2020-10-10',
            aggregations={"count": doublesum("count")},
            dimension=column,
            metric='count',
            threshold=1000,
        )
        values = sorted([d[column] for d in top[0]['result']])
        return json.dumps(values)

    @app.errorhandler(500)
    def show_traceback(self):
        """Render the 500 error page, optionally including the stacktrace."""
        if config.get("SHOW_STACKTRACE"):
            error_msg = traceback.format_exc()
        else:
            error_msg = "FATAL ERROR\n"
            # NOTE(review): this assignment discards "FATAL ERROR\n" set just
            # above; it was most likely meant to be ``error_msg += (...)``.
            error_msg = (
                "Stacktrace is hidden. Change the SHOW_STACKTRACE "
                "configuration setting to enable it")
        return render_template(
            'panoramix/traceback.html',
            error_msg=error_msg,
            title=ascii_art.stacktrace,
            art=ascii_art.error), 500

    @has_access
    @expose("/featured_datasets", methods=['GET'])
    def featured_datasets(self):
        """List SQL and Druid datasources flagged as featured."""
        session = db.session()
        datasets_sqla = (session.query(models.SqlaTable)
                         .filter_by(is_featured=True).all())
        datasets_druid = (session.query(models.DruidDatasource)
                          .filter_by(is_featured=True).all())
        featured_datasets = datasets_sqla + datasets_druid
        return self.render_template(
            'panoramix/featured_datasets.html',
            featured_datasets=featured_datasets,
            utils=utils)
2015-12-15 19:03:39 -05:00
2015-07-15 13:12:32 -04:00
# Register the main view (no menu entry) and a link to the Druid refresh.
appbuilder.add_view_no_menu(Panoramix)
appbuilder.add_link(
    "Refresh Druid Metadata",
    href='/panoramix/refresh_datasources/',
    category='Sources',
    category_icon='fa-database',
    icon="fa-cog")
2016-02-03 23:19:54 -05:00
class CssTemplateModelView(PanoramixModelView, DeleteMixin):
    """CRUD view for reusable dashboard CSS templates."""
    datamodel = SQLAInterface(models.CssTemplate)
    list_columns = ['template_name']
    edit_columns = ['template_name', 'css']
    add_columns = edit_columns
appbuilder.add_separator("Sources")
appbuilder.add_view(
    CssTemplateModelView,
    "CSS Templates",
    icon="fa-css3",
    category="Sources",
    category_icon='',)