Merge pull request #111 from mistercrunch/load_ex_dash

Loading another example amazing dash
Maxime Beauchemin 2016-01-14 09:49:01 -08:00
commit ba30dc4c22
7 changed files with 387 additions and 159 deletions

View File

@@ -55,6 +55,7 @@ def load_examples(sample):
print("Loading [World Bank's Health Nutrition and Population Stats]")
data.load_world_bank_health_n_pop()
print("Loading [Birth names]")
data.load_birth_names()
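
Aside: with the bundled example data in place, the two loaders wired in above can also be invoked directly from a Python shell against a configured Panoramix install. A minimal sketch, assuming panoramix.data is importable and the metadata database is reachable:

from panoramix import data

# Loads the bundled datasets plus their slices and dashboards.
data.load_world_bank_health_n_pop()
data.load_birth_names()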

View File

@@ -7,6 +7,12 @@ from sqlalchemy import String, DateTime
from panoramix import app, db, models, utils
# Shortcuts
DB = models.Database
Slice = models.Slice
TBL = models.SqlaTable
Dash = models.Dashboard
config = app.config
DATA_FOLDER = os.path.join(config.get("BASE_DIR"), 'data')
@@ -14,7 +20,6 @@ DATA_FOLDER = os.path.join(config.get("BASE_DIR"), 'data')
def get_or_create_db(session):
print("Creating database reference")
-DB = models.Database
dbobj = session.query(DB).filter_by(database_name='main').first()
if not dbobj:
dbobj = DB(database_name="main")
@@ -25,13 +30,28 @@ def get_or_create_db(session):
return dbobj
def merge_slice(slc):
o = db.session.query(Slice).filter_by(slice_name=slc.slice_name).first()
if o:
db.session.delete(o)
db.session.add(slc)
db.session.commit()
def get_slice_json(defaults, **kwargs):
d = defaults.copy()
d.update(kwargs)
return json.dumps(d, indent=4, sort_keys=True)
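
These two helpers carry most of the new loader's plumbing: get_slice_json layers per-slice overrides onto a shared defaults dict and serializes it, and merge_slice deletes any same-named slice before adding the new one. A standalone sketch of the override behavior, with illustrative values only:

import json

defaults = {"viz_type": "table", "row_limit": "25", "groupby": []}

def get_slice_json(defaults, **kwargs):
    # Same merge logic as above: keyword overrides win over the shared defaults.
    d = defaults.copy()
    d.update(kwargs)
    return json.dumps(d, indent=4, sort_keys=True)

print(get_slice_json(defaults, viz_type="line", groupby=["region"]))
# viz_type and groupby are overridden; row_limit falls through from defaults.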
def load_world_bank_health_n_pop():
-tbl = 'wb_health_population'
tbl_name = 'wb_health_population'
with gzip.open(os.path.join(DATA_FOLDER, 'countries.json.gz')) as f:
pdf = pd.read_json(f)
pdf.columns = [col.replace('.', '_') for col in pdf.columns]
pdf.year = pd.to_datetime(pdf.year)
pdf.to_sql(
-tbl,
tbl_name,
db.engine,
if_exists='replace',
chunksize=500,
@@ -42,17 +62,220 @@ def load_world_bank_health_n_pop():
'region': String(255),
},
index=False)
-print("Creating table reference")
-TBL = models.SqlaTable
-obj = db.session.query(TBL).filter_by(table_name=tbl).first()
-if not obj:
-obj = TBL(table_name='wb_health_population')
-obj.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md'))
-obj.main_dttm_col = 'year'
-obj.database = get_or_create_db(db.session)
-db.session.merge(obj)
print("Creating table [wb_health_population] reference")
tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
if not tbl:
tbl = TBL(table_name=tbl_name)
tbl.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md'))
tbl.main_dttm_col = 'year'
tbl.database = get_or_create_db(db.session)
db.session.merge(tbl)
db.session.commit()
tbl.fetch_metadata()
defaults = {
"compare_lag": "10",
"compare_suffix": "o10Y",
"datasource_id": "1",
"datasource_name": "birth_names",
"datasource_type": "table",
"limit": "25",
"granularity": "year",
"groupby": [],
"metric": 'sum__SP_POP_TOTL',
"metrics": ["sum__SP_POP_TOTL"],
"row_limit": config.get("ROW_LIMIT"),
"since": "2014-01-01",
"until": "2014-01-01",
"where": "",
"markup_type": "markdown",
"country_fieldtype": "cca3",
"secondary_metric": "sum__SP_POP_TOTL",
"entity": "country_code",
"show_bubbles": "y",
}
print("Creating slices")
slices = [
Slice(
slice_name="Region Filter",
viz_type='filter_box',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type='filter_box',
groupby=['region'],
)),
Slice(
slice_name="World's Population",
viz_type='big_number',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
since='2000',
viz_type='big_number',
compare_lag="10",
metric='sum__SP_POP_TOTL',
compare_suffix="over 10Y")),
Slice(
slice_name="Most Populated Countries",
viz_type='table',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type='table',
metrics=["sum__SP_POP_TOTL"],
groupby=['country_name'])),
Slice(
slice_name="Growth Rate",
viz_type='line',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type='line',
since="1960-01-01",
metrics=["sum__SP_POP_TOTL"],
num_period_compare="10",
groupby=['country_name'])),
Slice(
slice_name="% Rural",
viz_type='world_map',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type='world_map',
metric= "sum__SP_RUR_TOTL_ZS",
num_period_compare="10",)),
Slice(
slice_name="Life Expexctancy VS Rural %",
viz_type='bubble',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type='bubble',
since= "2011-01-01",
until= "2011-01-01",
series="region",
limit="0",
entity="country_name",
x="sum__SP_RUR_TOTL_ZS",
y="sum__SP_DYN_LE00_IN",
size="sum__SP_POP_TOTL",
max_bubble_size="50",
flt_col_1="country_code",
flt_op_1="not in",
flt_eq_1="TCA,MNP,DMA,MHL,MCO,SXM,CYM,TUV,IMY,KNA,ASM,ADO,AMA,PLW",
num_period_compare="10",)),
Slice(
slice_name="Rural Breakdown",
viz_type='sunburst',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type='sunburst',
groupby=["region", "country_name"],
secondary_metric="sum__SP_RUR_TOTL",
since= "2011-01-01",
until= "2011-01-01",)),
Slice(
slice_name="World's Pop Growth",
viz_type='area',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
since="1960-01-01",
until="now",
viz_type='area',
groupby=["region"],)),
]
for slc in slices:
merge_slice(slc)
print("Creating a World's Health Bank dashboard")
dash_name = "World's Health Bank Dashboard"
dash = db.session.query(Dash).filter_by(dashboard_title=dash_name).first()
if dash:
db.session.delete(dash)
js = """\
[
{
"size_y": 1,
"size_x": 3,
"col": 1,
"slice_id": "269",
"row": 1
},
{
"size_y": 3,
"size_x": 3,
"col": 1,
"slice_id": "270",
"row": 2
},
{
"size_y": 7,
"size_x": 3,
"col": 10,
"slice_id": "271",
"row": 1
},
{
"size_y": 3,
"size_x": 6,
"col": 1,
"slice_id": "272",
"row": 5
},
{
"size_y": 4,
"size_x": 6,
"col": 4,
"slice_id": "273",
"row": 1
},
{
"size_y": 4,
"size_x": 6,
"col": 7,
"slice_id": "274",
"row": 8
},
{
"size_y": 3,
"size_x": 3,
"col": 7,
"slice_id": "275",
"row": 5
},
{
"size_y": 4,
"size_x": 6,
"col": 1,
"slice_id": "276",
"row": 8
}
]
"""
l = json.loads(js)
for i, pos in enumerate(l):
pos['slice_id'] = str(slices[i].id)
dash = Dash(
dashboard_title=dash_name,
position_json=json.dumps(l, indent=4),
)
for s in slices:
dash.slices.append(s)
db.session.commit()
-obj.fetch_metadata()
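
For orientation: position_json above is a list of grid cells (column, row, width, height), one per slice, and the loop just before the commit rewrites the placeholder slice_id strings with the ids the database assigned to the freshly merged slices. A tiny standalone sketch of that remapping, with made-up ids:

import json

# Two cells trimmed from the layout above; the slice_id values are placeholders.
js = """[
    {"size_y": 1, "size_x": 3, "col": 1, "slice_id": "269", "row": 1},
    {"size_y": 3, "size_x": 3, "col": 1, "slice_id": "270", "row": 2}
]"""
saved_ids = [17, 42]  # hypothetical primary keys of the freshly merged slices

layout = json.loads(js)
for i, pos in enumerate(layout):
    pos['slice_id'] = str(saved_ids[i])

print(json.dumps(layout, indent=4))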
def load_birth_names():
@@ -77,20 +300,16 @@ def load_birth_names():
print("-" * 80)
print("Creating table reference")
-TBL = models.SqlaTable
obj = db.session.query(TBL).filter_by(table_name='birth_names').first()
if not obj:
obj = TBL(table_name = 'birth_names')
obj.main_dttm_col = 'ds'
obj.database = get_or_create_db(db.session)
-models.Table
db.session.merge(obj)
db.session.commit()
obj.fetch_metadata()
tbl = obj
-print("Creating some slices")
-def get_slice_json(**kwargs):
defaults = {
"compare_lag": "10",
"compare_suffix": "o10Y",
@@ -98,9 +317,6 @@ def load_birth_names():
"datasource_name": "birth_names",
"datasource_type": "table",
"limit": "25",
-"flt_col_1": "gender",
-"flt_eq_1": "",
-"flt_op_1": "in",
"granularity": "ds",
"groupby": [],
"metric": 'sum__num',
@@ -112,119 +328,101 @@ def load_birth_names():
"where": "",
"markup_type": "markdown",
}
-d = defaults.copy()
-d.update(kwargs)
-return json.dumps(d, indent=4, sort_keys=True)
-Slice = models.Slice
-slices = []
-def merge_slice(slc):
-o = db.session.query(
-Slice).filter_by(slice_name=slc.slice_name).first()
-if o:
-db.session.delete(slc)
-db.session.add(slc)
-session.commit()
-slices.append(slc)
-merge_slice(
print("Creating some slices")
slices = [
Slice(
slice_name="Girls",
viz_type='table',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
-groupby=['name'], flt_eq_1="girl", row_limit=50)))
-merge_slice(
groupby=['name'], flt_eq_1="girl", row_limit=50)),
Slice(
slice_name="Boys",
viz_type='table',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
-groupby=['name'], flt_eq_1="boy", row_limit=50)))
-merge_slice(
groupby=['name'], flt_eq_1="boy", row_limit=50)),
Slice(
slice_name="Participants",
viz_type='big_number',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type="big_number", granularity="ds",
-compare_lag="5", compare_suffix="over 5Y")))
-merge_slice(
compare_lag="5", compare_suffix="over 5Y")),
Slice(
slice_name="Genders",
viz_type='pie',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
-viz_type="pie", groupby=['gender'])))
-merge_slice(
viz_type="pie", groupby=['gender'])),
Slice(
slice_name="Genders by State",
viz_type='dist_bar',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
flt_eq_1="other", viz_type="dist_bar",
metrics=['sum__sum_girls', 'sum__sum_boys'],
-groupby=['state'], flt_op_1='not in', flt_col_1='state')))
-merge_slice(
groupby=['state'], flt_op_1='not in', flt_col_1='state')),
Slice(
slice_name="Trends",
viz_type='line',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type="line", groupby=['name'],
-granularity='ds', rich_tooltip='y', show_legend='y')))
granularity='ds', rich_tooltip='y', show_legend='y')),
-code = """
-<div style="text-align:center">
-<h1>Birth Names Dashboard</h1>
-<p>The source dataset came from <a href="https://github.com/hadley/babynames">[here]</a></p>
-<img src="http://monblog.system-linux.net/image/tux/baby-tux_overlord59-tux.png">
-</div>
-"""
-merge_slice(
Slice(
slice_name="Title",
viz_type='markup',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type="markup", markup_type="html",
-code=code)))
-merge_slice(
code="""\
<div style="text-align:center">
<h1>Birth Names Dashboard</h1>
<p>The source dataset came from <a href="https://github.com/hadley/babynames">[here]</a></p>
<img src="http://monblog.system-linux.net/image/tux/baby-tux_overlord59-tux.png">
</div>
"""
)),
Slice(
slice_name="Name Cloud",
viz_type='word_cloud',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type="word_cloud", size_from="10",
series='name', size_to="70", rotation="square",
-limit='100')))
-merge_slice(
limit='100')),
Slice(
slice_name="Pivot Table",
viz_type='pivot_table',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type="pivot_table", metrics=['sum__num'],
-groupby=['name'], columns=['state'])))
groupby=['name'], columns=['state'])),
]
for slc in slices:
merge_slice(slc)
print("Creating a dashboard")
-Dash = models.Dashboard
dash = session.query(Dash).filter_by(dashboard_title="Births").first()
if dash:

View File

@@ -11,7 +11,6 @@ from dateutil.parser import parse
from flask import flash
from flask.ext.appbuilder import Model
from flask.ext.appbuilder.models.mixins import AuditMixin
-import pandas as pd
from pandas import read_sql_query
from pydruid import client
from pydruid.utils.filters import Dimension, Filter
@@ -457,8 +456,8 @@ class SqlaTable(Model, Queryable, AuditMixinNullable):
qry = qry.group_by(*groupby_exprs)
time_filter = [
-timestamp >= from_dttm.isoformat(),
-timestamp <= to_dttm.isoformat(),
timestamp >= from_dttm,
timestamp <= to_dttm,
]
inner_time_filter = copy(time_filter)
if inner_from_dttm:
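
The isoformat() removal above means the time filter now compares the timestamp column against real datetime objects, which lets SQLAlchemy emit typed bind parameters instead of comparing against text. A minimal sketch of the pattern, using an invented table:

from datetime import datetime
from sqlalchemy import Column, DateTime, Integer, MetaData, Table, and_

# Hypothetical table, purely for illustration.
events = Table(
    "events", MetaData(),
    Column("id", Integer, primary_key=True),
    Column("ds", DateTime),
)

# Bind datetime objects directly instead of isoformat() strings.
time_filter = and_(
    events.c.ds >= datetime(2014, 1, 1),
    events.c.ds <= datetime(2014, 12, 31),
)
print(time_filter)  # events.ds >= :ds_1 AND events.ds <= :ds_2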

View File

@@ -108,6 +108,30 @@ function viz_sunburst(slice) {
.text("m2/m1: " + fp(d.m2/d.m1));
var sequenceArray = getAncestors(d);
function breadcrumbPoints(d, i) {
var points = [];
points.push("0,0");
points.push(b.w + ",0");
points.push(b.w + b.t + "," + (b.h / 2));
points.push(b.w + "," + b.h);
points.push("0," + b.h);
if (i > 0) { // Leftmost breadcrumb; don't include 6th vertex.
points.push(b.t + "," + (b.h / 2));
}
return points.join(" ");
}
// Update the breadcrumb trail to show the current sequence and percentage.
function updateBreadcrumbs(nodeArray, percentageString) {
l = [];
for(var i=0; i<nodeArray.length; i++){
l.push(nodeArray[i].name)
}
s = l.join(' > ')
gMiddleText.append("text").text(s).classed("middle", true)
.attr("y", -75);
}
updateBreadcrumbs(sequenceArray, percentageString);
// Fade all the segments.
container.selectAll("path")
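
Aside on the new breadcrumbPoints helper: it returns the vertex list for an arrow-shaped SVG polygon, adding a sixth notch vertex for every crumb after the leftmost one. A rough Python transcription of the same geometry, with assumed breadcrumb dimensions (the commit defines its b object elsewhere in the JS):

# Assumed breadcrumb dimensions (width, height, tail), mirroring the JS b object.
b = {"w": 75, "h": 30, "t": 10}

def breadcrumb_points(b, i):
    points = [
        "0,0",
        "{},0".format(b["w"]),
        "{},{}".format(b["w"] + b["t"], b["h"] // 2),
        "{},{}".format(b["w"], b["h"]),
        "0,{}".format(b["h"]),
    ]
    if i > 0:  # every crumb after the leftmost gets the notch vertex
        points.append("{},{}".format(b["t"], b["h"] // 2))
    return " ".join(points)

print(breadcrumb_points(b, 0))  # 0,0 75,0 85,15 75,30 0,30
print(breadcrumb_points(b, 1))  # same arrow plus the trailing 10,15 notch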

View File

@@ -41,6 +41,11 @@
</a>
</span>
<span>{{ form.get_field("viz_type")(class_="select2") }}</span>
<span class="alert alert-info" title="Slice Name" data-toggle="tooltip">{{ viz.form_data.slice_name }}
<a class="" href="/slicemodelview/edit/{{ viz.form_data.slice_id }}" data-toggle="tooltip" title="Edit Slice metadata">
<i class="fa fa-edit"></i>
</a>
</span>
<div class="btn-group results pull-right" role="group"> <div class="btn-group results pull-right" role="group">
<span class="btn btn-default" id="standalone" title="Standalone version, use to embed anywhere" data-toggle="tooltip"> <span class="btn btn-default" id="standalone" title="Standalone version, use to embed anywhere" data-toggle="tooltip">
<i class="fa fa-code"></i> <i class="fa fa-code"></i>

View File

@@ -17,7 +17,7 @@ setup(
zip_safe=False,
scripts=['panoramix/bin/panoramix'],
install_requires=[
-'alembic>=0.7.7, <0.8.0',
'alembic>=0.8.2, <0.9.0',
'cryptography>=1.1.1, <2.0.0',
'flask-appbuilder>=1.4.5, <2.0.0',
'flask-login==0.2.11',
@@ -35,7 +35,7 @@ setup(
'python-dateutil>=2.4.2, <3.0.0',
'requests>=2.7.0, <3.0.0',
'sqlalchemy-utils>=0.31.3, <0.32.0',
-'sqlalchemy==1.0.8',
'sqlalchemy>=1.0.8, <2.0.0',
'sqlparse>=0.1.16, <0.2.0',
'werkzeug==0.11.2, <0.12.0',
],
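
The two dependency changes above trade exact pins for ranges: alembic moves from the 0.7 line to 0.8.x, and SQLAlchemy is no longer locked to exactly 1.0.8. A quick check of what the new specifiers admit, using the packaging library (an assumption; it is not a dependency of this project):

from packaging.specifiers import SpecifierSet

alembic = SpecifierSet(">=0.8.2, <0.9.0")
sqlalchemy = SpecifierSet(">=1.0.8, <2.0.0")

print("0.8.4" in alembic)      # True
print("0.7.7" in alembic)      # False, versions below the new floor are out
print("1.0.12" in sqlalchemy)  # True, no longer locked to exactly 1.0.8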

View File

@@ -28,6 +28,7 @@ class LiveTest(TestCase):
def test_slices(self):
Slc = models.Slice
for slc in db.session.query(Slc).all():
print(slc)
self.client.get(slc.slice_url)
viz = slc.viz
self.client.get(viz.get_url())