More examples / tests (#385)

* More examples / tests

* Fixing the dashboards positioning
This commit is contained in:
Maxime Beauchemin 2016-04-20 17:36:37 -07:00
parent d8a2b621d8
commit f4177bfa94
4 changed files with 114 additions and 22 deletions

View File

@ -109,6 +109,69 @@ def load_energy():
""")) """))
) )
merge_slice(
Slice(
slice_name="Energy Force Layout",
viz_type='directed_force',
datasource_type='table',
table=tbl,
params=textwrap.dedent("""\
{
"charge": "-500",
"collapsed_fieldsets": "",
"datasource_id": "1",
"datasource_name": "energy_usage",
"datasource_type": "table",
"flt_col_0": "source",
"flt_eq_0": "",
"flt_op_0": "in",
"groupby": [
"source",
"target"
],
"having": "",
"link_length": "200",
"metric": "sum__value",
"row_limit": "5000",
"slice_id": "229",
"slice_name": "Force",
"viz_type": "directed_force",
"where": ""
}
"""))
)
merge_slice(
Slice(
slice_name="Heatmap",
viz_type='heatmap',
datasource_type='table',
table=tbl,
params=textwrap.dedent("""\
{
"all_columns_x": "source",
"all_columns_y": "target",
"canvas_image_rendering": "pixelated",
"collapsed_fieldsets": "",
"datasource_id": "1",
"datasource_name": "energy_usage",
"datasource_type": "table",
"flt_col_0": "source",
"flt_eq_0": "",
"flt_op_0": "in",
"having": "",
"linear_color_scheme": "blue_white_yellow",
"metric": "sum__value",
"normalize_across": "heatmap",
"slice_id": "229",
"slice_name": "Heatmap",
"viz_type": "heatmap",
"where": "",
"xscale_interval": "1",
"yscale_interval": "1"
}
"""))
)
def load_world_bank_health_n_pop(): def load_world_bank_health_n_pop():
"""Loads the world bank health dataset, slices and a dashboard""" """Loads the world bank health dataset, slices and a dashboard"""
@ -287,6 +350,23 @@ def load_world_bank_health_n_pop():
viz_type='treemap', viz_type='treemap',
metrics=["sum__SP_POP_TOTL"], metrics=["sum__SP_POP_TOTL"],
groupby=["region", "country_code"],)), groupby=["region", "country_code"],)),
Slice(
slice_name="Parallel Coordinates",
viz_type='para',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
since="2011-01-01",
until="2011-01-01",
viz_type='para',
limit=100,
metrics=[
"sum__SP_POP_TOTL",
'sum__SP_RUR_TOTL_ZS',
'sum__SH_DYN_AIDS'],
secondary_metric='sum__SP_POP_TOTL',
series=["country_name"],)),
] ]
for slc in slices: for slc in slices:
merge_slice(slc) merge_slice(slc)
@ -379,7 +459,7 @@ def load_world_bank_health_n_pop():
dash.position_json = json.dumps(l, indent=4) dash.position_json = json.dumps(l, indent=4)
dash.slug = "world_health" dash.slug = "world_health"
dash.slices = slices dash.slices = slices[:-1]
db.session.merge(dash) db.session.merge(dash)
db.session.commit() db.session.commit()
@ -569,16 +649,6 @@ def load_birth_names():
defaults, defaults,
viz_type="big_number", granularity="ds", viz_type="big_number", granularity="ds",
compare_lag="5", compare_suffix="over 5Y")), compare_lag="5", compare_suffix="over 5Y")),
Slice(
slice_name="Number of Girls",
viz_type='big_number_total',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type="big_number_total", granularity="ds",
flt_col_1='gender', flt_eq_1='girl',
subheader='total female participants')),
Slice( Slice(
slice_name="Genders", slice_name="Genders",
viz_type='pie', viz_type='pie',
@ -643,6 +713,16 @@ def load_birth_names():
defaults, defaults,
viz_type="pivot_table", metrics=['sum__num'], viz_type="pivot_table", metrics=['sum__num'],
groupby=['name'], columns=['state'])), groupby=['name'], columns=['state'])),
Slice(
slice_name="Number of Girls",
viz_type='big_number_total',
datasource_type='table',
table=tbl,
params=get_slice_json(
defaults,
viz_type="big_number_total", granularity="ds",
flt_col_1='gender', flt_eq_1='girl',
subheader='total female participants')),
] ]
for slc in slices: for slc in slices:
merge_slice(slc) merge_slice(slc)
@ -725,6 +805,6 @@ def load_birth_names():
dash.dashboard_title = "Births" dash.dashboard_title = "Births"
dash.position_json = json.dumps(l, indent=4) dash.position_json = json.dumps(l, indent=4)
dash.slug = "births" dash.slug = "births"
dash.slices = slices dash.slices = slices[:-1]
db.session.merge(dash) db.session.merge(dash)
db.session.commit() db.session.commit()

View File

@ -18,7 +18,7 @@ import requests
import sqlalchemy as sqla import sqlalchemy as sqla
import sqlparse import sqlparse
from dateutil.parser import parse from dateutil.parser import parse
from flask import flash, request, g from flask import request, g
from flask.ext.appbuilder import Model from flask.ext.appbuilder import Model
from flask.ext.appbuilder.models.mixins import AuditMixin from flask.ext.appbuilder.models.mixins import AuditMixin
from pydruid.client import PyDruid from pydruid.client import PyDruid
@ -36,6 +36,7 @@ from sqlalchemy_utils import EncryptedType
from caravel import app, db, get_session, utils from caravel import app, db, get_session, utils
from caravel.viz import viz_types from caravel.viz import viz_types
from caravel.utils import flasher
config = app.config config = app.config
@ -666,8 +667,8 @@ class SqlaTable(Model, Queryable, AuditMixinNullable):
try: try:
table = self.database.get_table(self.table_name, schema=self.schema) table = self.database.get_table(self.table_name, schema=self.schema)
except Exception as e: except Exception as e:
flash(str(e)) flasher(str(e))
flash( flasher(
"Table doesn't seem to exist in the specified database, " "Table doesn't seem to exist in the specified database, "
"couldn't fetch column information", "danger") "couldn't fetch column information", "danger")
return return
@ -952,9 +953,9 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable):
if not datasource: if not datasource:
datasource = cls(datasource_name=name) datasource = cls(datasource_name=name)
session.add(datasource) session.add(datasource)
logging.info("Adding new datasource [{}]".format(name)) flasher("Adding new datasource [{}]".format(name), "success")
else: else:
logging.info("Refreshing datasource [{}]".format(name)) flasher("Refreshing datasource [{}]".format(name), "info")
datasource.cluster = cluster datasource.cluster = cluster
cols = datasource.latest_metadata() cols = datasource.latest_metadata()

View File

@ -11,12 +11,23 @@ from datetime import datetime
import parsedatetime import parsedatetime
from dateutil.parser import parse from dateutil.parser import parse
from flask import Markup from flask import flash, Markup
from flask_appbuilder.security.sqla import models as ab_models from flask_appbuilder.security.sqla import models as ab_models
from markdown import markdown as md from markdown import markdown as md
from sqlalchemy.types import TypeDecorator, TEXT from sqlalchemy.types import TypeDecorator, TEXT
def flasher(msg, severity=None):
    """Surface *msg* through Flask's ``flash``, falling back to logging.

    ``flash`` raises ``RuntimeError`` when no request context is active
    (e.g. from a CLI/load-data path); in that case the message is logged
    instead — at error level for ``'danger'`` severity, info otherwise.
    """
    try:
        flash(msg, severity)
    except RuntimeError:
        log_fn = logging.error if severity == 'danger' else logging.info
        log_fn(msg)
class memoized(object): # noqa class memoized(object): # noqa
"""Decorator that caches a function's return value each time it is called """Decorator that caches a function's return value each time it is called

View File

@ -16,7 +16,7 @@ from collections import OrderedDict, defaultdict
from datetime import datetime, timedelta from datetime import datetime, timedelta
import pandas as pd import pandas as pd
import numpy as np import numpy as np
from flask import flash, request, Markup from flask import request, Markup
from markdown import markdown from markdown import markdown
from pandas.io.json import dumps from pandas.io.json import dumps
from six import string_types from six import string_types
@ -25,6 +25,7 @@ from werkzeug.urls import Href
from caravel import app, utils, cache from caravel import app, utils, cache
from caravel.forms import FormFactory from caravel.forms import FormFactory
from caravel.utils import flasher
config = app.config config = app.config
@ -68,8 +69,7 @@ class BaseViz(object):
if not form.validate(): if not form.validate():
for k, v in form.errors.items(): for k, v in form.errors.items():
if not data.get('json') and not data.get('async'): if not data.get('json') and not data.get('async'):
logging.error("{}: {}".format(k, " ".join(v))) flasher("{}: {}".format(k, " ".join(v)), 'danger')
flash("{}: {}".format(k, " ".join(v)), 'danger')
if previous_viz_type != self.viz_type: if previous_viz_type != self.viz_type:
data = { data = {
k: form.data[k] k: form.data[k]
@ -197,7 +197,7 @@ class BaseViz(object):
until = form_data.get("until", "now") until = form_data.get("until", "now")
to_dttm = utils.parse_human_datetime(until) to_dttm = utils.parse_human_datetime(until)
if from_dttm > to_dttm: if from_dttm > to_dttm:
flash("The date range doesn't seem right.", "danger") flasher("The date range doesn't seem right.", "danger")
from_dttm = to_dttm # Making them identical to not raise from_dttm = to_dttm # Making them identical to not raise
# extras are used to query elements specific to a datasource type # extras are used to query elements specific to a datasource type