From a38a8d476e04c242a154b9c509ae1b68f55e0878 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Wed, 13 Jan 2016 17:05:11 -0800 Subject: [PATCH 1/3] Loading another example amazing dash --- panoramix/bin/panoramix | 1 + panoramix/data/__init__.py | 365 ++++++++++++++++----- panoramix/static/widgets/viz_sunburst.js | 160 +++++---- panoramix/templates/panoramix/explore.html | 5 + setup.py | 4 +- 5 files changed, 381 insertions(+), 154 deletions(-) diff --git a/panoramix/bin/panoramix b/panoramix/bin/panoramix index ec508d491c..a498d428c8 100755 --- a/panoramix/bin/panoramix +++ b/panoramix/bin/panoramix @@ -55,6 +55,7 @@ def load_examples(sample): print("Loading [World Bank's Health Nutrition and Population Stats]") data.load_world_bank_health_n_pop() + print("Loading [Birth names]") data.load_birth_names() diff --git a/panoramix/data/__init__.py b/panoramix/data/__init__.py index 547854acb8..ca1159e46c 100644 --- a/panoramix/data/__init__.py +++ b/panoramix/data/__init__.py @@ -7,6 +7,12 @@ from sqlalchemy import String, DateTime from panoramix import app, db, models, utils +# Shortcuts +DB = models.Database +Slice = models.Slice +TBL = models.SqlaTable +Dash = models.Dashboard + config = app.config DATA_FOLDER = os.path.join(config.get("BASE_DIR"), 'data') @@ -14,7 +20,6 @@ DATA_FOLDER = os.path.join(config.get("BASE_DIR"), 'data') def get_or_create_db(session): print("Creating database reference") - DB = models.Database dbobj = session.query(DB).filter_by(database_name='main').first() if not dbobj: dbobj = DB(database_name="main") @@ -25,6 +30,20 @@ def get_or_create_db(session): return dbobj +def merge_slice(slc): + o = db.session.query(Slice).filter_by(slice_name=slc.slice_name).first() + if o: + db.session.delete(o) + db.session.add(slc) + db.session.commit() + + +def get_slice_json(defaults, **kwargs): + d = defaults.copy() + d.update(kwargs) + return json.dumps(d, indent=4, sort_keys=True) + + def load_world_bank_health_n_pop(): tbl = 'wb_health_population' with gzip.open(os.path.join(DATA_FOLDER, 'countries.json.gz')) as f: @@ -42,17 +61,220 @@ def load_world_bank_health_n_pop(): 'region': String(255), }, index=False) - print("Creating table reference") - TBL = models.SqlaTable - obj = db.session.query(TBL).filter_by(table_name=tbl).first() - if not obj: - obj = TBL(table_name='wb_health_population') - obj.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md')) - obj.main_dttm_col = 'year' - obj.database = get_or_create_db(db.session) - db.session.merge(obj) + + print("Creating table [wb_health_population] reference") + tbl = db.session.query(TBL).filter_by(table_name=tbl).first() + if not tbl: + tbl = TBL(table_name='wb_health_population') + tbl.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md')) + tbl.main_dttm_col = 'year' + tbl.database = get_or_create_db(db.session) + db.session.merge(tbl) + db.session.commit() + #tbl.fetch_metadata() + + defaults = { + "compare_lag": "10", + "compare_suffix": "o10Y", + "datasource_id": "1", + "datasource_name": "birth_names", + "datasource_type": "table", + "limit": "25", + "granularity": "year", + "groupby": [], + "metric": 'sum__SP.POP.TOTL', + "metrics": ["sum__SP.POP.TOTL"], + "row_limit": config.get("ROW_LIMIT"), + "since": "2014-01-01", + "until": "2014-01-01", + "where": "", + "markup_type": "markdown", + "country_fieldtype": "cca3", + "secondary_metric": "sum__SP.POP.TOTL", + "entity": "country_code", + "show_bubbles": "y", + } + + print("Creating slices") + slices = [ + Slice( + 
slice_name="Region Filter", + viz_type='filter_box', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + viz_type='filter_box', + groupby=['region'], + )), + Slice( + slice_name="World's Population", + viz_type='big_number', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + since='2000', + viz_type='big_number', + compare_lag="10", + metric='sum__SP.POP.TOTL', + compare_suffix="over 10Y")), + Slice( + slice_name="Most Populated Countries", + viz_type='table', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + viz_type='table', + metrics=["sum__SP.POP.TOTL"], + groupby=['country_name'])), + Slice( + slice_name="Growth Rate", + viz_type='line', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + viz_type='line', + since="1960-01-01", + metrics=["sum__SP.POP.TOTL"], + num_period_compare="10", + groupby=['country_name'])), + Slice( + slice_name="% Rural", + viz_type='world_map', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + viz_type='world_map', + metric= "sum__SP.RUR.TOTL.ZS", + num_period_compare="10",)), + Slice( + slice_name="Life Expexctancy VS Rural %", + viz_type='bubble', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + viz_type='bubble', + since= "2011-01-01", + until= "2011-01-01", + series="region", + limit="0", + entity="country_name", + x="sum__SP.RUR.TOTL.ZS", + y="sum__SP.DYN.LE00.IN", + size="sum__SP.POP.TOTL", + max_bubble_size="50", + flt_col_1="country_code", + flt_op_1= "not in", + flt_eq_1="TCA,MNP,DMA,MHL,MCO,SXM,CYM,TUV,IMY,KNA,ASM,ADO,AMA,PLW", + num_period_compare="10",)), + Slice( + slice_name="Rural Breakdown", + viz_type='sunburst', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + viz_type='sunburst', + groupby=["region", "country_name"], + secondary_metric="sum__SP.RUR.TOTL", + since= "2011-01-01", + until= "2011-01-01",)), + Slice( + slice_name="World's Pop Growth", + viz_type='area', + datasource_type='table', + table=tbl, + params=get_slice_json( + defaults, + since="1960-01-01", + until="now", + viz_type='area', + groupby=["region"],)), + ] + for slc in slices: + merge_slice(slc) + + print("Creating a World's Health Bank dashboard") + dash_name = "World's Health Bank Dashboard" + dash = db.session.query(Dash).filter_by(dashboard_title=dash_name).first() + + if dash: + db.session.delete(dash) + js = """\ +[ + { + "size_y": 1, + "size_x": 3, + "col": 1, + "slice_id": "269", + "row": 1 + }, + { + "size_y": 3, + "size_x": 3, + "col": 1, + "slice_id": "270", + "row": 2 + }, + { + "size_y": 7, + "size_x": 3, + "col": 10, + "slice_id": "271", + "row": 1 + }, + { + "size_y": 3, + "size_x": 6, + "col": 1, + "slice_id": "272", + "row": 5 + }, + { + "size_y": 4, + "size_x": 6, + "col": 4, + "slice_id": "273", + "row": 1 + }, + { + "size_y": 4, + "size_x": 6, + "col": 7, + "slice_id": "274", + "row": 8 + }, + { + "size_y": 3, + "size_x": 3, + "col": 7, + "slice_id": "275", + "row": 5 + }, + { + "size_y": 4, + "size_x": 6, + "col": 1, + "slice_id": "276", + "row": 8 + } +] + """ + l = json.loads(js) + for i, pos in enumerate(l): + pos['slice_id'] = str(slices[i].id) + dash = Dash( + dashboard_title=dash_name, + position_json=json.dumps(l, indent=4), + ) + for s in slices: + dash.slices.append(s) db.session.commit() - obj.fetch_metadata() def load_birth_names(): @@ -77,154 +299,129 @@ def load_birth_names(): print("-" * 80) print("Creating table reference") - TBL = 
models.SqlaTable obj = db.session.query(TBL).filter_by(table_name='birth_names').first() if not obj: obj = TBL(table_name = 'birth_names') obj.main_dttm_col = 'ds' obj.database = get_or_create_db(db.session) - models.Table db.session.merge(obj) db.session.commit() obj.fetch_metadata() tbl = obj + defaults = { + "compare_lag": "10", + "compare_suffix": "o10Y", + "datasource_id": "1", + "datasource_name": "birth_names", + "datasource_type": "table", + "limit": "25", + "granularity": "ds", + "groupby": [], + "metric": 'sum__num', + "metrics": ["sum__num"], + "row_limit": config.get("ROW_LIMIT"), + "since": "100 years", + "until": "now", + "viz_type": "table", + "where": "", + "markup_type": "markdown", + } + print("Creating some slices") - def get_slice_json(**kwargs): - defaults = { - "compare_lag": "10", - "compare_suffix": "o10Y", - "datasource_id": "1", - "datasource_name": "birth_names", - "datasource_type": "table", - "limit": "25", - "flt_col_1": "gender", - "flt_eq_1": "", - "flt_op_1": "in", - "granularity": "ds", - "groupby": [], - "metric": 'sum__num', - "metrics": ["sum__num"], - "row_limit": config.get("ROW_LIMIT"), - "since": "100 years", - "until": "now", - "viz_type": "table", - "where": "", - "markup_type": "markdown", - } - d = defaults.copy() - d.update(kwargs) - return json.dumps(d, indent=4, sort_keys=True) - Slice = models.Slice - slices = [] - - def merge_slice(slc): - o = db.session.query( - Slice).filter_by(slice_name=slc.slice_name).first() - if o: - db.session.delete(slc) - db.session.add(slc) - session.commit() - slices.append(slc) - - merge_slice( + slices = [ Slice( slice_name="Girls", viz_type='table', datasource_type='table', table=tbl, params=get_slice_json( - groupby=['name'], flt_eq_1="girl", row_limit=50))) - - merge_slice( + defaults, + groupby=['name'], flt_eq_1="girl", row_limit=50)), Slice( slice_name="Boys", viz_type='table', datasource_type='table', table=tbl, params=get_slice_json( - groupby=['name'], flt_eq_1="boy", row_limit=50))) - - merge_slice( + defaults, + groupby=['name'], flt_eq_1="boy", row_limit=50)), Slice( slice_name="Participants", viz_type='big_number', datasource_type='table', table=tbl, params=get_slice_json( + defaults, viz_type="big_number", granularity="ds", - compare_lag="5", compare_suffix="over 5Y"))) - - merge_slice( + compare_lag="5", compare_suffix="over 5Y")), Slice( slice_name="Genders", viz_type='pie', datasource_type='table', table=tbl, params=get_slice_json( - viz_type="pie", groupby=['gender']))) - - merge_slice( + defaults, + viz_type="pie", groupby=['gender'])), Slice( slice_name="Genders by State", viz_type='dist_bar', datasource_type='table', table=tbl, params=get_slice_json( + defaults, flt_eq_1="other", viz_type="dist_bar", metrics=['sum__sum_girls', 'sum__sum_boys'], - groupby=['state'], flt_op_1='not in', flt_col_1='state'))) - - merge_slice( + groupby=['state'], flt_op_1='not in', flt_col_1='state')), Slice( slice_name="Trends", viz_type='line', datasource_type='table', table=tbl, params=get_slice_json( + defaults, viz_type="line", groupby=['name'], - granularity='ds', rich_tooltip='y', show_legend='y'))) - - code = """ -
-

Birth Names Dashboard

-

The source dataset came from [here]

- -
- """ - merge_slice( + granularity='ds', rich_tooltip='y', show_legend='y')), Slice( slice_name="Title", viz_type='markup', datasource_type='table', table=tbl, params=get_slice_json( + defaults, viz_type="markup", markup_type="html", - code=code))) - - merge_slice( + code="""\ +
+

Birth Names Dashboard

+

The source dataset came from [here]

+ +
+ """ + )), Slice( slice_name="Name Cloud", viz_type='word_cloud', datasource_type='table', table=tbl, params=get_slice_json( + defaults, viz_type="word_cloud", size_from="10", series='name', size_to="70", rotation="square", - limit='100'))) - - merge_slice( + limit='100')), Slice( slice_name="Pivot Table", viz_type='pivot_table', datasource_type='table', table=tbl, params=get_slice_json( + defaults, viz_type="pivot_table", metrics=['sum__num'], - groupby=['name'], columns=['state']))) + groupby=['name'], columns=['state'])), + ] + for slc in slices: + merge_slice(slc) print("Creating a dashboard") - Dash = models.Dashboard dash = session.query(Dash).filter_by(dashboard_title="Births").first() if dash: diff --git a/panoramix/static/widgets/viz_sunburst.js b/panoramix/static/widgets/viz_sunburst.js index 528fa4dfac..4f209360d7 100644 --- a/panoramix/static/widgets/viz_sunburst.js +++ b/panoramix/static/widgets/viz_sunburst.js @@ -1,6 +1,6 @@ /* -Modified from http://bl.ocks.org/kerryrodden/7090426 -*/ + Modified from http://bl.ocks.org/kerryrodden/7090426 + */ function viz_sunburst(slice) { var container = d3.select(slice.selector); @@ -14,23 +14,23 @@ function viz_sunburst(slice) { container.select("svg").remove(); var vis = container.append("svg:svg") - .attr("width", width) - .attr("height", height) - .append("svg:g") - .attr("id", "container") - .attr("transform", "translate(" + width / 2 + "," + height / 2 + ")"); + .attr("width", width) + .attr("height", height) + .append("svg:g") + .attr("id", "container") + .attr("transform", "translate(" + width / 2 + "," + height / 2 + ")"); var gMiddleText = vis.append("svg:g").attr("id", "gMiddleText"); var partition = d3.layout.partition() - .size([2 * Math.PI, radius * radius]) - .value(function(d) { return d.m1; }); + .size([2 * Math.PI, radius * radius]) + .value(function(d) { return d.m1; }); var arc = d3.svg.arc() - .startAngle(function(d) { return d.x; }) - .endAngle(function(d) { return d.x + d.dx; }) - .innerRadius(function(d) { return Math.sqrt(d.y); }) - .outerRadius(function(d) { return Math.sqrt(d.y + d.dy); }); + .startAngle(function(d) { return d.x; }) + .endAngle(function(d) { return d.x + d.dx; }) + .innerRadius(function(d) { return Math.sqrt(d.y); }) + .outerRadius(function(d) { return Math.sqrt(d.y + d.dy); }); var ext; d3.json(slice.jsonEndpoint(), function(error, json){ @@ -50,31 +50,31 @@ function viz_sunburst(slice) { // Bounding circle underneath the sunburst, to make it easier to detect // when the mouse leaves the parent g. vis.append("svg:circle") - .attr("r", radius) - .style("opacity", 0); + .attr("r", radius) + .style("opacity", 0); // For efficiency, filter nodes to keep only those large enough to see. var nodes = partition.nodes(json) - .filter(function(d) { - return (d.dx > 0.005); // 0.005 radians = 0.29 degrees - }); + .filter(function(d) { + return (d.dx > 0.005); // 0.005 radians = 0.29 degrees + }); ext = d3.extent(nodes, function(d){return d.m2 / d.m1;}); var colorScale = d3.scale.linear() - .domain([ext[0], ext[0] + ((ext[1] - ext[0]) / 2), ext[1]]) - .range(["#00D1C1", "white","#FFB400"]); + .domain([ext[0], ext[0] + ((ext[1] - ext[0]) / 2), ext[1]]) + .range(["#00D1C1", "white","#FFB400"]); var path = vis.data([json]).selectAll("path") - .data(nodes) - .enter().append("svg:path") - .attr("display", function(d) { return d.depth ? 
null : "none"; }) - .attr("d", arc) - .attr("fill-rule", "evenodd") - .style("stroke", "grey") - .style("stroke-width", "1px") - .style("fill", function(d) { return colorScale(d.m2/d.m1); }) - .style("opacity", 1) - .on("mouseenter", mouseenter); + .data(nodes) + .enter().append("svg:path") + .attr("display", function(d) { return d.depth ? null : "none"; }) + .attr("d", arc) + .attr("fill-rule", "evenodd") + .style("stroke", "grey") + .style("stroke-width", "1px") + .style("fill", function(d) { return colorScale(d.m2/d.m1); }) + .style("opacity", 1) + .on("mouseenter", mouseenter); // Add the mouseleave handler to the bounding circle. @@ -82,7 +82,7 @@ function viz_sunburst(slice) { // Get total size of the tree = value of root node from partition. totalSize = path.node().__data__.value; - }; + }; f = d3.format(".3s"); fp = d3.format(".3p"); // Fade all but the current sequence, and show it in the breadcrumb trail. @@ -93,35 +93,59 @@ function viz_sunburst(slice) { gMiddleText.selectAll("*").remove(); gMiddleText.append("text") - .classed("middle", true) - .style("font-size", "50px") - .text(percentageString); + .classed("middle", true) + .style("font-size", "50px") + .text(percentageString); gMiddleText.append("text") - .classed("middle", true) - .style("font-size", "20px") - .attr("y", "25") - .text("m1: " + f(d.m1) + " | m2: " + f(d.m2)); + .classed("middle", true) + .style("font-size", "20px") + .attr("y", "25") + .text("m1: " + f(d.m1) + " | m2: " + f(d.m2)); gMiddleText.append("text") - .classed("middle", true) - .style("font-size", "15px") - .attr("y", "50") - .text("m2/m1: " + fp(d.m2/d.m1)); + .classed("middle", true) + .style("font-size", "15px") + .attr("y", "50") + .text("m2/m1: " + fp(d.m2/d.m1)); var sequenceArray = getAncestors(d); + function breadcrumbPoints(d, i) { + var points = []; + points.push("0,0"); + points.push(b.w + ",0"); + points.push(b.w + b.t + "," + (b.h / 2)); + points.push(b.w + "," + b.h); + points.push("0," + b.h); + if (i > 0) { // Leftmost breadcrumb; don't include 6th vertex. + points.push(b.t + "," + (b.h / 2)); + } + return points.join(" "); + } + + // Update the breadcrumb trail to show the current sequence and percentage. + function updateBreadcrumbs(nodeArray, percentageString) { + l = []; + for(var i=0; i ') + gMiddleText.append("text").text(s).classed("middle", true) + .attr("y", -75); + } + updateBreadcrumbs(sequenceArray, percentageString); // Fade all the segments. container.selectAll("path") - .style("stroke-width", "1px") - .style("opacity", 0.3); + .style("stroke-width", "1px") + .style("opacity", 0.3); // Then highlight only those that are an ancestor of the current segment. container.selectAll("path") - .filter(function(node) { - return (sequenceArray.indexOf(node) >= 0); - }) - .style("opacity", 1) - .style("stroke", "black") - .style("stroke-width", "2px"); + .filter(function(node) { + return (sequenceArray.indexOf(node) >= 0); + }) + .style("opacity", 1) + .style("stroke", "black") + .style("stroke-width", "2px"); } // Restore everything to full opacity when moving off the visualization. @@ -129,7 +153,7 @@ function viz_sunburst(slice) { // Hide the breadcrumb trail container.select("#trail") - .style("visibility", "hidden"); + .style("visibility", "hidden"); gMiddleText.selectAll("*").remove(); // Deactivate all segments during transition. @@ -138,14 +162,14 @@ function viz_sunburst(slice) { // Transition each segment to full opacity and then reactivate it. 
container.selectAll("path") - .transition() - .duration(200) - .style("opacity", 1) - .style("stroke", "grey") - .style("stroke-width", "1px") - .each("end", function() { - d3.select(this).on("mouseenter", mouseenter); - }); + .transition() + .duration(200) + .style("opacity", 1) + .style("stroke", "grey") + .style("stroke-width", "1px") + .each("end", function() { + d3.select(this).on("mouseenter", mouseenter); + }); } // Given a node in a partition layout, return an array of all of its ancestor @@ -200,15 +224,15 @@ function viz_sunburst(slice) { } function recurse(node){ if (node.children){ - var m1 = 0; - var m2 = 0; - for (var i=0; i {{ form.get_field("viz_type")(class_="select2") }} + {{ viz.form_data.slice_name }} + + + +
diff --git a/setup.py b/setup.py index b22708dffe..6648e11af2 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ setup( zip_safe=False, scripts=['panoramix/bin/panoramix'], install_requires=[ - 'alembic>=0.7.7, <0.8.0', + 'alembic>=0.8.2, <0.9.0', 'cryptography>=1.1.1, <2.0.0', 'flask-appbuilder>=1.4.5, <2.0.0', 'flask-login==0.2.11', @@ -35,7 +35,7 @@ setup( 'python-dateutil>=2.4.2, <3.0.0', 'requests>=2.7.0, <3.0.0', 'sqlalchemy-utils>=0.31.3, <0.32.0', - 'sqlalchemy==1.0.8', + 'sqlalchemy>=1.0.8, <2.0.0', 'sqlparse>=0.1.16, <0.2.0', 'werkzeug==0.11.2, <0.12.0', ], From ef5511cccf68b170a71fcbc6b884b84e928dc7f0 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Wed, 13 Jan 2016 17:19:12 -0800 Subject: [PATCH 2/3] Getting stuff to load --- panoramix/data/__init__.py | 2 +- tests/core_tests.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/panoramix/data/__init__.py b/panoramix/data/__init__.py index ca1159e46c..8e8f25ff18 100644 --- a/panoramix/data/__init__.py +++ b/panoramix/data/__init__.py @@ -71,7 +71,7 @@ def load_world_bank_health_n_pop(): tbl.database = get_or_create_db(db.session) db.session.merge(tbl) db.session.commit() - #tbl.fetch_metadata() + tbl.fetch_metadata() defaults = { "compare_lag": "10", diff --git a/tests/core_tests.py b/tests/core_tests.py index 3f9c0b56fe..e016314cc8 100644 --- a/tests/core_tests.py +++ b/tests/core_tests.py @@ -28,6 +28,7 @@ class LiveTest(TestCase): def test_slices(self): Slc = models.Slice for slc in db.session.query(Slc).all(): + print(slc) self.client.get(slc.slice_url) viz = slc.viz self.client.get(viz.get_url()) From a3727fc0919dd5004f13cc26848bd1fed9a72d60 Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Wed, 13 Jan 2016 18:16:01 -0800 Subject: [PATCH 3/3] Making sqlite work --- panoramix/data/__init__.py | 31 ++++++++++++++++--------------- panoramix/models.py | 5 ++--- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/panoramix/data/__init__.py b/panoramix/data/__init__.py index 8e8f25ff18..3d4d58f8d5 100644 --- a/panoramix/data/__init__.py +++ b/panoramix/data/__init__.py @@ -45,12 +45,13 @@ def get_slice_json(defaults, **kwargs): def load_world_bank_health_n_pop(): - tbl = 'wb_health_population' + tbl_name = 'wb_health_population' with gzip.open(os.path.join(DATA_FOLDER, 'countries.json.gz')) as f: pdf = pd.read_json(f) + pdf.columns = [col.replace('.', '_') for col in pdf.columns] pdf.year = pd.to_datetime(pdf.year) pdf.to_sql( - tbl, + tbl_name, db.engine, if_exists='replace', chunksize=500, @@ -63,9 +64,9 @@ def load_world_bank_health_n_pop(): index=False) print("Creating table [wb_health_population] reference") - tbl = db.session.query(TBL).filter_by(table_name=tbl).first() + tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first() if not tbl: - tbl = TBL(table_name='wb_health_population') + tbl = TBL(table_name=tbl_name) tbl.description = utils.readfile(os.path.join(DATA_FOLDER, 'countries.md')) tbl.main_dttm_col = 'year' tbl.database = get_or_create_db(db.session) @@ -82,15 +83,15 @@ def load_world_bank_health_n_pop(): "limit": "25", "granularity": "year", "groupby": [], - "metric": 'sum__SP.POP.TOTL', - "metrics": ["sum__SP.POP.TOTL"], + "metric": 'sum__SP_POP_TOTL', + "metrics": ["sum__SP_POP_TOTL"], "row_limit": config.get("ROW_LIMIT"), "since": "2014-01-01", "until": "2014-01-01", "where": "", "markup_type": "markdown", "country_fieldtype": "cca3", - "secondary_metric": "sum__SP.POP.TOTL", + "secondary_metric": "sum__SP_POP_TOTL", "entity": "country_code", 
"show_bubbles": "y", } @@ -117,7 +118,7 @@ def load_world_bank_health_n_pop(): since='2000', viz_type='big_number', compare_lag="10", - metric='sum__SP.POP.TOTL', + metric='sum__SP_POP_TOTL', compare_suffix="over 10Y")), Slice( slice_name="Most Populated Countries", @@ -127,7 +128,7 @@ def load_world_bank_health_n_pop(): params=get_slice_json( defaults, viz_type='table', - metrics=["sum__SP.POP.TOTL"], + metrics=["sum__SP_POP_TOTL"], groupby=['country_name'])), Slice( slice_name="Growth Rate", @@ -138,7 +139,7 @@ def load_world_bank_health_n_pop(): defaults, viz_type='line', since="1960-01-01", - metrics=["sum__SP.POP.TOTL"], + metrics=["sum__SP_POP_TOTL"], num_period_compare="10", groupby=['country_name'])), Slice( @@ -149,7 +150,7 @@ def load_world_bank_health_n_pop(): params=get_slice_json( defaults, viz_type='world_map', - metric= "sum__SP.RUR.TOTL.ZS", + metric= "sum__SP_RUR_TOTL_ZS", num_period_compare="10",)), Slice( slice_name="Life Expexctancy VS Rural %", @@ -164,9 +165,9 @@ def load_world_bank_health_n_pop(): series="region", limit="0", entity="country_name", - x="sum__SP.RUR.TOTL.ZS", - y="sum__SP.DYN.LE00.IN", - size="sum__SP.POP.TOTL", + x="sum__SP_RUR_TOTL_ZS", + y="sum__SP_DYN_LE00_IN", + size="sum__SP_POP_TOTL", max_bubble_size="50", flt_col_1="country_code", flt_op_1= "not in", @@ -181,7 +182,7 @@ def load_world_bank_health_n_pop(): defaults, viz_type='sunburst', groupby=["region", "country_name"], - secondary_metric="sum__SP.RUR.TOTL", + secondary_metric="sum__SP_RUR_TOTL", since= "2011-01-01", until= "2011-01-01",)), Slice( diff --git a/panoramix/models.py b/panoramix/models.py index b12a2d7b0d..ee70e844f2 100644 --- a/panoramix/models.py +++ b/panoramix/models.py @@ -11,7 +11,6 @@ from dateutil.parser import parse from flask import flash from flask.ext.appbuilder import Model from flask.ext.appbuilder.models.mixins import AuditMixin -import pandas as pd from pandas import read_sql_query from pydruid import client from pydruid.utils.filters import Dimension, Filter @@ -457,8 +456,8 @@ class SqlaTable(Model, Queryable, AuditMixinNullable): qry = qry.group_by(*groupby_exprs) time_filter = [ - timestamp >= from_dttm.isoformat(), - timestamp <= to_dttm.isoformat(), + timestamp >= from_dttm, + timestamp <= to_dttm, ] inner_time_filter = copy(time_filter) if inner_from_dttm: