diff --git a/.travis.yml b/.travis.yml index 336659866c..bd7bb7eb8a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,5 +34,4 @@ install: - pip install --upgrade pip - pip install tox tox-travis - rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && (cd ~/.nvm && git checkout `git describe --abbrev=0 --tags`) && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION - - npm install script: tox -e $TOX_ENV diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2776da8c9f..6c681f461e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -211,6 +211,9 @@ following commands. The `dev` flag will keep the npm script running and re-run it upon any changes within the assets directory. ``` +# Copies a conf file from the frontend to the backend +npm run sync-backend + # Compiles the production / optimized js & css npm run prod diff --git a/run_tests.sh b/run_tests.sh index db7bda6b7d..55760f633b 100755 --- a/run_tests.sh +++ b/run_tests.sh @@ -7,7 +7,6 @@ rm -f .coverage export SUPERSET_CONFIG=tests.superset_test_config set -e superset/bin/superset db upgrade -superset/bin/superset db upgrade # running twice on purpose as a test superset/bin/superset version -v python setup.py nosetests coveralls diff --git a/superset/__init__.py b/superset/__init__.py index b9cc6b0041..1838c49b23 100644 --- a/superset/__init__.py +++ b/superset/__init__.py @@ -6,6 +6,7 @@ from __future__ import unicode_literals import logging import os +import json from logging.handlers import TimedRotatingFileHandler from flask import Flask, redirect @@ -21,6 +22,10 @@ from superset import utils APP_DIR = os.path.dirname(__file__) CONFIG_MODULE = os.environ.get('SUPERSET_CONFIG', 'superset.config') +with open(APP_DIR + '/static/assets/backendSync.json', 'r') as f: + frontend_config = json.load(f) + + app = Flask(__name__) app.config.from_object(CONFIG_MODULE) conf = app.config diff --git a/superset/assets/backendSync.json b/superset/assets/backendSync.json new file mode 100644 index 
0000000000..5a03b67b52 --- /dev/null +++ b/superset/assets/backendSync.json @@ -0,0 +1,2278 @@ +{ + "fields": { + "datasource": { + "type": "SelectField", + "label": "Datasource", + "isLoading": true, + "clearable": false, + "default": null, + "description": "" + }, + "viz_type": { + "type": "SelectField", + "label": "Visualization Type", + "clearable": false, + "default": "table", + "choices": [ + [ + "dist_bar", + "Distribution - Bar Chart", + "/static/assets/images/viz_thumbnails/dist_bar.png" + ], + [ + "pie", + "Pie Chart", + "/static/assets/images/viz_thumbnails/pie.png" + ], + [ + "line", + "Time Series - Line Chart", + "/static/assets/images/viz_thumbnails/line.png" + ], + [ + "dual_line", + "Time Series - Dual Axis Line Chart", + "/static/assets/images/viz_thumbnails/dual_line.png" + ], + [ + "bar", + "Time Series - Bar Chart", + "/static/assets/images/viz_thumbnails/bar.png" + ], + [ + "compare", + "Time Series - Percent Change", + "/static/assets/images/viz_thumbnails/compare.png" + ], + [ + "area", + "Time Series - Stacked", + "/static/assets/images/viz_thumbnails/area.png" + ], + [ + "table", + "Table View", + "/static/assets/images/viz_thumbnails/table.png" + ], + [ + "markup", + "Markup", + "/static/assets/images/viz_thumbnails/markup.png" + ], + [ + "pivot_table", + "Pivot Table", + "/static/assets/images/viz_thumbnails/pivot_table.png" + ], + [ + "separator", + "Separator", + "/static/assets/images/viz_thumbnails/separator.png" + ], + [ + "word_cloud", + "Word Cloud", + "/static/assets/images/viz_thumbnails/word_cloud.png" + ], + [ + "treemap", + "Treemap", + "/static/assets/images/viz_thumbnails/treemap.png" + ], + [ + "cal_heatmap", + "Calendar Heatmap", + "/static/assets/images/viz_thumbnails/cal_heatmap.png" + ], + [ + "box_plot", + "Box Plot", + "/static/assets/images/viz_thumbnails/box_plot.png" + ], + [ + "bubble", + "Bubble Chart", + "/static/assets/images/viz_thumbnails/bubble.png" + ], + [ + "bullet", + "Bullet Chart", + 
"/static/assets/images/viz_thumbnails/bullet.png" + ], + [ + "big_number", + "Big Number with Trendline", + "/static/assets/images/viz_thumbnails/big_number.png" + ], + [ + "big_number_total", + "Big Number", + "/static/assets/images/viz_thumbnails/big_number_total.png" + ], + [ + "histogram", + "Histogram", + "/static/assets/images/viz_thumbnails/histogram.png" + ], + [ + "sunburst", + "Sunburst", + "/static/assets/images/viz_thumbnails/sunburst.png" + ], + [ + "sankey", + "Sankey", + "/static/assets/images/viz_thumbnails/sankey.png" + ], + [ + "directed_force", + "Directed Force Layout", + "/static/assets/images/viz_thumbnails/directed_force.png" + ], + [ + "world_map", + "World Map", + "/static/assets/images/viz_thumbnails/world_map.png" + ], + [ + "filter_box", + "Filter Box", + "/static/assets/images/viz_thumbnails/filter_box.png" + ], + [ + "iframe", + "iFrame", + "/static/assets/images/viz_thumbnails/iframe.png" + ], + [ + "para", + "Parallel Coordinates", + "/static/assets/images/viz_thumbnails/para.png" + ], + [ + "heatmap", + "Heatmap", + "/static/assets/images/viz_thumbnails/heatmap.png" + ], + [ + "horizon", + "Horizon", + "/static/assets/images/viz_thumbnails/horizon.png" + ], + [ + "mapbox", + "Mapbox", + "/static/assets/images/viz_thumbnails/mapbox.png" + ] + ], + "description": "The type of visualization to display" + }, + "metrics": { + "type": "SelectField", + "multi": true, + "label": "Metrics", + "validators": [ + null + ], + "description": "One or many metrics to display" + }, + "order_by_cols": { + "type": "SelectField", + "multi": true, + "label": "Ordering", + "default": [], + "description": "One or many metrics to display" + }, + "metric": { + "type": "SelectField", + "label": "Metric", + "clearable": false, + "description": "Choose the metric" + }, + "metric_2": { + "type": "SelectField", + "label": "Right Axis Metric", + "choices": [], + "default": [], + "description": "Choose a metric for right axis" + }, + "stacked_style": { + "type": 
"SelectField", + "label": "Stacked Style", + "choices": [ + [ + "stack", + "stack" + ], + [ + "stream", + "stream" + ], + [ + "expand", + "expand" + ] + ], + "default": "stack", + "description": "" + }, + "linear_color_scheme": { + "type": "SelectField", + "label": "Linear Color Scheme", + "choices": [ + [ + "fire", + "fire" + ], + [ + "blue_white_yellow", + "blue/white/yellow" + ], + [ + "white_black", + "white/black" + ], + [ + "black_white", + "black/white" + ] + ], + "default": "blue_white_yellow", + "description": "" + }, + "normalize_across": { + "type": "SelectField", + "label": "Normalize Across", + "choices": [ + [ + "heatmap", + "heatmap" + ], + [ + "x", + "x" + ], + [ + "y", + "y" + ] + ], + "default": "heatmap", + "description": "Color will be rendered based on a ratio of the cell against the sum of across this criteria" + }, + "horizon_color_scale": { + "type": "SelectField", + "label": "Horizon Color Scale", + "choices": [ + [ + "series", + "series" + ], + [ + "overall", + "overall" + ], + [ + "change", + "change" + ] + ], + "default": "series", + "description": "Defines how the color are attributed." 
+ }, + "canvas_image_rendering": { + "type": "SelectField", + "label": "Rendering", + "choices": [ + [ + "pixelated", + "pixelated (Sharp)" + ], + [ + "auto", + "auto (Smooth)" + ] + ], + "default": "pixelated", + "description": "image-rendering CSS attribute of the canvas object that defines how the browser scales up the image" + }, + "xscale_interval": { + "type": "SelectField", + "label": "XScale Interval", + "choices": [ + [ + 1, + "1" + ], + [ + 2, + "2" + ], + [ + 3, + "3" + ], + [ + 4, + "4" + ], + [ + 5, + "5" + ], + [ + 6, + "6" + ], + [ + 7, + "7" + ], + [ + 8, + "8" + ], + [ + 9, + "9" + ], + [ + 10, + "10" + ], + [ + 11, + "11" + ], + [ + 12, + "12" + ], + [ + 13, + "13" + ], + [ + 14, + "14" + ], + [ + 15, + "15" + ], + [ + 16, + "16" + ], + [ + 17, + "17" + ], + [ + 18, + "18" + ], + [ + 19, + "19" + ], + [ + 20, + "20" + ], + [ + 21, + "21" + ], + [ + 22, + "22" + ], + [ + 23, + "23" + ], + [ + 24, + "24" + ], + [ + 25, + "25" + ], + [ + 26, + "26" + ], + [ + 27, + "27" + ], + [ + 28, + "28" + ], + [ + 29, + "29" + ], + [ + 30, + "30" + ], + [ + 31, + "31" + ], + [ + 32, + "32" + ], + [ + 33, + "33" + ], + [ + 34, + "34" + ], + [ + 35, + "35" + ], + [ + 36, + "36" + ], + [ + 37, + "37" + ], + [ + 38, + "38" + ], + [ + 39, + "39" + ], + [ + 40, + "40" + ], + [ + 41, + "41" + ], + [ + 42, + "42" + ], + [ + 43, + "43" + ], + [ + 44, + "44" + ], + [ + 45, + "45" + ], + [ + 46, + "46" + ], + [ + 47, + "47" + ], + [ + 48, + "48" + ], + [ + 49, + "49" + ], + [ + 50, + "50" + ] + ], + "default": "1", + "description": "Number of steps to take between ticks when displaying the X scale" + }, + "yscale_interval": { + "type": "SelectField", + "label": "YScale Interval", + "choices": [ + [ + 1, + "1" + ], + [ + 2, + "2" + ], + [ + 3, + "3" + ], + [ + 4, + "4" + ], + [ + 5, + "5" + ], + [ + 6, + "6" + ], + [ + 7, + "7" + ], + [ + 8, + "8" + ], + [ + 9, + "9" + ], + [ + 10, + "10" + ], + [ + 11, + "11" + ], + [ + 12, + "12" + ], + [ + 13, + "13" + ], + [ + 14, + 
"14" + ], + [ + 15, + "15" + ], + [ + 16, + "16" + ], + [ + 17, + "17" + ], + [ + 18, + "18" + ], + [ + 19, + "19" + ], + [ + 20, + "20" + ], + [ + 21, + "21" + ], + [ + 22, + "22" + ], + [ + 23, + "23" + ], + [ + 24, + "24" + ], + [ + 25, + "25" + ], + [ + 26, + "26" + ], + [ + 27, + "27" + ], + [ + 28, + "28" + ], + [ + 29, + "29" + ], + [ + 30, + "30" + ], + [ + 31, + "31" + ], + [ + 32, + "32" + ], + [ + 33, + "33" + ], + [ + 34, + "34" + ], + [ + 35, + "35" + ], + [ + 36, + "36" + ], + [ + 37, + "37" + ], + [ + 38, + "38" + ], + [ + 39, + "39" + ], + [ + 40, + "40" + ], + [ + 41, + "41" + ], + [ + 42, + "42" + ], + [ + 43, + "43" + ], + [ + 44, + "44" + ], + [ + 45, + "45" + ], + [ + 46, + "46" + ], + [ + 47, + "47" + ], + [ + 48, + "48" + ], + [ + 49, + "49" + ], + [ + 50, + "50" + ] + ], + "default": null, + "description": "Number of steps to take between ticks when displaying the Y scale" + }, + "bar_stacked": { + "type": "CheckboxField", + "label": "Stacked Bars", + "renderTrigger": true, + "default": false, + "description": null + }, + "show_markers": { + "type": "CheckboxField", + "label": "Show Markers", + "renderTrigger": true, + "default": false, + "description": "Show data points as circle markers on the lines" + }, + "show_bar_value": { + "type": "CheckboxField", + "label": "Bar Values", + "default": false, + "renderTrigger": true, + "description": "Show the value on top of the bar" + }, + "order_bars": { + "type": "CheckboxField", + "label": "Sort Bars", + "default": false, + "description": "Sort bars by x labels." + }, + "show_controls": { + "type": "CheckboxField", + "label": "Extra Controls", + "renderTrigger": true, + "default": false, + "description": "Whether to show extra controls or not. Extra controls include things like making mulitBar charts stacked or side by side." 
+ }, + "reduce_x_ticks": { + "type": "CheckboxField", + "label": "Reduce X ticks", + "renderTrigger": true, + "default": false, + "description": "Reduces the number of X axis ticks to be rendered. If true, the x axis wont overflow and labels may be missing. If false, a minimum width will be applied to columns and the width may overflow into an horizontal scroll." + }, + "include_series": { + "type": "CheckboxField", + "label": "Include Series", + "renderTrigger": true, + "default": false, + "description": "Include series name as an axis" + }, + "secondary_metric": { + "type": "SelectField", + "label": "Color Metric", + "default": null, + "description": "A metric to use for color" + }, + "country_fieldtype": { + "type": "SelectField", + "label": "Country Field Type", + "default": "cca2", + "choices": [ + [ + "name", + "Full name" + ], + [ + "cioc", + "code International Olympic Committee (cioc)" + ], + [ + "cca2", + "code ISO 3166-1 alpha-2 (cca2)" + ], + [ + "cca3", + "code ISO 3166-1 alpha-3 (cca3)" + ] + ], + "description": "The country code standard that Superset should expect to find in the [country] column" + }, + "groupby": { + "type": "SelectField", + "multi": true, + "label": "Group by", + "default": [], + "description": "One or many fields to group by" + }, + "columns": { + "type": "SelectField", + "multi": true, + "label": "Columns", + "default": [], + "description": "One or many fields to pivot as columns" + }, + "all_columns": { + "type": "SelectField", + "multi": true, + "label": "Columns", + "default": [], + "description": "Columns to display" + }, + "all_columns_x": { + "type": "SelectField", + "label": "X", + "default": null, + "description": "Columns to display" + }, + "all_columns_y": { + "type": "SelectField", + "label": "Y", + "default": null, + "description": "Columns to display" + }, + "druid_time_origin": { + "type": "SelectField", + "freeForm": true, + "label": "Origin", + "choices": [ + [ + "", + "default" + ], + [ + "now", + "now" + ] + ], 
+ "default": null, + "description": "Defines the origin where time buckets start, accepts natural dates as in `now`, `sunday` or `1970-01-01`" + }, + "bottom_margin": { + "type": "SelectField", + "freeForm": true, + "label": "Bottom Margin", + "choices": [ + [ + "auto", + "auto" + ], + [ + 50, + "50" + ], + [ + 75, + "75" + ], + [ + 100, + "100" + ], + [ + 125, + "125" + ], + [ + 150, + "150" + ], + [ + 200, + "200" + ] + ], + "default": "auto", + "description": "Bottom marging, in pixels, allowing for more room for axis labels" + }, + "granularity": { + "type": "SelectField", + "freeForm": true, + "label": "Time Granularity", + "default": "one day", + "choices": [ + [ + "all", + "all" + ], + [ + "5 seconds", + "5 seconds" + ], + [ + "30 seconds", + "30 seconds" + ], + [ + "1 minute", + "1 minute" + ], + [ + "5 minutes", + "5 minutes" + ], + [ + "1 hour", + "1 hour" + ], + [ + "6 hour", + "6 hour" + ], + [ + "1 day", + "1 day" + ], + [ + "7 days", + "7 days" + ], + [ + "week", + "week" + ], + [ + "week_starting_sunday", + "week_starting_sunday" + ], + [ + "week_ending_saturday", + "week_ending_saturday" + ], + [ + "month", + "month" + ] + ], + "description": "The time granularity for the visualization. Note that you can type and use simple natural language as in `10 seconds`, `1 day` or `56 weeks`" + }, + "domain_granularity": { + "type": "SelectField", + "label": "Domain", + "default": "month", + "choices": [ + [ + "hour", + "hour" + ], + [ + "day", + "day" + ], + [ + "week", + "week" + ], + [ + "month", + "month" + ], + [ + "year", + "year" + ] + ], + "description": "The time unit used for the grouping of blocks" + }, + "subdomain_granularity": { + "type": "SelectField", + "label": "Subdomain", + "default": "day", + "choices": [ + [ + "min", + "min" + ], + [ + "hour", + "hour" + ], + [ + "day", + "day" + ], + [ + "week", + "week" + ], + [ + "month", + "month" + ] + ], + "description": "The time unit for each block. 
Should be a smaller unit than domain_granularity. Should be larger or equal to Time Grain" + }, + "link_length": { + "type": "SelectField", + "freeForm": true, + "label": "Link Length", + "default": "200", + "choices": [ + [ + "10", + "10" + ], + [ + "25", + "25" + ], + [ + "50", + "50" + ], + [ + "75", + "75" + ], + [ + "100", + "100" + ], + [ + "150", + "150" + ], + [ + "200", + "200" + ], + [ + "250", + "250" + ] + ], + "description": "Link length in the force layout" + }, + "charge": { + "type": "SelectField", + "freeForm": true, + "label": "Charge", + "default": "-500", + "choices": [ + [ + "-50", + "-50" + ], + [ + "-75", + "-75" + ], + [ + "-100", + "-100" + ], + [ + "-150", + "-150" + ], + [ + "-200", + "-200" + ], + [ + "-250", + "-250" + ], + [ + "-500", + "-500" + ], + [ + "-1000", + "-1000" + ], + [ + "-2500", + "-2500" + ], + [ + "-5000", + "-5000" + ] + ], + "description": "Charge in the force layout" + }, + "granularity_sqla": { + "type": "SelectField", + "label": "Time Column", + "description": "The time column for the visualization. Note that you can define arbitrary expression that return a DATETIME column in the table or. Also note that the filter below is applied against this column or expression" + }, + "time_grain_sqla": { + "type": "SelectField", + "label": "Time Grain", + "description": "The time granularity for the visualization. This applies a date transformation to alter your time column and defines a new time granularity. The options here are defined on a per database engine basis in the Superset source code." 
+ }, + "resample_rule": { + "type": "SelectField", + "freeForm": true, + "label": "Resample Rule", + "default": null, + "choices": [ + [ + "", + "" + ], + [ + "1T", + "1T" + ], + [ + "1H", + "1H" + ], + [ + "1D", + "1D" + ], + [ + "7D", + "7D" + ], + [ + "1M", + "1M" + ], + [ + "1AS", + "1AS" + ] + ], + "description": "Pandas resample rule" + }, + "resample_how": { + "type": "SelectField", + "freeForm": true, + "label": "Resample How", + "default": null, + "choices": [ + [ + "", + "" + ], + [ + "mean", + "mean" + ], + [ + "sum", + "sum" + ], + [ + "median", + "median" + ] + ], + "description": "Pandas resample how" + }, + "resample_fillmethod": { + "type": "SelectField", + "freeForm": true, + "label": "Resample Fill Method", + "default": null, + "choices": [ + [ + "", + "" + ], + [ + "ffill", + "ffill" + ], + [ + "bfill", + "bfill" + ] + ], + "description": "Pandas resample fill method" + }, + "since": { + "type": "SelectField", + "freeForm": true, + "label": "Since", + "default": "7 days ago", + "choices": [ + [ + "1 hour ago", + "1 hour ago" + ], + [ + "12 hours ago", + "12 hours ago" + ], + [ + "1 day ago", + "1 day ago" + ], + [ + "7 days ago", + "7 days ago" + ], + [ + "28 days ago", + "28 days ago" + ], + [ + "90 days ago", + "90 days ago" + ], + [ + "1 year ago", + "1 year ago" + ], + [ + "100 year ago", + "100 year ago" + ] + ], + "description": "Timestamp from filter. 
This supports free form typing and natural language as in `1 day ago`, `28 days` or `3 years`" + }, + "until": { + "type": "SelectField", + "freeForm": true, + "label": "Until", + "default": "now", + "choices": [ + [ + "now", + "now" + ], + [ + "1 day ago", + "1 day ago" + ], + [ + "7 days ago", + "7 days ago" + ], + [ + "28 days ago", + "28 days ago" + ], + [ + "90 days ago", + "90 days ago" + ], + [ + "1 year ago", + "1 year ago" + ] + ] + }, + "max_bubble_size": { + "type": "SelectField", + "freeForm": true, + "label": "Max Bubble Size", + "default": "25", + "choices": [ + [ + "5", + "5" + ], + [ + "10", + "10" + ], + [ + "15", + "15" + ], + [ + "25", + "25" + ], + [ + "50", + "50" + ], + [ + "75", + "75" + ], + [ + "100", + "100" + ] + ] + }, + "whisker_options": { + "type": "SelectField", + "freeForm": true, + "label": "Whisker/outlier options", + "default": "Tukey", + "description": "Determines how whiskers and outliers are calculated.", + "choices": [ + [ + "Tukey", + "Tukey" + ], + [ + "Min/max (no outliers)", + "Min/max (no outliers)" + ], + [ + "2/98 percentiles", + "2/98 percentiles" + ], + [ + "9/91 percentiles", + "9/91 percentiles" + ] + ] + }, + "treemap_ratio": { + "type": "TextField", + "label": "Ratio", + "isFloat": true, + "default": 1.618033988749895, + "description": "Target aspect ratio for treemap tiles." 
+ }, + "number_format": { + "type": "SelectField", + "freeForm": true, + "label": "Number format", + "default": [ + ".3s", + ".3s | 12.3k" + ], + "choices": [ + [ + ".3s", + ".3s | 12.3k" + ], + [ + ".3%", + ".3% | 1234543.210%" + ], + [ + ".4r", + ".4r | 12350" + ], + [ + ".3f", + ".3f | 12345.432" + ], + [ + "+,", + "+, | +12,345.4321" + ], + [ + "$,.2f", + "$,.2f | $12,345.43" + ] + ], + "description": "D3 format syntax: https://github.com/d3/d3-format" + }, + "row_limit": { + "type": "SelectField", + "freeForm": true, + "label": "Row limit", + "default": null, + "choices": [ + [ + 10, + "10" + ], + [ + 50, + "50" + ], + [ + 100, + "100" + ], + [ + 250, + "250" + ], + [ + 500, + "500" + ], + [ + 1000, + "1000" + ], + [ + 5000, + "5000" + ], + [ + 10000, + "10000" + ], + [ + 50000, + "50000" + ] + ] + }, + "limit": { + "type": "SelectField", + "freeForm": true, + "label": "Series limit", + "choices": [ + [ + 0, + "0" + ], + [ + 5, + "5" + ], + [ + 10, + "10" + ], + [ + 25, + "25" + ], + [ + 50, + "50" + ], + [ + 100, + "100" + ], + [ + 500, + "500" + ] + ], + "default": 50, + "description": "Limits the number of time series that get displayed" + }, + "timeseries_limit_metric": { + "type": "SelectField", + "label": "Sort By", + "default": null, + "description": "Metric used to define the top series" + }, + "rolling_type": { + "type": "SelectField", + "label": "Rolling", + "default": "None", + "choices": [ + [ + "None", + "None" + ], + [ + "mean", + "mean" + ], + [ + "sum", + "sum" + ], + [ + "std", + "std" + ], + [ + "cumsum", + "cumsum" + ] + ], + "description": "Defines a rolling window function to apply, works along with the [Periods] text box" + }, + "rolling_periods": { + "type": "TextField", + "label": "Periods", + "isInt": true, + "description": "Defines the size of the rolling window function, relative to the time granularity selected" + }, + "series": { + "type": "SelectField", + "label": "Series", + "default": null, + "description": "Defines the grouping 
of entities. Each series is shown as a specific color on the chart and has a legend toggle" + }, + "entity": { + "type": "SelectField", + "label": "Entity", + "default": null, + "description": "This define the element to be plotted on the chart" + }, + "x": { + "type": "SelectField", + "label": "X Axis", + "default": null, + "description": "Metric assigned to the [X] axis" + }, + "y": { + "type": "SelectField", + "label": "Y Axis", + "default": null, + "description": "Metric assigned to the [Y] axis" + }, + "size": { + "type": "SelectField", + "label": "Bubble Size", + "default": null + }, + "url": { + "type": "TextField", + "label": "URL", + "description": "The URL, this field is templated, so you can integrate {{ width }} and/or {{ height }} in your URL string.", + "default": "https: //www.youtube.com/embed/JkI5rg_VcQ4" + }, + "x_axis_label": { + "type": "TextField", + "label": "X Axis Label", + "renderTrigger": true, + "default": "" + }, + "y_axis_label": { + "type": "TextField", + "label": "Y Axis Label", + "renderTrigger": true, + "default": "" + }, + "where": { + "type": "TextField", + "label": "Custom WHERE clause", + "default": "", + "description": "The text in this box gets included in your query's WHERE clause, as an AND to other criteria. You can include complex expression, parenthesis and anything else supported by the backend it is directed towards." + }, + "having": { + "type": "TextField", + "label": "Custom HAVING clause", + "default": "", + "description": "The text in this box gets included in your query's HAVING clause, as an AND to other criteria. You can include complex expression, parenthesis and anything else supported by the backend it is directed towards." 
+ }, + "compare_lag": { + "type": "TextField", + "label": "Comparison Period Lag", + "isInt": true, + "description": "Based on granularity, number of time periods to compare against" + }, + "compare_suffix": { + "type": "TextField", + "label": "Comparison suffix", + "description": "Suffix to apply after the percentage display" + }, + "table_timestamp_format": { + "type": "SelectField", + "freeForm": true, + "label": "Table Timestamp Format", + "default": "smart_date", + "choices": [ + [ + "smart_date", + "Adaptative formating" + ], + [ + "%m/%d/%Y", + "%m/%d/%Y | 01/14/2019" + ], + [ + "%Y-%m-%d", + "%Y-%m-%d | 2019-01-14" + ], + [ + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10" + ], + [ + "%H:%M:%S", + "%H:%M:%S | 01:32:10" + ] + ], + "description": "Timestamp Format" + }, + "series_height": { + "type": "SelectField", + "freeForm": true, + "label": "Series Height", + "default": "25", + "choices": [ + [ + "10", + "10" + ], + [ + "25", + "25" + ], + [ + "40", + "40" + ], + [ + "50", + "50" + ], + [ + "75", + "75" + ], + [ + "100", + "100" + ], + [ + "150", + "150" + ], + [ + "200", + "200" + ] + ], + "description": "Pixel height of each series" + }, + "page_length": { + "type": "SelectField", + "freeForm": true, + "label": "Page Length", + "default": 0, + "choices": [ + [ + 0, + "0" + ], + [ + 10, + "10" + ], + [ + 25, + "25" + ], + [ + 40, + "40" + ], + [ + 50, + "50" + ], + [ + 75, + "75" + ], + [ + 100, + "100" + ], + [ + 150, + "150" + ], + [ + 200, + "200" + ] + ], + "description": "Rows per page, 0 means no pagination" + }, + "x_axis_format": { + "type": "SelectField", + "freeForm": true, + "label": "X axis format", + "renderTrigger": true, + "default": "smart_date", + "choices": [ + [ + "smart_date", + "Adaptative formating" + ], + [ + "%m/%d/%Y", + "%m/%d/%Y | 01/14/2019" + ], + [ + "%Y-%m-%d", + "%Y-%m-%d | 2019-01-14" + ], + [ + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10" + ], + [ + "%H:%M:%S", + "%H:%M:%S | 
01:32:10" + ] + ], + "description": "D3 format syntax: https://github.com/d3/d3-format" + }, + "y_axis_format": { + "type": "SelectField", + "freeForm": true, + "label": "Y axis format", + "renderTrigger": true, + "default": ".3s", + "choices": [ + [ + ".3s", + ".3s | 12.3k" + ], + [ + ".3%", + ".3% | 1234543.210%" + ], + [ + ".4r", + ".4r | 12350" + ], + [ + ".3f", + ".3f | 12345.432" + ], + [ + "+,", + "+, | +12,345.4321" + ], + [ + "$,.2f", + "$,.2f | $12,345.43" + ] + ], + "description": "D3 format syntax: https://github.com/d3/d3-format" + }, + "y_axis_2_format": { + "type": "SelectField", + "freeForm": true, + "label": "Right axis format", + "default": ".3s", + "choices": [ + [ + ".3s", + ".3s | 12.3k" + ], + [ + ".3%", + ".3% | 1234543.210%" + ], + [ + ".4r", + ".4r | 12350" + ], + [ + ".3f", + ".3f | 12345.432" + ], + [ + "+,", + "+, | +12,345.4321" + ], + [ + "$,.2f", + "$,.2f | $12,345.43" + ] + ], + "description": "D3 format syntax: https://github.com/d3/d3-format" + }, + "markup_type": { + "type": "SelectField", + "label": "Markup Type", + "choices": [ + [ + "markdown", + "markdown" + ], + [ + "html", + "html" + ] + ], + "default": "markdown", + "description": "Pick your favorite markup language" + }, + "rotation": { + "type": "SelectField", + "label": "Rotation", + "choices": [ + [ + "random", + "random" + ], + [ + "flat", + "flat" + ], + [ + "square", + "square" + ] + ], + "default": "random", + "description": "Rotation to apply to words in the cloud" + }, + "line_interpolation": { + "type": "SelectField", + "label": "Line Style", + "renderTrigger": true, + "choices": [ + [ + "linear", + "linear" + ], + [ + "basis", + "basis" + ], + [ + "cardinal", + "cardinal" + ], + [ + "monotone", + "monotone" + ], + [ + "step-before", + "step-before" + ], + [ + "step-after", + "step-after" + ] + ], + "default": "linear", + "description": "Line interpolation as defined by d3.js" + }, + "pie_label_type": { + "type": "SelectField", + "label": "Label Type", + 
"default": "key", + "choices": [ + [ + "key", + "Category Name" + ], + [ + "value", + "Value" + ], + [ + "percent", + "Percentage" + ] + ], + "description": "What should be shown on the label?" + }, + "code": { + "type": "TextAreaField", + "label": "Code", + "description": "Put your code here", + "default": "" + }, + "pandas_aggfunc": { + "type": "SelectField", + "label": "Aggregation function", + "clearable": false, + "choices": [ + [ + "sum", + "sum" + ], + [ + "mean", + "mean" + ], + [ + "min", + "min" + ], + [ + "max", + "max" + ], + [ + "median", + "median" + ], + [ + "stdev", + "stdev" + ], + [ + "var", + "var" + ] + ], + "default": "sum", + "description": "Aggregate function to apply when pivoting and computing the total rows and columns" + }, + "size_from": { + "type": "TextField", + "isInt": true, + "label": "Font Size From", + "default": "20", + "description": "Font size for the smallest value in the list" + }, + "size_to": { + "type": "TextField", + "isInt": true, + "label": "Font Size To", + "default": "150", + "description": "Font size for the biggest value in the list" + }, + "show_brush": { + "type": "CheckboxField", + "label": "Range Filter", + "renderTrigger": true, + "default": false, + "description": "Whether to display the time range interactive selector" + }, + "date_filter": { + "type": "CheckboxField", + "label": "Date Filter", + "default": false, + "description": "Whether to include a time filter" + }, + "show_datatable": { + "type": "CheckboxField", + "label": "Data Table", + "default": false, + "description": "Whether to display the interactive data table" + }, + "include_search": { + "type": "CheckboxField", + "label": "Search Box", + "renderTrigger": true, + "default": false, + "description": "Whether to include a client side search box" + }, + "table_filter": { + "type": "CheckboxField", + "label": "Table Filter", + "default": false, + "description": "Whether to apply filter when table cell is clicked" + }, + "show_bubbles": { + "type": 
"CheckboxField", + "label": "Show Bubbles", + "default": false, + "renderTrigger": true, + "description": "Whether to display bubbles on top of countries" + }, + "show_legend": { + "type": "CheckboxField", + "label": "Legend", + "renderTrigger": true, + "default": true, + "description": "Whether to display the legend (toggles)" + }, + "x_axis_showminmax": { + "type": "CheckboxField", + "label": "X bounds", + "renderTrigger": true, + "default": true, + "description": "Whether to display the min and max values of the X axis" + }, + "rich_tooltip": { + "type": "CheckboxField", + "label": "Rich Tooltip", + "renderTrigger": true, + "default": true, + "description": "The rich tooltip shows a list of all series for that point in time" + }, + "y_axis_zero": { + "type": "CheckboxField", + "label": "Y Axis Zero", + "default": false, + "renderTrigger": true, + "description": "Force the Y axis to start at 0 instead of the minimum value" + }, + "y_log_scale": { + "type": "CheckboxField", + "label": "Y Log Scale", + "default": false, + "renderTrigger": true, + "description": "Use a log scale for the Y axis" + }, + "x_log_scale": { + "type": "CheckboxField", + "label": "X Log Scale", + "default": false, + "renderTrigger": true, + "description": "Use a log scale for the X axis" + }, + "donut": { + "type": "CheckboxField", + "label": "Donut", + "default": false, + "description": "Do you want a donut or a pie?" + }, + "labels_outside": { + "type": "CheckboxField", + "label": "Put labels outside", + "default": true, + "description": "Put the labels outside the pie?" 
+ }, + "contribution": { + "type": "CheckboxField", + "label": "Contribution", + "default": false, + "description": "Compute the contribution to the total" + }, + "num_period_compare": { + "type": "TextField", + "label": "Period Ratio", + "default": "", + "isInt": true, + "description": "[integer] Number of period to compare against, this is relative to the granularity selected" + }, + "period_ratio_type": { + "type": "SelectField", + "label": "Period Ratio Type", + "default": "growth", + "choices": [ + [ + "factor", + "factor" + ], + [ + "growth", + "growth" + ], + [ + "value", + "value" + ] + ], + "description": "`factor` means (new/previous), `growth` is ((new/previous) - 1), `value` is (new-previous)" + }, + "time_compare": { + "type": "TextField", + "label": "Time Shift", + "default": null, + "description": "Overlay a timeseries from a relative time period. Expects relative time delta in natural language (example: 24 hours, 7 days, 56 weeks, 365 days" + }, + "subheader": { + "type": "TextField", + "label": "Subheader", + "description": "Description text that shows up below your Big Number" + }, + "mapbox_label": { + "type": "SelectField", + "multi": true, + "label": "label", + "default": [], + "description": "`count` is COUNT(*) if a group by is used. Numerical columns will be aggregated with the aggregator. Non-numerical columns will be used to label points. Leave empty to get a count of points in each cluster." 
+ }, + "mapbox_style": { + "type": "SelectField", + "label": "Map Style", + "choices": [ + [ + "mapbox://styles/mapbox/streets-v9", + "Streets" + ], + [ + "mapbox://styles/mapbox/dark-v9", + "Dark" + ], + [ + "mapbox://styles/mapbox/light-v9", + "Light" + ], + [ + "mapbox://styles/mapbox/satellite-streets-v9", + "Satellite Streets" + ], + [ + "mapbox://styles/mapbox/satellite-v9", + "Satellite" + ], + [ + "mapbox://styles/mapbox/outdoors-v9", + "Outdoors" + ] + ], + "default": "mapbox://styles/mapbox/streets-v9", + "description": "Base layer map style" + }, + "clustering_radius": { + "type": "SelectField", + "freeForm": true, + "label": "Clustering Radius", + "default": "60", + "choices": [ + [ + "0", + "0" + ], + [ + "20", + "20" + ], + [ + "40", + "40" + ], + [ + "60", + "60" + ], + [ + "80", + "80" + ], + [ + "100", + "100" + ], + [ + "200", + "200" + ], + [ + "500", + "500" + ], + [ + "1000", + "1000" + ] + ], + "description": "The radius (in pixels) the algorithm uses to define a cluster. Choose 0 to turn off clustering, but beware that a large number of points (>1000) will cause lag." + }, + "point_radius": { + "type": "SelectField", + "label": "Point Radius", + "default": "Auto", + "description": "The radius of individual points (ones that are not in a cluster). Either a numerical column or `Auto`, which scales the point based on the largest cluster" + }, + "point_radius_unit": { + "type": "SelectField", + "label": "Point Radius Unit", + "default": "Pixels", + "choices": [ + [ + "Pixels", + "Pixels" + ], + [ + "Miles", + "Miles" + ], + [ + "Kilometers", + "Kilometers" + ] + ], + "description": "The unit of measure for the specified point radius" + }, + "global_opacity": { + "type": "TextField", + "label": "Opacity", + "default": 1, + "isFloat": true, + "description": "Opacity of all clusters, points, and labels. Between 0 and 1." 
+ }, + "viewport_zoom": { + "type": "TextField", + "label": "Zoom", + "isFloat": true, + "default": 11, + "description": "Zoom level of the map", + "places": 8 + }, + "viewport_latitude": { + "type": "TextField", + "label": "Default latitude", + "default": 37.772123, + "isFloat": true, + "description": "Latitude of default viewport", + "places": 8 + }, + "viewport_longitude": { + "type": "TextField", + "label": "Default longitude", + "default": -122.405293, + "isFloat": true, + "description": "Longitude of default viewport", + "places": 8 + }, + "render_while_dragging": { + "type": "CheckboxField", + "label": "Live render", + "default": true, + "description": "Points and clusters will update as viewport is being changed" + }, + "mapbox_color": { + "type": "SelectField", + "freeForm": true, + "label": "RGB Color", + "default": "rgb(0, 122, 135)", + "choices": [ + [ + "rgb(0, 139, 139)", + "Dark Cyan" + ], + [ + "rgb(128, 0, 128)", + "Purple" + ], + [ + "rgb(255, 215, 0)", + "Gold" + ], + [ + "rgb(69, 69, 69)", + "Dim Gray" + ], + [ + "rgb(220, 20, 60)", + "Crimson" + ], + [ + "rgb(34, 139, 34)", + "Forest Green" + ] + ], + "description": "The color for points and clusters in RGB" + }, + "ranges": { + "type": "TextField", + "label": "Ranges", + "default": "", + "description": "Ranges to highlight with shading" + }, + "range_labels": { + "type": "TextField", + "label": "Range labels", + "default": "", + "description": "Labels for the ranges" + }, + "markers": { + "type": "TextField", + "label": "Markers", + "default": "", + "description": "List of values to mark with triangles" + }, + "marker_labels": { + "type": "TextField", + "label": "Marker labels", + "default": "", + "description": "Labels for the markers" + }, + "marker_lines": { + "type": "TextField", + "label": "Marker lines", + "default": "", + "description": "List of values to mark with lines" + }, + "marker_line_labels": { + "type": "TextField", + "label": "Marker line labels", + "default": "", + 
"description": "Labels for the marker lines" + }, + "filters": { + "type": "FilterField", + "label": "", + "default": [], + "description": "" + }, + "having_filters": { + "type": "FilterField", + "label": "", + "default": [], + "description": "" + }, + "slice_id": { + "type": "HiddenField", + "label": "Slice ID", + "hidden": true, + "description": "The id of the active slice" + } + } +} \ No newline at end of file diff --git a/superset/assets/javascripts/components/FaveStar.jsx b/superset/assets/javascripts/components/FaveStar.jsx index 4e6afa2883..ce19fcb945 100644 --- a/superset/assets/javascripts/components/FaveStar.jsx +++ b/superset/assets/javascripts/components/FaveStar.jsx @@ -3,7 +3,7 @@ import cx from 'classnames'; import TooltipWrapper from './TooltipWrapper'; const propTypes = { - sliceId: PropTypes.string.isRequired, + sliceId: PropTypes.number.isRequired, actions: PropTypes.object.isRequired, isStarred: PropTypes.bool.isRequired, }; diff --git a/superset/assets/javascripts/dashboard/Dashboard.jsx b/superset/assets/javascripts/dashboard/Dashboard.jsx index 08a1fe2b21..687910679d 100644 --- a/superset/assets/javascripts/dashboard/Dashboard.jsx +++ b/superset/assets/javascripts/dashboard/Dashboard.jsx @@ -13,7 +13,6 @@ import Header from './components/Header'; require('bootstrap'); require('../../stylesheets/dashboard.css'); -require('../superset-select2.js'); export function getInitialState(dashboardData, context) { const dashboard = Object.assign({ context }, utils.controllerInterface, dashboardData); @@ -83,9 +82,6 @@ function initDashboardView(dashboard) { ); $('div.grid-container').css('visibility', 'visible'); - $('.select2').select2({ - dropdownAutoWidth: true, - }); $('div.widget').click(function (e) { const $this = $(this); const $target = $(e.target); @@ -165,9 +161,7 @@ export function dashboardContainer(dashboard) { } }, effectiveExtraFilters(sliceId) { - // Summarized filter, not defined by sliceId - // returns k=field, v=array of values - 
const f = {}; + const f = []; const immuneSlices = this.metadata.filter_immune_slices || []; if (sliceId && immuneSlices.includes(sliceId)) { // The slice is immune to dashboard fiterls @@ -185,7 +179,11 @@ export function dashboardContainer(dashboard) { for (const filteringSliceId in this.filters) { for (const field in this.filters[filteringSliceId]) { if (!immuneToFields.includes(field)) { - f[field] = this.filters[filteringSliceId][field]; + f.push({ + col: field, + op: 'in', + val: this.filters[filteringSliceId][field], + }); } } } diff --git a/superset/assets/javascripts/dashboard/components/GridLayout.jsx b/superset/assets/javascripts/dashboard/components/GridLayout.jsx index e2a118c6da..bfbd7f64b0 100644 --- a/superset/assets/javascripts/dashboard/components/GridLayout.jsx +++ b/superset/assets/javascripts/dashboard/components/GridLayout.jsx @@ -98,7 +98,7 @@ class GridLayout extends React.Component { id={'slice_' + slice.slice_id} key={slice.slice_id} data-slice-id={slice.slice_id} - className={`widget ${slice.viz_name}`} + className={`widget ${slice.form_data.viz_type}`} > -
+
{!this.props.dashboard.context.standalone_mode && } diff --git a/superset/assets/javascripts/dashboard/components/SliceCell.jsx b/superset/assets/javascripts/dashboard/components/SliceCell.jsx index 10abe1cafc..798753def8 100644 --- a/superset/assets/javascripts/dashboard/components/SliceCell.jsx +++ b/superset/assets/javascripts/dashboard/components/SliceCell.jsx @@ -67,13 +67,13 @@ function SliceCell({ expandedSlices, removeSlice, slice }) {
-
+
loading -
+
diff --git a/superset/assets/javascripts/explore/components/DisplayQueryButton.jsx b/superset/assets/javascripts/explore/components/DisplayQueryButton.jsx deleted file mode 100644 index 990c0930cd..0000000000 --- a/superset/assets/javascripts/explore/components/DisplayQueryButton.jsx +++ /dev/null @@ -1,25 +0,0 @@ -import React, { PropTypes } from 'react'; -import ModalTrigger from './../../components/ModalTrigger'; - -const propTypes = { - query: PropTypes.string, -}; - -const defaultProps = { - query: '', -}; - -export default function DisplayQueryButton({ query }) { - const modalBody = (
{query}
); - return ( - Query} - modalTitle="Query" - modalBody={modalBody} - /> - ); -} - -DisplayQueryButton.propTypes = propTypes; -DisplayQueryButton.defaultProps = defaultProps; diff --git a/superset/assets/javascripts/explore/components/ExploreActionButtons.jsx b/superset/assets/javascripts/explore/components/ExploreActionButtons.jsx deleted file mode 100644 index b884100061..0000000000 --- a/superset/assets/javascripts/explore/components/ExploreActionButtons.jsx +++ /dev/null @@ -1,46 +0,0 @@ -import React, { PropTypes } from 'react'; -import cx from 'classnames'; -import URLShortLinkButton from './URLShortLinkButton'; -import EmbedCodeButton from './EmbedCodeButton'; -import DisplayQueryButton from './DisplayQueryButton'; - -const propTypes = { - canDownload: PropTypes.oneOfType([PropTypes.string, PropTypes.bool]).isRequired, - slice: PropTypes.object.isRequired, - query: PropTypes.string, -}; - -export default function ExploreActionButtons({ canDownload, slice, query }) { - const exportToCSVClasses = cx('btn btn-default btn-sm', { - 'disabled disabledButton': !canDownload, - }); - return ( -
- - - - - - .json - - - - .csv - - - -
- ); -} - -ExploreActionButtons.propTypes = propTypes; diff --git a/superset/assets/javascripts/explore/explore.jsx b/superset/assets/javascripts/explore/explore.jsx deleted file mode 100644 index 67a837897a..0000000000 --- a/superset/assets/javascripts/explore/explore.jsx +++ /dev/null @@ -1,403 +0,0 @@ -// Javascript for the explorer page -// Init explorer view -> load vis dependencies -> read data (from dynamic html) -> render slice -// nb: to add a new vis, you must also add a Python fn in viz.py -// -// js -const $ = window.$ = require('jquery'); -const px = require('./../modules/superset.js'); -const utils = require('./../modules/utils.js'); -const jQuery = window.jQuery = require('jquery'); // eslint-disable-line - -import React from 'react'; -import ReactDOM from 'react-dom'; -import QueryAndSaveBtns from './components/QueryAndSaveBtns.jsx'; -import ExploreActionButtons from './components/ExploreActionButtons.jsx'; - -require('jquery-ui'); -$.widget.bridge('uitooltip', $.ui.tooltip); // Shutting down jq-ui tooltips -require('bootstrap'); - -require('./../superset-select2.js'); - -// css -require('../../vendor/pygments.css'); -require('../../stylesheets/explore.css'); - -let slice; - -const getPanelClass = function (fieldPrefix) { - return (fieldPrefix === 'flt' ? 
'filter' : 'having') + '_panel'; -}; - -function prepForm() { - // Assigning the right id to form elements in filters - const fixId = function ($filter, fieldPrefix, i) { - $filter.attr('id', function () { - return fieldPrefix + '_' + i; - }); - - ['col', 'op', 'eq'].forEach(function (fieldMiddle) { - const fieldName = fieldPrefix + '_' + fieldMiddle; - $filter.find('[id^=' + fieldName + '_]') - .attr('id', function () { - return fieldName + '_' + i; - }) - .attr('name', function () { - return fieldName + '_' + i; - }); - }); - }; - - ['flt', 'having'].forEach(function (fieldPrefix) { - let i = 1; - $('#' + getPanelClass(fieldPrefix) + ' #filters > div').each(function () { - fixId($(this), fieldPrefix, i); - i++; - }); - }); -} - -function query(forceUpdate, pushState) { - let force = forceUpdate; - if (force === undefined) { - force = false; - } - $('.query-and-save button').attr('disabled', 'disabled'); - if (force) { // Don't hide the alert message when the page is just loaded - $('div.alert').remove(); - } - $('#is_cached').hide(); - prepForm(); - - if (pushState !== false) { - // update the url after prepForm() fix the field ids - history.pushState({}, document.title, slice.querystring()); - } - slice.container.html(''); - slice.render(force); -} - -function saveSlice() { - const action = $('input[name=rdo_save]:checked').val(); - if (action === 'saveas') { - const sliceName = $('input[name=new_slice_name]').val(); - if (sliceName === '') { - utils.showModal({ - title: 'Error', - body: 'You must pick a name for the new slice', - }); - return; - } - document.getElementById('slice_name').value = sliceName; - } - const addToDash = $('input[name=addToDash]:checked').val(); - if (addToDash === 'existing' && $('#save_to_dashboard_id').val() === '') { - utils.showModal({ - title: 'Error', - body: 'You must pick an existing dashboard', - }); - return; - } else if (addToDash === 'new' && $('input[name=new_dashboard_name]').val() === '') { - utils.showModal({ - title: 
'Error', - body: 'Please enter a name for the new dashboard', - }); - return; - } - $('#action').val(action); - prepForm(); - $('#query').submit(); -} - -function initExploreView() { - function getCollapsedFieldsets() { - let collapsedFieldsets = $('#collapsedFieldsets').val(); - - if (collapsedFieldsets !== undefined && collapsedFieldsets !== '') { - collapsedFieldsets = collapsedFieldsets.split('||'); - } else { - collapsedFieldsets = []; - } - return collapsedFieldsets; - } - - function toggleFieldset(legend, animation) { - const parent = legend.parent(); - const fieldset = parent.find('.legend_label').text(); - const collapsedFieldsets = getCollapsedFieldsets(); - let index; - - if (parent.hasClass('collapsed')) { - if (animation) { - parent.find('.panel-body').slideDown(); - } else { - parent.find('.panel-body').show(); - } - parent.removeClass('collapsed'); - parent.find('span.collapser').text('[-]'); - - // removing from array, js is overcomplicated - index = collapsedFieldsets.indexOf(fieldset); - if (index !== -1) { - collapsedFieldsets.splice(index, 1); - } - } else { // not collapsed - if (animation) { - parent.find('.panel-body').slideUp(); - } else { - parent.find('.panel-body').hide(); - } - - parent.addClass('collapsed'); - parent.find('span.collapser').text('[+]'); - index = collapsedFieldsets.indexOf(fieldset); - if (index === -1 && fieldset !== '' && fieldset !== undefined) { - collapsedFieldsets.push(fieldset); - } - } - - $('#collapsedFieldsets').val(collapsedFieldsets.join('||')); - } - - px.initFavStars(); - - $('#viz_type').change(function () { - $('#query').submit(); - }); - - $('#datasource_id').change(function () { - window.location = $(this).find('option:selected').attr('url'); - }); - - const collapsedFieldsets = getCollapsedFieldsets(); - for (let i = 0; i < collapsedFieldsets.length; i++) { - toggleFieldset($('legend:contains("' + collapsedFieldsets[i] + '")'), false); - } - function formatViz(viz) { - const url = 
`/static/assets/images/viz_thumbnails/${viz.id}.png`; - const noImg = '/static/assets/images/noimg.png'; - return $( - `` + - `${viz.text}` - ); - } - - $('.select2').select2({ - dropdownAutoWidth: true, - }); - $('.select2Sortable').select2({ - dropdownAutoWidth: true, - }); - $('.select2-with-images').select2({ - dropdownAutoWidth: true, - dropdownCssClass: 'bigdrop', - formatResult: formatViz, - }); - $('.select2Sortable').select2Sortable({ - bindOrder: 'sortableStop', - }); - $('form').show(); - $('[data-toggle="tooltip"]').tooltip({ container: 'body' }); - $('.ui-helper-hidden-accessible').remove(); // jQuery-ui 1.11+ creates a div for every tooltip - - function addFilter(i, fieldPrefix) { - const cp = $('#' + fieldPrefix + '0').clone(); - $(cp).appendTo('#' + getPanelClass(fieldPrefix) + ' #filters'); - $(cp).show(); - if (i !== undefined) { - $(cp).find('#' + fieldPrefix + '_eq_0').val(px.getParam(fieldPrefix + '_eq_' + i)); - $(cp).find('#' + fieldPrefix + '_op_0').val(px.getParam(fieldPrefix + '_op_' + i)); - $(cp).find('#' + fieldPrefix + '_col_0').val(px.getParam(fieldPrefix + '_col_' + i)); - } - $(cp).find('select').select2(); - $(cp).find('.remove').click(function () { - $(this) - .parent() - .parent() - .remove(); - }); - } - - function setFilters() { - ['flt', 'having'].forEach(function (prefix) { - for (let i = 1; i < 10; i++) { - const col = px.getParam(prefix + '_col_' + i); - if (col !== '') { - addFilter(i, prefix); - } - } - }); - } - setFilters(); - - $(window).bind('popstate', function () { - // Browser back button - const returnLocation = history.location || document.location; - // Could do something more lightweight here, but we're not optimizing - // for the use of the back button anyways - returnLocation.reload(); - }); - - $('#filter_panel #plus').click(function () { - addFilter(undefined, 'flt'); - }); - $('#having_panel #plus').click(function () { - addFilter(undefined, 'having'); - }); - - function createChoices(term, data) { - const 
filtered = $(data).filter(function () { - return this.text.localeCompare(term) === 0; - }); - if (filtered.length === 0) { - return { - id: term, - text: term, - }; - } - return {}; - } - - function initSelectionToValue(element, callback) { - callback({ - id: element.val(), - text: element.val(), - }); - } - - $('.select2_freeform').each(function () { - const parent = $(this).parent(); - const name = $(this).attr('name'); - const l = []; - let selected = ''; - for (let i = 0; i < this.options.length; i++) { - l.push({ - id: this.options[i].value, - text: this.options[i].text, - }); - if (this.options[i].selected) { - selected = this.options[i].value; - } - } - parent.append( - `` - ); - $(`input[name='${name}']`).select2({ - createSearchChoice: createChoices, - initSelection: initSelectionToValue, - dropdownAutoWidth: true, - multiple: false, - data: l, - }); - $(this).remove(); - }); - - function prepSaveDialog() { - const setButtonsState = function () { - const addToDash = $('input[name=addToDash]:checked').val(); - if (addToDash === 'existing' || addToDash === 'new') { - $('.gotodash').removeAttr('disabled'); - } else { - $('.gotodash').prop('disabled', true); - } - }; - const url = '/dashboardmodelviewasync/api/read?_flt_0_owners=' + $('#userid').val(); - $.get(url, function (data) { - const choices = []; - for (let i = 0; i < data.pks.length; i++) { - choices.push({ id: data.pks[i], text: data.result[i].dashboard_title }); - } - $('#save_to_dashboard_id').select2({ - data: choices, - dropdownAutoWidth: true, - }).on('select2-selecting', function () { - $('#addToDash_existing').prop('checked', true); - setButtonsState(); - }); - }); - - $('input[name=addToDash]').change(setButtonsState); - $("input[name='new_dashboard_name']").on('focus', function () { - $('#add_to_new_dash').prop('checked', true); - setButtonsState(); - }); - $("input[name='new_slice_name']").on('focus', function () { - $('#save_as_new').prop('checked', true); - setButtonsState(); - }); - - 
$('#btn_modal_save').on('click', () => saveSlice()); - - $('#btn_modal_save_goto_dash').click(() => { - document.getElementById('goto_dash').value = 'true'; - saveSlice(); - }); - } - prepSaveDialog(); -} - -function renderExploreActions() { - const exploreActionsEl = document.getElementById('js-explore-actions'); - ReactDOM.render( - , - exploreActionsEl - ); -} - -function initComponents() { - const queryAndSaveBtnsEl = document.getElementById('js-query-and-save-btns'); - ReactDOM.render( - query(true)} - />, - queryAndSaveBtnsEl - ); - renderExploreActions(); -} - -let exploreController = { - type: 'slice', - done: (sliceObj) => { - slice = sliceObj; - renderExploreActions(); - const cachedSelector = $('#is_cached'); - if (slice.data !== undefined && slice.data.is_cached) { - cachedSelector - .attr( - 'title', - `Served from data cached at ${slice.data.cached_dttm}. Click [Query] to force refresh`) - .show() - .tooltip('fixTitle'); - } else { - cachedSelector.hide(); - } - }, - error: (sliceObj) => { - slice = sliceObj; - renderExploreActions(); - }, -}; -exploreController = Object.assign({}, utils.controllerInterface, exploreController); - - -$(document).ready(function () { - const data = $('.slice').data('slice'); - - initExploreView(); - - slice = px.Slice(data, exploreController); - - // call vis render method, which issues ajax - // calls render on the slice for the first time - query(false, false); - - slice.bindResizeToWindowResize(); - - initComponents(); -}); diff --git a/superset/assets/javascripts/explorev2/actions/exploreActions.js b/superset/assets/javascripts/explorev2/actions/exploreActions.js index 29ea8e40df..7fba28a1e6 100644 --- a/superset/assets/javascripts/explorev2/actions/exploreActions.js +++ b/superset/assets/javascripts/explorev2/actions/exploreActions.js @@ -13,42 +13,88 @@ export function setDatasource(datasource) { return { type: SET_DATASOURCE, datasource }; } -export const FETCH_STARTED = 'FETCH_STARTED'; -export function 
fetchStarted() { - return { type: FETCH_STARTED }; +export const SET_DATASOURCES = 'SET_DATASOURCES'; +export function setDatasources(datasources) { + return { type: SET_DATASOURCES, datasources }; } -export const FETCH_SUCCEEDED = 'FETCH_SUCCEEDED'; -export function fetchSucceeded() { - return { type: FETCH_SUCCEEDED }; +export const FETCH_DATASOURCE_STARTED = 'FETCH_DATASOURCE_STARTED'; +export function fetchDatasourceStarted() { + return { type: FETCH_DATASOURCE_STARTED }; } -export const FETCH_FAILED = 'FETCH_FAILED'; -export function fetchFailed(error) { - return { type: FETCH_FAILED, error }; +export const FETCH_DATASOURCE_SUCCEEDED = 'FETCH_DATASOURCE_SUCCEEDED'; +export function fetchDatasourceSucceeded() { + return { type: FETCH_DATASOURCE_SUCCEEDED }; } -export function fetchDatasourceMetadata(datasourceId, datasourceType) { +export const FETCH_DATASOURCE_FAILED = 'FETCH_DATASOURCE_FAILED'; +export function fetchDatasourceFailed(error) { + return { type: FETCH_DATASOURCE_FAILED, error }; +} + +export const FETCH_DATASOURCES_STARTED = 'FETCH_DATASOURCES_STARTED'; +export function fetchDatasourcesStarted() { + return { type: FETCH_DATASOURCES_STARTED }; +} + +export const FETCH_DATASOURCES_SUCCEEDED = 'FETCH_DATASOURCES_SUCCEEDED'; +export function fetchDatasourcesSucceeded() { + return { type: FETCH_DATASOURCES_SUCCEEDED }; +} + +export const FETCH_DATASOURCES_FAILED = 'FETCH_DATASOURCES_FAILED'; +export function fetchDatasourcesFailed(error) { + return { type: FETCH_DATASOURCES_FAILED, error }; +} + +export const RESET_FIELDS = 'RESET_FIELDS'; +export function resetFields() { + return { type: RESET_FIELDS }; +} + +export const TRIGGER_QUERY = 'TRIGGER_QUERY'; +export function triggerQuery() { + return { type: TRIGGER_QUERY }; +} + +export function fetchDatasourceMetadata(datasourceKey, alsoTriggerQuery = false) { return function (dispatch) { - dispatch(fetchStarted()); + dispatch(fetchDatasourceStarted()); + const url = 
`/superset/fetch_datasource_metadata?datasourceKey=${datasourceKey}`; + $.ajax({ + type: 'GET', + url, + success: (data) => { + dispatch(setDatasource(data)); + dispatch(fetchDatasourceSucceeded()); + dispatch(resetFields()); + if (alsoTriggerQuery) { + dispatch(triggerQuery()); + } + }, + error(error) { + dispatch(fetchDatasourceFailed(error.responseJSON.error)); + }, + }); + }; +} - if (datasourceId) { - const params = [`datasource_id=${datasourceId}`, `datasource_type=${datasourceType}`]; - const url = '/superset/fetch_datasource_metadata?' + params.join('&'); - $.ajax({ - type: 'GET', - url, - success: (data) => { - dispatch(setDatasource(data)); - dispatch(fetchSucceeded()); - }, - error(error) { - dispatch(fetchFailed(error.responseJSON.error)); - }, - }); - } else { - dispatch(fetchFailed('Please select a datasource')); - } +export function fetchDatasources() { + return function (dispatch) { + dispatch(fetchDatasourcesStarted()); + const url = '/superset/datasources/'; + $.ajax({ + type: 'GET', + url, + success: (data) => { + dispatch(setDatasources(data)); + dispatch(fetchDatasourcesSucceeded()); + }, + error(error) { + dispatch(fetchDatasourcesFailed(error.responseJSON.error)); + }, + }); }; } @@ -85,8 +131,8 @@ export function setFieldValue(fieldName, value, validationErrors) { } export const CHART_UPDATE_STARTED = 'CHART_UPDATE_STARTED'; -export function chartUpdateStarted() { - return { type: CHART_UPDATE_STARTED }; +export function chartUpdateStarted(queryRequest) { + return { type: CHART_UPDATE_STARTED, queryRequest }; } export const CHART_UPDATE_SUCCEEDED = 'CHART_UPDATE_SUCCEEDED'; @@ -94,6 +140,14 @@ export function chartUpdateSucceeded(queryResponse) { return { type: CHART_UPDATE_SUCCEEDED, queryResponse }; } +export const CHART_UPDATE_STOPPED = 'CHART_UPDATE_STOPPED'; +export function chartUpdateStopped(queryRequest) { + if (queryRequest) { + queryRequest.abort(); + } + return { type: CHART_UPDATE_STOPPED }; +} + export const CHART_UPDATE_FAILED 
= 'CHART_UPDATE_FAILED'; export function chartUpdateFailed(queryResponse) { return { type: CHART_UPDATE_FAILED, queryResponse }; @@ -126,7 +180,7 @@ export function fetchDashboardsSucceeded(choices) { export const FETCH_DASHBOARDS_FAILED = 'FETCH_DASHBOARDS_FAILED'; export function fetchDashboardsFailed(userId) { - return { type: FETCH_FAILED, userId }; + return { type: FETCH_DASHBOARDS_FAILED, userId }; } export function fetchDashboards(userId) { @@ -177,12 +231,19 @@ export function updateChartStatus(status) { export const RUN_QUERY = 'RUN_QUERY'; export function runQuery(formData, datasourceType) { return function (dispatch) { - dispatch(updateChartStatus('loading')); const url = getExploreUrl(formData, datasourceType, 'json'); - $.getJSON(url, function (queryResponse) { + const queryRequest = $.getJSON(url, function (queryResponse) { dispatch(chartUpdateSucceeded(queryResponse)); }).fail(function (err) { - dispatch(chartUpdateFailed(err)); + if (err.statusText !== 'abort') { + dispatch(chartUpdateFailed(err.responseJSON)); + } }); + dispatch(chartUpdateStarted(queryRequest)); }; } + +export const RENDER_TRIGGERED = 'RENDER_TRIGGERED'; +export function renderTriggered() { + return { type: RENDER_TRIGGERED }; +} diff --git a/superset/assets/javascripts/explorev2/components/ChartContainer.jsx b/superset/assets/javascripts/explorev2/components/ChartContainer.jsx index a025d50c2d..22c5b7552a 100644 --- a/superset/assets/javascripts/explorev2/components/ChartContainer.jsx +++ b/superset/assets/javascripts/explorev2/components/ChartContainer.jsx @@ -1,13 +1,15 @@ import $ from 'jquery'; import React, { PropTypes } from 'react'; import { connect } from 'react-redux'; -import { Panel, Alert } from 'react-bootstrap'; +import { Panel, Alert, Collapse } from 'react-bootstrap'; import visMap from '../../../visualizations/main'; import { d3format } from '../../modules/utils'; -import ExploreActionButtons from '../../explore/components/ExploreActionButtons'; +import 
ExploreActionButtons from './ExploreActionButtons'; import FaveStar from '../../components/FaveStar'; import TooltipWrapper from '../../components/TooltipWrapper'; import Timer from '../../components/Timer'; +import { getExploreUrl } from '../exploreUtils'; +import { getFormDataFromFields } from '../stores/store'; const CHART_STATUS_MAP = { failed: 'danger', @@ -17,20 +19,20 @@ const CHART_STATUS_MAP = { const propTypes = { actions: PropTypes.object.isRequired, - can_download: PropTypes.bool.isRequired, - slice_id: PropTypes.string.isRequired, - slice_name: PropTypes.string.isRequired, - viz_type: PropTypes.string.isRequired, - height: PropTypes.string.isRequired, - containerId: PropTypes.string.isRequired, - query: PropTypes.string, - column_formats: PropTypes.object, - chartStatus: PropTypes.string, - isStarred: PropTypes.bool.isRequired, - chartUpdateStartTime: PropTypes.number.isRequired, - chartUpdateEndTime: PropTypes.number, alert: PropTypes.string, + can_download: PropTypes.bool.isRequired, + chartStatus: PropTypes.string, + chartUpdateEndTime: PropTypes.number, + chartUpdateStartTime: PropTypes.number.isRequired, + column_formats: PropTypes.object, + containerId: PropTypes.string.isRequired, + height: PropTypes.string.isRequired, + isStarred: PropTypes.bool.isRequired, + slice: PropTypes.object, table_name: PropTypes.string, + viz_type: PropTypes.string.isRequired, + formData: PropTypes.object, + latestQueryFormData: PropTypes.object, }; class ChartContainer extends React.PureComponent { @@ -38,14 +40,16 @@ class ChartContainer extends React.PureComponent { super(props); this.state = { selector: `#${props.containerId}`, + showStackTrace: false, }; } renderViz() { + this.props.actions.renderTriggered(); const mockSlice = this.getMockedSliceObject(); + this.setState({ mockSlice }); try { visMap[this.props.viz_type](mockSlice, this.props.queryResponse); - this.setState({ mockSlice }); } catch (e) { this.props.actions.chartRenderingFailed(e); } @@ -53,8 +57,13 
@@ class ChartContainer extends React.PureComponent { componentDidUpdate(prevProps) { if ( - prevProps.queryResponse !== this.props.queryResponse || - prevProps.height !== this.props.height + ( + prevProps.queryResponse !== this.props.queryResponse || + prevProps.height !== this.props.height || + this.props.triggerRender + ) && !this.props.queryResponse.error + && this.props.chartStatus !== 'failed' + && this.props.chartStatus !== 'stopped' ) { this.renderViz(); } @@ -62,10 +71,15 @@ class ChartContainer extends React.PureComponent { getMockedSliceObject() { const props = this.props; + const getHeight = () => { + const headerHeight = this.props.standalone ? 0 : 100; + return parseInt(props.height, 10) - headerHeight; + }; return { - viewSqlQuery: props.query, + viewSqlQuery: this.props.queryResponse.query, containerId: props.containerId, selector: this.state.selector, + formData: this.props.formData, container: { html: (data) => { // this should be a callback to clear the contents of the slice container @@ -77,7 +91,7 @@ class ChartContainer extends React.PureComponent { // should call callback to adjust height of chart $(this.state.selector).css(dim, size); }, - height: () => parseInt(props.height, 10) - 100, + height: getHeight, show: () => { }, get: (n) => ($(this.state.selector).get(n)), find: (classname) => ($(this.state.selector).find(classname)), @@ -85,7 +99,7 @@ class ChartContainer extends React.PureComponent { width: () => this.chartContainerRef.getBoundingClientRect().width, - height: () => parseInt(props.height, 10) - 100, + height: getHeight, setFilter: () => { // set filter according to data in store @@ -111,9 +125,10 @@ class ChartContainer extends React.PureComponent { }, data: { - csv_endpoint: props.queryResponse.csv_endpoint, - json_endpoint: props.queryResponse.json_endpoint, - standalone_endpoint: props.queryResponse.standalone_endpoint, + csv_endpoint: getExploreUrl(this.props.formData, this.props.datasource_type, 'csv'), + json_endpoint: 
getExploreUrl(this.props.formData, this.props.datasource_type, 'json'), + standalone_endpoint: getExploreUrl( + this.props.formData, this.props.datasource_type, 'standalone'), }, }; @@ -125,26 +140,45 @@ class ChartContainer extends React.PureComponent { renderChartTitle() { let title; - if (this.props.slice_name) { - title = this.props.slice_name; + if (this.props.slice) { + title = this.props.slice.slice_name; } else { title = `[${this.props.table_name}] - untitled`; } return title; } + renderAlert() { + const msg = ( +
+ {this.props.alert} + +
); + return ( +
+ this.setState({ showStackTrace: !this.state.showStackTrace })} + > + {msg} + + {this.props.queryResponse && this.props.queryResponse.stacktrace && + +
+              {this.props.queryResponse.stacktrace}
+            
+
+ } +
); + } + renderChart() { if (this.props.alert) { - return ( - - {this.props.alert} - - - ); + return this.renderAlert(); } const loading = this.props.chartStatus === 'loading'; return ( @@ -170,6 +204,9 @@ class ChartContainer extends React.PureComponent { } render() { + if (this.props.standalone) { + return this.renderChart(); + } return (
{this.renderChartTitle()} - {this.props.slice_id && + {this.props.slice && @@ -195,7 +232,7 @@ class ChartContainer extends React.PureComponent { > @@ -208,16 +245,15 @@ class ChartContainer extends React.PureComponent { startTime={this.props.chartUpdateStartTime} endTime={this.props.chartUpdateEndTime} isRunning={this.props.chartStatus === 'loading'} - state={CHART_STATUS_MAP[this.props.chartStatus]} + status={CHART_STATUS_MAP[this.props.chartStatus]} style={{ fontSize: '10px', marginRight: '5px' }} /> - {this.state.mockSlice && - - } +
} @@ -232,21 +268,24 @@ class ChartContainer extends React.PureComponent { ChartContainer.propTypes = propTypes; function mapStateToProps(state) { + const formData = getFormDataFromFields(state.fields); return { - containerId: `slice-container-${state.viz.form_data.slice_id}`, - slice_id: state.viz.form_data.slice_id, - slice_name: state.viz.form_data.slice_name, - viz_type: state.viz.form_data.viz_type, - can_download: state.can_download, - chartUpdateStartTime: state.chartUpdateStartTime, - chartUpdateEndTime: state.chartUpdateEndTime, - query: state.viz.query, - column_formats: state.viz.column_formats, - chartStatus: state.chartStatus, - isStarred: state.isStarred, alert: state.chartAlert, - table_name: state.viz.form_data.datasource_name, + can_download: state.can_download, + chartStatus: state.chartStatus, + chartUpdateEndTime: state.chartUpdateEndTime, + chartUpdateStartTime: state.chartUpdateStartTime, + column_formats: state.datasource ? state.datasource.column_formats : null, + containerId: state.slice ? 
`slice-container-${state.slice.slice_id}` : 'slice-container', + formData, + latestQueryFormData: state.latestQueryFormData, + isStarred: state.isStarred, queryResponse: state.queryResponse, + slice: state.slice, + standalone: state.standalone, + table_name: formData.datasource_name, + viz_type: formData.viz_type, + triggerRender: state.triggerRender, }; } diff --git a/superset/assets/javascripts/explorev2/components/ControlHeader.jsx b/superset/assets/javascripts/explorev2/components/ControlHeader.jsx index 8d2ca210ce..66940da828 100644 --- a/superset/assets/javascripts/explorev2/components/ControlHeader.jsx +++ b/superset/assets/javascripts/explorev2/components/ControlHeader.jsx @@ -6,41 +6,72 @@ const propTypes = { label: PropTypes.string.isRequired, description: PropTypes.string, validationErrors: PropTypes.array, + renderTrigger: PropTypes.bool, + rightNode: PropTypes.node, }; const defaultProps = { - description: null, validationErrors: [], + renderTrigger: false, }; -export default function ControlHeader({ label, description, validationErrors }) { +export default function ControlHeader({ + label, description, validationErrors, renderTrigger, rightNode }) { const hasError = (validationErrors.length > 0); return ( - - {hasError ? - {label} : - {label} - } - {' '} - {(validationErrors.length > 0) && - - - {validationErrors.join(' ')} - - } - > - - +
+
+ + {hasError ? + {label} : + {label} + } {' '} - + {(validationErrors.length > 0) && + + + {validationErrors.join(' ')} + + } + > + + + {' '} + + } + {description && + + + {' '} + + } + {renderTrigger && + + + Takes effect on chart immediatly + + } + > + + + {' '} + + } + +
+ {rightNode && +
+ {rightNode} +
} - {description && - - } - +
+
); } diff --git a/superset/assets/javascripts/explorev2/components/ControlPanelsContainer.jsx b/superset/assets/javascripts/explorev2/components/ControlPanelsContainer.jsx index e5fdec909a..ee228a051e 100644 --- a/superset/assets/javascripts/explorev2/components/ControlPanelsContainer.jsx +++ b/superset/assets/javascripts/explorev2/components/ControlPanelsContainer.jsx @@ -4,10 +4,11 @@ import { bindActionCreators } from 'redux'; import * as actions from '../actions/exploreActions'; import { connect } from 'react-redux'; import { Panel, Alert } from 'react-bootstrap'; -import visTypes, { sectionsToRender } from '../stores/visTypes'; +import { sectionsToRender } from '../stores/visTypes'; import ControlPanelSection from './ControlPanelSection'; import FieldSetRow from './FieldSetRow'; import FieldSet from './FieldSet'; +import fields from '../stores/fields'; const propTypes = { datasource_type: PropTypes.string.isRequired, @@ -23,44 +24,19 @@ const propTypes = { class ControlPanelsContainer extends React.Component { constructor(props) { super(props); - this.fieldOverrides = this.fieldOverrides.bind(this); - this.getFieldData = this.getFieldData.bind(this); this.removeAlert = this.removeAlert.bind(this); + this.getFieldData = this.getFieldData.bind(this); } - componentWillMount() { - const datasource_id = this.props.form_data.datasource; - const datasource_type = this.props.datasource_type; - if (datasource_id) { - this.props.actions.fetchDatasourceMetadata(datasource_id, datasource_type); + getFieldData(fieldName) { + const mapF = fields[fieldName].mapStateToProps; + if (mapF) { + return Object.assign({}, this.props.fields[fieldName], mapF(this.props.exploreState)); } - } - componentWillReceiveProps(nextProps) { - if (nextProps.form_data.datasource !== this.props.form_data.datasource) { - if (nextProps.form_data.datasource) { - this.props.actions.fetchDatasourceMetadata( - nextProps.form_data.datasource, nextProps.datasource_type); - } - } - } - getFieldData(fs) { - 
const fieldOverrides = this.fieldOverrides(); - let fieldData = this.props.fields[fs] || {}; - if (fieldOverrides.hasOwnProperty(fs)) { - const overrideData = fieldOverrides[fs]; - fieldData = Object.assign({}, fieldData, overrideData); - } - if (fieldData.mapStateToProps) { - Object.assign(fieldData, fieldData.mapStateToProps(this.props.exploreState)); - } - return fieldData; + return this.props.fields[fieldName]; } sectionsToRender() { return sectionsToRender(this.props.form_data.viz_type, this.props.datasource_type); } - fieldOverrides() { - const viz = visTypes[this.props.form_data.viz_type]; - return viz.fieldOverrides || {}; - } removeAlert() { this.props.actions.removeControlPanelAlert(); } @@ -78,7 +54,7 @@ class ControlPanelsContainer extends React.Component { /> } - {!this.props.isDatasourceMetaLoading && this.sectionsToRender().map((section) => ( + {this.sectionsToRender().map((section) => ( ))} diff --git a/superset/assets/javascripts/explorev2/components/DisplayQueryButton.jsx b/superset/assets/javascripts/explorev2/components/DisplayQueryButton.jsx new file mode 100644 index 0000000000..769c064846 --- /dev/null +++ b/superset/assets/javascripts/explorev2/components/DisplayQueryButton.jsx @@ -0,0 +1,59 @@ +import React, { PropTypes } from 'react'; +import ModalTrigger from './../../components/ModalTrigger'; +import SyntaxHighlighter from 'react-syntax-highlighter'; +import { github } from 'react-syntax-highlighter/dist/styles'; + +const $ = window.$ = require('jquery'); + +const propTypes = { + queryEndpoint: PropTypes.string.isRequired, +}; + +export default class DisplayQueryButton extends React.PureComponent { + constructor(props) { + super(props); + this.state = { + modalBody:
,
+    };
+  }
+  beforeOpen() {
+    this.setState({
+      modalBody:
+        (Loading...),
+    });
+    $.ajax({
+      type: 'GET',
+      url: this.props.queryEndpoint,
+      success: (data) => {
+        const modalBody = data.language ?
+          
+            {data.query}
+          
+          :
+          
{data.query}
; + this.setState({ modalBody }); + }, + error(data) { + this.setState({ modalBody: (
{data.error}
) }); + }, + }); + } + render() { + return ( + Query} + modalTitle="Query" + bsSize="large" + beforeOpen={this.beforeOpen.bind(this)} + modalBody={this.state.modalBody} + /> + ); + } +} + +DisplayQueryButton.propTypes = propTypes; diff --git a/superset/assets/javascripts/explore/components/EmbedCodeButton.jsx b/superset/assets/javascripts/explorev2/components/EmbedCodeButton.jsx similarity index 100% rename from superset/assets/javascripts/explore/components/EmbedCodeButton.jsx rename to superset/assets/javascripts/explorev2/components/EmbedCodeButton.jsx diff --git a/superset/assets/javascripts/explorev2/components/ExploreActionButtons.jsx b/superset/assets/javascripts/explorev2/components/ExploreActionButtons.jsx new file mode 100644 index 0000000000..5823c93bf6 --- /dev/null +++ b/superset/assets/javascripts/explorev2/components/ExploreActionButtons.jsx @@ -0,0 +1,53 @@ +import React, { PropTypes } from 'react'; +import cx from 'classnames'; +import URLShortLinkButton from './URLShortLinkButton'; +import EmbedCodeButton from './EmbedCodeButton'; +import DisplayQueryButton from './DisplayQueryButton'; + +const propTypes = { + canDownload: PropTypes.oneOfType([PropTypes.string, PropTypes.bool]).isRequired, + slice: PropTypes.object, + queryEndpoint: PropTypes.string, +}; + +export default function ExploreActionButtons({ canDownload, slice, queryEndpoint }) { + const exportToCSVClasses = cx('btn btn-default btn-sm', { + 'disabled disabledButton': !canDownload, + }); + if (slice) { + return ( +
+ + + + + + .json + + + + .csv + + + +
+ ); + } + return ( + + ); +} + +ExploreActionButtons.propTypes = propTypes; diff --git a/superset/assets/javascripts/explorev2/components/ExploreViewContainer.jsx b/superset/assets/javascripts/explorev2/components/ExploreViewContainer.jsx index 83bdf18eae..83117a6e36 100644 --- a/superset/assets/javascripts/explorev2/components/ExploreViewContainer.jsx +++ b/superset/assets/javascripts/explorev2/components/ExploreViewContainer.jsx @@ -1,24 +1,27 @@ /* eslint camelcase: 0 */ -import React from 'react'; +import React, { PropTypes } from 'react'; import { bindActionCreators } from 'redux'; import * as actions from '../actions/exploreActions'; import { connect } from 'react-redux'; import ChartContainer from './ChartContainer'; import ControlPanelsContainer from './ControlPanelsContainer'; import SaveModal from './SaveModal'; -import QueryAndSaveBtns from '../../explore/components/QueryAndSaveBtns'; -import { autoQueryFields } from '../stores/fields'; +import QueryAndSaveBtns from './QueryAndSaveBtns'; import { getExploreUrl } from '../exploreUtils'; +import { getFormDataFromFields } from '../stores/store'; const propTypes = { - form_data: React.PropTypes.object.isRequired, - actions: React.PropTypes.object.isRequired, - datasource_type: React.PropTypes.string.isRequired, - chartStatus: React.PropTypes.string.isRequired, - fields: React.PropTypes.object.isRequired, + actions: PropTypes.object.isRequired, + datasource_type: PropTypes.string.isRequired, + chartStatus: PropTypes.string.isRequired, + fields: PropTypes.object.isRequired, + forcedHeight: PropTypes.string, + form_data: PropTypes.object.isRequired, + standalone: PropTypes.bool.isRequired, + triggerQuery: PropTypes.bool.isRequired, + queryRequest: PropTypes.object, }; - class ExploreViewContainer extends React.Component { constructor(props) { super(props); @@ -29,17 +32,23 @@ class ExploreViewContainer extends React.Component { } componentDidMount() { + this.props.actions.fetchDatasources(); 
window.addEventListener('resize', this.handleResize.bind(this)); - this.runQuery(); } - componentWillReceiveProps(nextProps) { - const refreshChart = Object.keys(nextProps.form_data).some((field) => ( - nextProps.form_data[field] !== this.props.form_data[field] - && autoQueryFields.indexOf(field) !== -1) - ); - if (refreshChart) { - this.onQuery(); + componentWillReceiveProps(np) { + if (np.fields.viz_type.value !== this.props.fields.viz_type.value) { + this.props.actions.resetFields(); + this.props.actions.triggerQuery(); + } + if (np.fields.datasource.value !== this.props.fields.datasource.value) { + this.props.actions.fetchDatasourceMetadata(np.form_data.datasource, true); + } + } + + componentDidUpdate() { + if (this.props.triggerQuery) { + this.runQuery(); } } @@ -48,19 +57,26 @@ class ExploreViewContainer extends React.Component { } onQuery() { + // remove alerts when query + this.props.actions.removeControlPanelAlert(); + this.props.actions.removeChartAlert(); + this.runQuery(); history.pushState( {}, document.title, - getExploreUrl(this.props.form_data, this.props.datasource_type) - ); - // remove alerts when query - this.props.actions.removeControlPanelAlert(); - this.props.actions.removeChartAlert(); + getExploreUrl(this.props.form_data)); + } + + onStop() { + this.props.actions.chartUpdateStopped(this.props.queryRequest); } getHeight() { - const navHeight = 90; + if (this.props.forcedHeight) { + return this.props.forcedHeight + 'px'; + } + const navHeight = this.props.standalone ? 0 : 90; return `${window.innerHeight - navHeight}px`; } @@ -101,8 +117,18 @@ class ExploreViewContainer extends React.Component { } return errorMessage; } + renderChartContainer() { + return ( + ); + } render() { + if (this.props.standalone) { + return this.renderChartContainer(); + } return (
}
@@ -126,7 +151,8 @@ class ExploreViewContainer extends React.Component { canAdd="True" onQuery={this.onQuery.bind(this)} onSave={this.toggleModal.bind(this)} - disabled={this.props.chartStatus === 'loading'} + onStop={this.onStop.bind(this)} + loading={this.props.chartStatus === 'loading'} errorMessage={this.renderErrorMessage()} />
@@ -134,14 +160,10 @@ class ExploreViewContainer extends React.Component { actions={this.props.actions} form_data={this.props.form_data} datasource_type={this.props.datasource_type} - onQuery={this.onQuery.bind(this)} />
- + {this.renderChartContainer()}
@@ -152,11 +174,16 @@ class ExploreViewContainer extends React.Component { ExploreViewContainer.propTypes = propTypes; function mapStateToProps(state) { + const form_data = getFormDataFromFields(state.fields); return { chartStatus: state.chartStatus, datasource_type: state.datasource_type, fields: state.fields, - form_data: state.viz.form_data, + form_data, + standalone: state.standalone, + triggerQuery: state.triggerQuery, + forcedHeight: state.forced_height, + queryRequest: state.queryRequest, }; } diff --git a/superset/assets/javascripts/explorev2/components/FieldSet.jsx b/superset/assets/javascripts/explorev2/components/FieldSet.jsx index 1c67eb9ae8..48a9d17301 100644 --- a/superset/assets/javascripts/explorev2/components/FieldSet.jsx +++ b/superset/assets/javascripts/explorev2/components/FieldSet.jsx @@ -1,17 +1,19 @@ import React, { PropTypes } from 'react'; -import TextField from './TextField'; import CheckboxField from './CheckboxField'; -import TextAreaField from './TextAreaField'; -import SelectField from './SelectField'; -import FilterField from './FilterField'; import ControlHeader from './ControlHeader'; +import FilterField from './FilterField'; +import HiddenField from './HiddenField'; +import SelectField from './SelectField'; +import TextAreaField from './TextAreaField'; +import TextField from './TextField'; const fieldMap = { - TextField, CheckboxField, - TextAreaField, - SelectField, FilterField, + HiddenField, + SelectField, + TextAreaField, + TextField, }; const fieldTypes = Object.keys(fieldMap); @@ -25,6 +27,8 @@ const propTypes = { places: PropTypes.number, validators: PropTypes.array, validationErrors: PropTypes.array, + renderTrigger: PropTypes.bool, + rightNode: PropTypes.node, value: PropTypes.oneOfType([ PropTypes.string, PropTypes.number, @@ -33,6 +37,7 @@ const propTypes = { }; const defaultProps = { + renderTrigger: false, validators: [], validationErrors: [], }; @@ -65,12 +70,15 @@ export default class FieldSet extends 
React.PureComponent { } render() { const FieldType = fieldMap[this.props.type]; + const divStyle = this.props.hidden ? { display: 'none' } : null; return ( -
+
{}, removeFilter: () => {}, choices: [], @@ -21,6 +22,11 @@ const defaultProps = { }; export default class Filter extends React.Component { + constructor(props) { + super(props); + this.opChoices = this.props.having ? ['==', '!=', '>', '<', '>=', '<='] + : ['in', 'not in']; + } fetchFilterValues(col) { if (!this.props.datasource) { return; @@ -61,24 +67,27 @@ export default class Filter extends React.Component { if (!filter.choices) { this.fetchFilterValues(filter.col); } + } + if (this.props.having) { + // druid having filter return ( - ); } return ( - ); } @@ -102,7 +111,7 @@ export default class Filter extends React.Component { fills in False value - """ - - def __call__(self, **kwargs): - html = super(BetterBooleanField, self).__call__(**kwargs) - html += u''.format(self.name) - return widgets.HTMLString(html) - - -class SelectMultipleSortableField(SelectMultipleField): - - """Works along with select2sortable to preserves the sort order""" - - def iter_choices(self): - d = OrderedDict() - for value, label in self.choices: - selected = self.data is not None and self.coerce(value) in self.data - d[value] = (value, label, selected) - if self.data: - for value in self.data: - if value and value in d: - yield d.pop(value) - while d: - yield d.popitem(last=False)[1] - - -class FreeFormSelect(widgets.Select): - - """A WTF widget that allows for free form entry""" - - def __call__(self, field, **kwargs): - kwargs.setdefault('id', field.id) - if self.multiple: - kwargs['multiple'] = True - html = ['') - return widgets.HTMLString(''.join(html)) - - -class FreeFormSelectField(SelectField): - - """A WTF SelectField that allows for free form input""" - - widget = FreeFormSelect() - - def pre_validate(self, form): - return - - -class OmgWtForm(Form): - - """Supersetification of the WTForm Form object""" - - fieldsets = {} - css_classes = dict() - - def get_field(self, fieldname): - return getattr(self, fieldname) - - def field_css_classes(self, fieldname): - if fieldname in 
self.css_classes: - return " ".join(self.css_classes[fieldname]) - return "" - - -class FormFactory(object): - - """Used to create the forms in the explore view dynamically""" - - series_limits = [0, 5, 10, 25, 50, 100, 500] - fieltype_class = { - SelectField: 'select2', - SelectMultipleField: 'select2', - FreeFormSelectField: 'select2_freeform', - SelectMultipleSortableField: 'select2Sortable', - } - - def __init__(self, viz): - self.viz = viz - from superset.viz import viz_types - viz = self.viz - datasource = viz.datasource - if not datasource.metrics_combo: - raise Exception("Please define at least one metric for your table") - default_metric = datasource.metrics_combo[0][0] - - gb_cols = datasource.groupby_column_names - default_groupby = gb_cols[0] if gb_cols else None - group_by_choices = self.choicify(gb_cols) - order_by_choices = [] - for s in sorted(datasource.column_names): - order_by_choices.append((json.dumps([s, True]), s + ' [asc]')) - order_by_choices.append((json.dumps([s, False]), s + ' [desc]')) - # Pool of all the fields that can be used in Superset - field_data = { - 'viz_type': (SelectField, { - "label": _("Viz"), - "default": 'table', - "choices": [(k, v.verbose_name) for k, v in viz_types.items()], - "description": _("The type of visualization to display") - }), - 'metrics': (SelectMultipleSortableField, { - "label": _("Metrics"), - "choices": datasource.metrics_combo, - "default": [default_metric], - "description": _("One or many metrics to display") - }), - 'order_by_cols': (SelectMultipleSortableField, { - "label": _("Ordering"), - "choices": order_by_choices, - "description": _("One or many metrics to display") - }), - 'metric': (SelectField, { - "label": _("Metric"), - "choices": datasource.metrics_combo, - "default": default_metric, - "description": _("Choose the metric") - }), - 'metric_2': (SelectField, { - "label": _("Right Axis Metric"), - "choices": datasource.metrics_combo, - "default": default_metric, - "description": _("Choose 
the metric for second y axis") - }), - 'stacked_style': (SelectField, { - "label": _("Chart Style"), - "choices": ( - ('stack', _('stack')), - ('stream', _('stream')), - ('expand', _('expand')), - ), - "default": 'stack', - "description": "" - }), - 'linear_color_scheme': (SelectField, { - "label": _("Color Scheme"), - "choices": ( - ('fire', _('fire')), - ('blue_white_yellow', _('blue_white_yellow')), - ('white_black', _('white_black')), - ('black_white', _('black_white')), - ), - "default": 'blue_white_yellow', - "description": "" - }), - 'normalize_across': (SelectField, { - "label": _("Normalize Across"), - "choices": ( - ('heatmap', _('heatmap')), - ('x', _('x')), - ('y', _('y')), - ), - "default": 'heatmap', - "description": _( - "Color will be rendered based on a ratio " - "of the cell against the sum of across this " - "criteria") - }), - 'horizon_color_scale': (SelectField, { - "label": _("Color Scale"), - "choices": ( - ('series', _('series')), - ('overall', _('overall')), - ('change', _('change')), - ), - "default": 'series', - "description": _("Defines how the color are attributed.") - }), - 'canvas_image_rendering': (SelectField, { - "label": _("Rendering"), - "choices": ( - ('pixelated', _('pixelated (Sharp)')), - ('auto', _('auto (Smooth)')), - ), - "default": 'pixelated', - "description": _( - "image-rendering CSS attribute of the canvas object that " - "defines how the browser scales up the image") - }), - 'xscale_interval': (SelectField, { - "label": _("XScale Interval"), - "choices": self.choicify(range(1, 50)), - "default": '1', - "description": _( - "Number of step to take between ticks when " - "printing the x scale") - }), - 'yscale_interval': (SelectField, { - "label": _("YScale Interval"), - "choices": self.choicify(range(1, 50)), - "default": '1', - "description": _( - "Number of step to take between ticks when " - "printing the y scale") - }), - 'bar_stacked': (BetterBooleanField, { - "label": _("Stacked Bars"), - "default": False, - 
"description": "" - }), - 'show_markers': (BetterBooleanField, { - "label": _("Show Markers"), - "default": False, - "description": ( - "Show data points as circle markers on top of the lines " - "in the chart") - }), - 'show_bar_value': (BetterBooleanField, { - "label": _("Bar Values"), - "default": False, - "description": "Show the value on top of the bars or not" - }), - 'order_bars': (BetterBooleanField, { - "label": _("Sort Bars"), - "default": False, - "description": _("Sort bars by x labels."), - }), - 'show_controls': (BetterBooleanField, { - "label": _("Extra Controls"), - "default": False, - "description": _( - "Whether to show extra controls or not. Extra controls " - "include things like making mulitBar charts stacked " - "or side by side.") - }), - 'reduce_x_ticks': (BetterBooleanField, { - "label": _("Reduce X ticks"), - "default": False, - "description": _( - "Reduces the number of X axis ticks to be rendered. " - "If true, the x axis wont overflow and labels may be " - "missing. 
If false, a minimum width will be applied " - "to columns and the width may overflow into an " - "horizontal scroll."), - }), - 'include_series': (BetterBooleanField, { - "label": _("Include Series"), - "default": False, - "description": _("Include series name as an axis") - }), - 'secondary_metric': (SelectField, { - "label": _("Color Metric"), - "choices": datasource.metrics_combo, - "default": default_metric, - "description": _("A metric to use for color") - }), - 'country_fieldtype': (SelectField, { - "label": _("Country Field Type"), - "default": 'cca2', - "choices": ( - ('name', _('Full name')), - ('cioc', _('code International Olympic Committee (cioc)')), - ('cca2', _('code ISO 3166-1 alpha-2 (cca2)')), - ('cca3', _('code ISO 3166-1 alpha-3 (cca3)')), - ), - "description": _( - "The country code standard that Superset should expect " - "to find in the [country] column") - }), - 'groupby': (SelectMultipleSortableField, { - "label": _("Group by"), - "choices": self.choicify(datasource.groupby_column_names), - "description": _("One or many fields to group by") - }), - 'columns': (SelectMultipleSortableField, { - "label": _("Columns"), - "choices": self.choicify(datasource.groupby_column_names), - "description": _("One or many fields to pivot as columns") - }), - 'all_columns': (SelectMultipleSortableField, { - "label": _("Columns"), - "choices": self.choicify(datasource.column_names), - "description": _("Columns to display") - }), - 'all_columns_x': (SelectField, { - "label": _("X"), - "choices": self.choicify(datasource.column_names), - "default": datasource.column_names[0], - "description": _("Columns to display") - }), - 'all_columns_y': (SelectField, { - "label": _("Y"), - "choices": self.choicify(datasource.column_names), - "default": datasource.column_names[0], - "description": _("Columns to display") - }), - 'druid_time_origin': (FreeFormSelectField, { - "label": _("Origin"), - "choices": ( - ('', _('default')), - ('now', _('now')), - ), - "default": '', 
- "description": _( - "Defines the origin where time buckets start, " - "accepts natural dates as in 'now', 'sunday' or '1970-01-01'") - }), - 'bottom_margin': (FreeFormSelectField, { - "label": _("Bottom Margin"), - "choices": self.choicify(['auto', 50, 75, 100, 125, 150, 200]), - "default": 'auto', - "description": _( - "Bottom margin, in pixels, allowing for more room for " - "axis labels"), - }), - 'page_length': (FreeFormSelectField, { - "label": _("Page Length"), - "default": 0, - "choices": self.choicify([0, 10, 25, 50, 100, 250, 500]), - "description": _( - "Number of rows per page, 0 means no pagination") - }), - 'granularity': (FreeFormSelectField, { - "label": _("Time Granularity"), - "default": "one day", - "choices": ( - ('all', _('all')), - ('5 seconds', _('5 seconds')), - ('30 seconds', _('30 seconds')), - ('1 minute', _('1 minute')), - ('5 minutes', _('5 minutes')), - ('1 hour', _('1 hour')), - ('6 hour', _('6 hour')), - ('1 day', _('1 day')), - ('7 days', _('7 days')), - ('week', _('week')), - ('week_starting_sunday', _('week_starting_sunday')), - ('week_ending_saturday', _('week_ending_saturday')), - ('month', _('month')), - ), - "description": _( - "The time granularity for the visualization. Note that you " - "can type and use simple natural language as in '10 seconds', " - "'1 day' or '56 weeks'") - }), - 'domain_granularity': (SelectField, { - "label": _("Domain"), - "default": "month", - "choices": ( - ('hour', _('hour')), - ('day', _('day')), - ('week', _('week')), - ('month', _('month')), - ('year', _('year')), - ), - "description": _( - "The time unit used for the grouping of blocks") - }), - 'subdomain_granularity': (SelectField, { - "label": _("Subdomain"), - "default": "day", - "choices": ( - ('min', _('min')), - ('hour', _('hour')), - ('day', _('day')), - ('week', _('week')), - ('month', _('month')), - ), - "description": _( - "The time unit for each block. Should be a smaller unit than " - "domain_granularity. 
Should be larger or equal to Time Grain") - }), - 'link_length': (FreeFormSelectField, { - "label": _("Link Length"), - "default": "200", - "choices": self.choicify([ - '10', - '25', - '50', - '75', - '100', - '150', - '200', - '250', - ]), - "description": _("Link length in the force layout") - }), - 'charge': (FreeFormSelectField, { - "label": _("Charge"), - "default": "-500", - "choices": self.choicify([ - '-50', - '-75', - '-100', - '-150', - '-200', - '-250', - '-500', - '-1000', - '-2500', - '-5000', - ]), - "description": _("Charge in the force layout") - }), - 'granularity_sqla': (SelectField, { - "label": _("Time Column"), - "default": datasource.main_dttm_col or datasource.any_dttm_col, - "choices": self.choicify(datasource.dttm_cols), - "description": _( - "The time column for the visualization. Note that you " - "can define arbitrary expression that return a DATETIME " - "column in the table editor. Also note that the " - "filter below is applied against this column or " - "expression") - }), - 'resample_rule': (FreeFormSelectField, { - "label": _("Resample Rule"), - "default": '', - "choices": ( - ('1T', _('1T')), - ('1H', _('1H')), - ('1D', _('1D')), - ('7D', _('7D')), - ('1M', _('1M')), - ('1AS', _('1AS')), - ), - "description": _("Pandas resample rule") - }), - 'resample_how': (FreeFormSelectField, { - "label": _("Resample How"), - "default": '', - "choices": ( - ('', ''), - ('mean', _('mean')), - ('sum', _('sum')), - ('median', _('median')), - ), - "description": _("Pandas resample how") - }), - 'resample_fillmethod': (FreeFormSelectField, { - "label": _("Resample Fill Method"), - "default": '', - "choices": ( - ('', ''), - ('ffill', _('ffill')), - ('bfill', _('bfill')), - ), - "description": _("Pandas resample fill method") - }), - 'since': (FreeFormSelectField, { - "label": _("Since"), - "default": "7 days ago", - "choices": ( - ('1 hour ago', _('1 hour ago')), - ('12 hours ago', _('12 hours ago')), - ('1 day ago', _('1 day ago')), - ('7 days 
ago', _('7 days ago')), - ('28 days ago', _('28 days ago')), - ('90 days ago', _('90 days ago')), - ('1 year ago', _('1 year ago')), - ), - "description": _( - "Timestamp from filter. This supports free form typing and " - "natural language as in '1 day ago', '28 days' or '3 years'") - }), - 'until': (FreeFormSelectField, { - "label": _("Until"), - "default": "now", - "choices": ( - ('now', _('now')), - ('1 day ago', _('1 day ago')), - ('7 days ago', _('7 days ago')), - ('28 days ago', _('28 days ago')), - ('90 days ago', _('90 days ago')), - ('1 year ago', _('1 year ago')), - ) - }), - 'max_bubble_size': (FreeFormSelectField, { - "label": _("Max Bubble Size"), - "default": "25", - "choices": self.choicify([ - '5', - '10', - '15', - '25', - '50', - '75', - '100', - ]) - }), - 'whisker_options': (FreeFormSelectField, { - "label": _("Whisker/outlier options"), - "default": "Tukey", - "description": _( - "Determines how whiskers and outliers are calculated."), - "choices": ( - ('Tukey', _('Tukey')), - ('Min/max (no outliers)', _('Min/max (no outliers)')), - ('2/98 percentiles', _('2/98 percentiles')), - ('9/91 percentiles', _('9/91 percentiles')), - ) - }), - 'treemap_ratio': (DecimalField, { - "label": _("Ratio"), - "default": 0.5 * (1 + math.sqrt(5)), # d3 default, golden ratio - "description": _('Target aspect ratio for treemap tiles.'), - }), - 'number_format': (FreeFormSelectField, { - "label": _("Number format"), - "default": '.3s', - "choices": [ - ('.3s', '".3s" | 12.3k'), - ('.3%', '".3%" | 1234543.210%'), - ('.4r', '".4r" | 12350'), - ('.3f', '".3f" | 12345.432'), - ('+,', '"+," | +12,345.4321'), - ('$,.2f', '"$,.2f" | $12,345.43'), - ], - "description": D3_FORMAT_DOCS, - }), - 'row_limit': (FreeFormSelectField, { - "label": _('Row limit'), - "default": config.get("VIZ_ROW_LIMIT"), - "choices": self.choicify( - [10, 50, 100, 250, 500, 1000, 5000, 10000, 50000]) - }), - 'limit': (FreeFormSelectField, { - "label": _('Series limit'), - "choices": 
self.choicify(self.series_limits), - "default": 50, - "description": _( - "Limits the number of time series that get displayed") - }), - 'timeseries_limit_metric': (SelectField, { - "label": _("Sort By"), - "choices": [('', '')] + datasource.metrics_combo, - "default": "", - "description": _("Metric used to define the top series") - }), - 'rolling_type': (SelectField, { - "label": _("Rolling"), - "default": 'None', - "choices": [(s, s) for s in ['None', 'mean', 'sum', 'std', 'cumsum']], - "description": _( - "Defines a rolling window function to apply, works along " - "with the [Periods] text box") - }), - 'rolling_periods': (IntegerField, { - "label": _("Periods"), - "validators": [validators.optional()], - "description": _( - "Defines the size of the rolling window function, " - "relative to the time granularity selected") - }), - 'series': (SelectField, { - "label": _("Series"), - "choices": group_by_choices, - "default": default_groupby, - "description": _( - "Defines the grouping of entities. " - "Each series is shown as a specific color on the chart and " - "has a legend toggle") - }), - 'entity': (SelectField, { - "label": _("Entity"), - "choices": group_by_choices, - "default": default_groupby, - "description": _("This define the element to be plotted on the chart") - }), - 'x': (SelectField, { - "label": _("X Axis"), - "choices": datasource.metrics_combo, - "default": default_metric, - "description": _("Metric assigned to the [X] axis") - }), - 'y': (SelectField, { - "label": _("Y Axis"), - "choices": datasource.metrics_combo, - "default": default_metric, - "description": _("Metric assigned to the [Y] axis") - }), - 'size': (SelectField, { - "label": _('Bubble Size'), - "default": default_metric, - "choices": datasource.metrics_combo - }), - 'url': (TextField, { - "label": _("URL"), - "description": _( - "The URL, this field is templated, so you can integrate " - "{{ width }} and/or {{ height }} in your URL string." 
- ), - "default": 'https: //www.youtube.com/embed/JkI5rg_VcQ4', - }), - 'x_axis_label': (TextField, { - "label": _("X Axis Label"), - "default": '', - }), - 'y_axis_label': (TextField, { - "label": _("Y Axis Label"), - "default": '', - }), - 'where': (TextField, { - "label": _("Custom WHERE clause"), - "default": '', - "description": _( - "The text in this box gets included in your query's WHERE " - "clause, as an AND to other criteria. You can include " - "complex expression, parenthesis and anything else " - "supported by the backend it is directed towards.") - }), - 'having': (TextField, { - "label": _("Custom HAVING clause"), - "default": '', - "description": _( - "The text in this box gets included in your query's HAVING" - " clause, as an AND to other criteria. You can include " - "complex expression, parenthesis and anything else " - "supported by the backend it is directed towards.") - }), - 'compare_lag': (TextField, { - "label": _("Comparison Period Lag"), - "description": _( - "Based on granularity, number of time periods to " - "compare against") - }), - 'compare_suffix': (TextField, { - "label": _("Comparison suffix"), - "description": _("Suffix to apply after the percentage display") - }), - 'table_timestamp_format': (FreeFormSelectField, { - "label": _("Table Timestamp Format"), - "default": 'smart_date', - "choices": TIMESTAMP_CHOICES, - "description": _("Timestamp Format") - }), - 'series_height': (FreeFormSelectField, { - "label": _("Series Height"), - "default": 25, - "choices": self.choicify([10, 25, 40, 50, 75, 100, 150, 200]), - "description": _("Pixel height of each series") - }), - 'x_axis_format': (FreeFormSelectField, { - "label": _("X axis format"), - "default": 'smart_date', - "choices": TIMESTAMP_CHOICES, - "description": D3_FORMAT_DOCS, - }), - 'y_axis_format': (FreeFormSelectField, { - "label": _("Y axis format"), - "default": '.3s', - "choices": AXIS_FORMAT_CHOICES, - "description": D3_FORMAT_DOCS, - }), - 'y_axis_2_format': 
(FreeFormSelectField, { - "label": _("Right axis format"), - "default": '.3s', - "choices": AXIS_FORMAT_CHOICES, - "description": D3_FORMAT_DOCS, - }), - 'markup_type': (SelectField, { - "label": _("Markup Type"), - "choices": ( - ('markdown', _('markdown')), - ('html', _('html')) - ), - "default": "markdown", - "description": _("Pick your favorite markup language") - }), - 'rotation': (SelectField, { - "label": _("Rotation"), - "choices": ( - ('random', _('random')), - ('flat', _('flat')), - ('square', _('square')), - ), - "default": "random", - "description": _("Rotation to apply to words in the cloud") - }), - 'line_interpolation': (SelectField, { - "label": _("Line Style"), - "choices": ( - ('linear', _('linear')), - ('basis', _('basis')), - ('cardinal', _('cardinal')), - ('monotone', _('monotone')), - ('step-before', _('step-before')), - ('step-after', _('step-after')), - ), - "default": 'linear', - "description": _("Line interpolation as defined by d3.js") - }), - 'pie_label_type': (SelectField, { - "label": _("Label Type"), - "default": 'key', - "choices": ( - ('key', _("Category Name")), - ('value', _("Value")), - ('percent', _("Percentage")), - ), - "description": _("What should be shown on the label?") - }), - 'code': (TextAreaField, { - "label": _("Code"), - "description": _("Put your code here"), - "default": '' - }), - 'pandas_aggfunc': (SelectField, { - "label": _("Aggregation function"), - "choices": ( - ('sum', _('sum')), - ('mean', _('mean')), - ('min', _('min')), - ('max', _('max')), - ('median', _('median')), - ('stdev', _('stdev')), - ('var', _('var')), - ), - "default": 'sum', - "description": _( - "Aggregate function to apply when pivoting and " - "computing the total rows and columns") - }), - 'size_from': (TextField, { - "label": _("Font Size From"), - "default": "20", - "description": _("Font size for the smallest value in the list") - }), - 'size_to': (TextField, { - "label": _("Font Size To"), - "default": "150", - "description": _("Font 
size for the biggest value in the list") - }), - 'show_brush': (BetterBooleanField, { - "label": _("Range Filter"), - "default": False, - "description": _( - "Whether to display the time range interactive selector") - }), - 'date_filter': (BetterBooleanField, { - "label": _("Date Filter"), - "default": False, - "description": _("Whether to include a time filter") - }), - 'show_datatable': (BetterBooleanField, { - "label": _("Data Table"), - "default": False, - "description": _("Whether to display the interactive data table") - }), - 'include_search': (BetterBooleanField, { - "label": _("Search Box"), - "default": False, - "description": _( - "Whether to include a client side search box") - }), - 'table_filter': (BetterBooleanField, { - "label": _("Table Filter"), - "default": False, - "description": _( - "Whether to apply filter when table cell is clicked") - }), - 'show_bubbles': (BetterBooleanField, { - "label": _("Show Bubbles"), - "default": False, - "description": _( - "Whether to display bubbles on top of countries") - }), - 'show_legend': (BetterBooleanField, { - "label": _("Legend"), - "default": True, - "description": _("Whether to display the legend (toggles)") - }), - 'x_axis_showminmax': (BetterBooleanField, { - "label": _("X bounds"), - "default": True, - "description": _( - "Whether to display the min and max values of the X axis") - }), - 'rich_tooltip': (BetterBooleanField, { - "label": _("Rich Tooltip"), - "default": True, - "description": _( - "The rich tooltip shows a list of all series for that" - " point in time") - }), - 'y_axis_zero': (BetterBooleanField, { - "label": _("Y Axis Zero"), - "default": False, - "description": _( - "Force the Y axis to start at 0 instead of the minimum " - "value") - }), - 'y_log_scale': (BetterBooleanField, { - "label": _("Y Log"), - "default": False, - "description": _("Use a log scale for the Y axis") - }), - 'x_log_scale': (BetterBooleanField, { - "label": _("X Log"), - "default": False, - "description": 
_("Use a log scale for the X axis") - }), - 'donut': (BetterBooleanField, { - "label": _("Donut"), - "default": False, - "description": _("Do you want a donut or a pie?") - }), - 'labels_outside': (BetterBooleanField, { - "label": _("Put labels outside"), - "default": True, - "description": _("Put the labels outside the pie?") - }), - 'contribution': (BetterBooleanField, { - "label": _("Contribution"), - "default": False, - "description": _("Compute the contribution to the total") - }), - 'num_period_compare': (IntegerField, { - "label": _("Period Ratio"), - "default": None, - "validators": [validators.optional()], - "description": _( - "[integer] Number of period to compare against, " - "this is relative to the granularity selected") - }), - 'period_ratio_type': (SelectField, { - "label": _("Period Ratio Type"), - "default": 'growth', - "choices": ( - ('factor', _('factor')), - ('growth', _('growth')), - ('value', _('value')), - ), - "description": _( - "`factor` means (new/previous), `growth` is " - "((new/previous) - 1), `value` is (new-previous)") - }), - 'time_compare': (TextField, { - "label": _("Time Shift"), - "default": "", - "description": _( - "Overlay a timeseries from a " - "relative time period. Expects relative time delta " - "in natural language (example: 24 hours, 7 days, " - "56 weeks, 365 days") - }), - 'subheader': (TextField, { - "label": _("Subheader"), - "description": _( - "Description text that shows up below your Big " - "Number") - }), - 'mapbox_label': (SelectMultipleSortableField, { - "label": "Label", - "choices": self.choicify(["count"] + datasource.column_names), - "description": _( - "'count' is COUNT(*) if a group by is used. " - "Numerical columns will be aggregated with the aggregator. " - "Non-numerical columns will be used to label points. 
" - "Leave empty to get a count of points in each cluster."), - }), - 'mapbox_style': (SelectField, { - "label": "Map Style", - "choices": [ - ("mapbox://styles/mapbox/streets-v9", "Streets"), - ("mapbox://styles/mapbox/dark-v9", "Dark"), - ("mapbox://styles/mapbox/light-v9", "Light"), - ("mapbox://styles/mapbox/satellite-streets-v9", "Satellite Streets"), - ("mapbox://styles/mapbox/satellite-v9", "Satellite"), - ("mapbox://styles/mapbox/outdoors-v9", "Outdoors"), - ], - "default": "mapbox://styles/mapbox/streets-v9", - "description": _("Base layer map style") - }), - 'clustering_radius': (FreeFormSelectField, { - "label": _("Clustering Radius"), - "default": "60", - "choices": self.choicify([ - '0', - '20', - '40', - '60', - '80', - '100', - '200', - '500', - '1000', - ]), - "description": _( - "The radius (in pixels) the algorithm uses to define a cluster. " - "Choose 0 to turn off clustering, but beware that a large " - "number of points (>1000) will cause lag.") - }), - 'point_radius': (SelectField, { - "label": _("Point Radius"), - "default": "Auto", - "choices": self.choicify(["Auto"] + datasource.column_names), - "description": _( - "The radius of individual points (ones that are not in a cluster). " - "Either a numerical column or 'Auto', which scales the point based " - "on the largest cluster") - }), - 'point_radius_unit': (SelectField, { - "label": _("Point Radius Unit"), - "default": "Pixels", - "choices": self.choicify([ - "Pixels", - "Miles", - "Kilometers", - ]), - "description": _("The unit of measure for the specified point radius") - }), - 'global_opacity': (DecimalField, { - "label": _("Opacity"), - "default": 1, - "description": _( - "Opacity of all clusters, points, and labels. 
" - "Between 0 and 1."), - }), - 'viewport_zoom': (DecimalField, { - "label": _("Zoom"), - "default": 11, - "validators": [validators.optional()], - "description": _("Zoom level of the map"), - "places": 8, - }), - 'viewport_latitude': (DecimalField, { - "label": _("Default latitude"), - "default": 37.772123, - "description": _("Latitude of default viewport"), - "places": 8, - }), - 'viewport_longitude': (DecimalField, { - "label": _("Default longitude"), - "default": -122.405293, - "description": _("Longitude of default viewport"), - "places": 8, - }), - 'render_while_dragging': (BetterBooleanField, { - "label": _("Live render"), - "default": True, - "description": _( - "Points and clusters will update as viewport " - "is being changed"), - }), - 'mapbox_color': (FreeFormSelectField, { - "label": _("RGB Color"), - "default": "rgb(0, 122, 135)", - "choices": [ - ("rgb(0, 139, 139)", "Dark Cyan"), - ("rgb(128, 0, 128)", "Purple"), - ("rgb(255, 215, 0)", "Gold"), - ("rgb(69, 69, 69)", "Dim Gray"), - ("rgb(220, 20, 60)", "Crimson"), - ("rgb(34, 139, 34)", "Forest Green"), - ], - "description": _("The color for points and clusters in RGB") - }), - 'ranges': (TextField, { - "label": _("Ranges"), - "default": "", - "description": _("Ranges to highlight with shading") - }), - 'range_labels': (TextField, { - "label": _("Range labels"), - "default": "", - "description": _("Labels for the ranges") - }), - 'markers': (TextField, { - "label": _("Markers"), - "default": "", - "description": _("List of values to mark with triangles") - }), - 'marker_labels': (TextField, { - "label": _("Marker labels"), - "default": "", - "description": _("Labels for the markers") - }), - 'marker_lines': (TextField, { - "label": _("Marker lines"), - "default": "", - "description": _("List of values to mark with lines") - }), - 'marker_line_labels': (TextField, { - "label": _("Marker line labels"), - "default": "", - "description": _("Labels for the marker lines") - }), - } - - # Override default 
arguments with form overrides - for field_name, override_map in viz.form_overrides.items(): - if field_name in field_data: - field_data[field_name][1].update(override_map) - - self.field_dict = { - field_name: v[0](**v[1]) - for field_name, v in field_data.items() - } - - @staticmethod - def choicify(l): - return [("{}".format(obj), "{}".format(obj)) for obj in l] - - def get_form(self): - """Returns a form object based on the viz/datasource/context""" - viz = self.viz - field_css_classes = {} - for name, obj in self.field_dict.items(): - field_css_classes[name] = ['form-control', 'input-sm'] - s = self.fieltype_class.get(obj.field_class) - if s: - field_css_classes[name] += [s] - - for field in ('show_brush', 'show_legend', 'rich_tooltip'): - field_css_classes[field] += ['input-sm'] - - class QueryForm(OmgWtForm): - - """The dynamic form object used for the explore view""" - - fieldsets = copy(viz.fieldsets) - css_classes = field_css_classes - standalone = HiddenField() - async = HiddenField() - force = HiddenField() - extra_filters = HiddenField() - json = HiddenField() - slice_id = HiddenField() - slice_name = HiddenField() - previous_viz_type = HiddenField(default=viz.viz_type) - collapsed_fieldsets = HiddenField() - viz_type = self.field_dict.get('viz_type') - - for field in viz.flat_form_fields(): - setattr(QueryForm, field, self.field_dict[field]) - - def add_to_form(attrs): - for attr in attrs: - setattr(QueryForm, attr, self.field_dict[attr]) - - filter_choices = self.choicify(['in', 'not in']) - having_op_choices = [] - filter_prefixes = ['flt'] - # datasource type specific form elements - datasource_classname = viz.datasource.__class__.__name__ - time_fields = None - if datasource_classname == 'SqlaTable': - QueryForm.fieldsets += ({ - 'label': _('SQL'), - 'fields': ['where', 'having'], - 'description': _( - "This section exposes ways to include snippets of " - "SQL in your query"), - },) - add_to_form(('where', 'having')) - grains = 
viz.datasource.database.grains() - - if grains: - grains_choices = [(grain.name, grain.label) for grain in grains] - time_fields = ('granularity_sqla', 'time_grain_sqla') - self.field_dict['time_grain_sqla'] = SelectField( - _('Time Grain'), - choices=grains_choices, - default="Time Column", - description=_( - "The time granularity for the visualization. This " - "applies a date transformation to alter " - "your time column and defines a new time granularity." - "The options here are defined on a per database " - "engine basis in the Superset source code")) - add_to_form(time_fields) - field_css_classes['time_grain_sqla'] = ['form-control', 'select2'] - field_css_classes['granularity_sqla'] = ['form-control', 'select2'] - else: - time_fields = 'granularity_sqla' - add_to_form((time_fields, )) - elif datasource_classname == 'DruidDatasource': - time_fields = ('granularity', 'druid_time_origin') - add_to_form(('granularity', 'druid_time_origin')) - field_css_classes['granularity'] = ['form-control', 'select2_freeform'] - field_css_classes['druid_time_origin'] = ['form-control', 'select2_freeform'] - filter_choices = self.choicify(['in', 'not in', 'regex']) - having_op_choices = self.choicify( - ['==', '!=', '>', '<', '>=', '<=']) - filter_prefixes += ['having'] - add_to_form(('since', 'until')) - - # filter_cols defaults to ''. 
Filters with blank col will be ignored - filter_cols = self.choicify( - ([''] + viz.datasource.filterable_column_names) or ['']) - having_cols = filter_cols + viz.datasource.metrics_combo - for field_prefix in filter_prefixes: - is_having_filter = field_prefix == 'having' - col_choices = filter_cols if not is_having_filter else having_cols - op_choices = filter_choices if not is_having_filter else \ - having_op_choices - for i in range(10): - setattr(QueryForm, field_prefix + '_col_' + str(i), - SelectField( - _('Filter 1'), - default=col_choices[0][0], - choices=col_choices)) - setattr(QueryForm, field_prefix + '_op_' + str(i), SelectField( - _('Filter 1'), - default=op_choices[0][0], - choices=op_choices)) - setattr( - QueryForm, field_prefix + '_eq_' + str(i), - TextField(_("Super"), default='')) - - if time_fields: - QueryForm.fieldsets = ({ - 'label': _('Time'), - 'fields': ( - time_fields, - ('since', 'until'), - ), - 'description': _("Time related form attributes"), - },) + tuple(QueryForm.fieldsets) - return QueryForm diff --git a/superset/legacy.py b/superset/legacy.py new file mode 100644 index 0000000000..8bec0846b2 --- /dev/null +++ b/superset/legacy.py @@ -0,0 +1,79 @@ +"""Code related with dealing with legacy / change management""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from superset import frontend_config +import re + +FORM_DATA_KEY_WHITELIST = list(frontend_config.get('fields').keys()) + ['slice_id'] + +def cast_filter_data(form_data): + """Used by cast_form_data to parse the filters""" + flts = [] + having_flts = [] + fd = form_data + filter_pattern = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''') + for i in range(0, 10): + for prefix in ['flt', 'having']: + col_str = '{}_col_{}'.format(prefix, i) + op_str = '{}_op_{}'.format(prefix, i) + val_str = '{}_eq_{}'.format(prefix, i) + if col_str in fd and op_str in fd and val_str in fd \ 
+ and len(fd[val_str]) > 0: + f = {} + f['col'] = fd[col_str] + f['op'] = fd[op_str] + if prefix == 'flt': + # transfer old strings in filter value to list + splitted = filter_pattern.split(fd[val_str])[1::2] + values = [types.replace("'", '').strip() for types in splitted] + f['val'] = values + flts.append(f) + if prefix == 'having': + f['val'] = fd[val_str] + having_flts.append(f) + if col_str in fd: + del fd[col_str] + if op_str in fd: + del fd[op_str] + if val_str in fd: + del fd[val_str] + fd['filters'] = flts + fd['having_filters'] = having_flts + return fd + + +def cast_form_data(form_data): + """Translates old to new form_data""" + d = {} + fields = frontend_config.get('fields', {}) + for k, v in form_data.items(): + field_config = fields.get(k, {}) + ft = field_config.get('type') + if ft == 'CheckboxField': + # bug in some urls with dups on bools + if isinstance(v, list): + v = 'y' in v + else: + v = True if v in ('true', 'y') or v is True else False + elif v and ft == 'TextField' and field_config.get('isInt'): + v = int(v) if v != '' else None + elif v and ft == 'TextField' and field_config.get('isFloat'): + v = float(v) if v != '' else None + elif v and ft == 'SelectField': + if field_config.get('multi') and not isinstance(v, list): + v = [v] + if d.get('slice_id'): + d['slice_id'] = int(d['slice_id']) + + d[k] = v + if 'filters' not in d: + d = cast_filter_data(d) + for k in d.keys(): + if k not in FORM_DATA_KEY_WHITELIST: + del d[k] + return d + + diff --git a/superset/migrations/versions/a99f2f7c195a_rewriting_url_from_shortner_with_new_.py b/superset/migrations/versions/a99f2f7c195a_rewriting_url_from_shortner_with_new_.py new file mode 100644 index 0000000000..818d08d830 --- /dev/null +++ b/superset/migrations/versions/a99f2f7c195a_rewriting_url_from_shortner_with_new_.py @@ -0,0 +1,71 @@ +"""rewriting url from shortner with new format + +Revision ID: a99f2f7c195a +Revises: 53fc3de270ae +Create Date: 2017-02-08 14:16:34.948793 + +""" + +# revision 
identifiers, used by Alembic. +revision = 'a99f2f7c195a' +down_revision = 'db0c65b146bd' + +from alembic import op +import json +import sqlalchemy as sa +from superset import db +from superset.legacy import cast_form_data +from sqlalchemy.ext.declarative import declarative_base +from future.standard_library import install_aliases +install_aliases() +from urllib import parse + +Base = declarative_base() + +def parse_querystring(qs): + d = {} + for k, v in parse.parse_qsl(qs): + if not k in d: + d[k] = v + else: + if isinstance(d[k], list): + d[k].append(v) + else: + d[k] = [d[k], v] + return d + +class Url(Base): + + """Used for the short url feature""" + + __tablename__ = 'url' + id = sa.Column(sa.Integer, primary_key=True) + url = sa.Column(sa.Text) + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + + urls = session.query(Url).all() + urls_len = len(urls) + for i, url in enumerate(urls): + if ( + '?form_data' not in url.url and + '?' in url.url and + 'dbid' not in url.url and + url.url.startswith('//superset/explore')): + d = parse_querystring(url.url.split('?')[1]) + split = url.url.split('/') + d['datasource'] = split[5] + '__' + split[4] + d = cast_form_data(d) + newurl = '/'.join(split[:-1]) + '/?form_data=' + parse.quote_plus(json.dumps(d)) + url.url = newurl + session.merge(url) + session.commit() + print('Updating url ({}/{})'.format(i, urls_len)) + session.close() + + +def downgrade(): + pass diff --git a/superset/migrations/versions/d6db5a5cdb5d_.py b/superset/migrations/versions/d6db5a5cdb5d_.py new file mode 100644 index 0000000000..4a51fb8ff5 --- /dev/null +++ b/superset/migrations/versions/d6db5a5cdb5d_.py @@ -0,0 +1,22 @@ +"""empty message + +Revision ID: d6db5a5cdb5d +Revises: ('a99f2f7c195a', 'bcf3126872fc') +Create Date: 2017-02-10 17:58:20.149960 + +""" + +# revision identifiers, used by Alembic. 
+revision = 'd6db5a5cdb5d' +down_revision = ('a99f2f7c195a', 'bcf3126872fc') + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + pass + + +def downgrade(): + pass diff --git a/superset/migrations/versions/db0c65b146bd_update_slice_model_json.py b/superset/migrations/versions/db0c65b146bd_update_slice_model_json.py new file mode 100644 index 0000000000..d4135562cf --- /dev/null +++ b/superset/migrations/versions/db0c65b146bd_update_slice_model_json.py @@ -0,0 +1,54 @@ +"""update_slice_model_json + +Revision ID: db0c65b146bd +Revises: f18570e03440 +Create Date: 2017-01-24 12:31:06.541746 + +""" + +# revision identifiers, used by Alembic. +revision = 'db0c65b146bd' +down_revision = 'f18570e03440' + +from alembic import op +import json +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import Column, Integer, String, Text + +from superset import db +from superset.legacy import cast_form_data + +Base = declarative_base() + + +class Slice(Base): + """Declarative class to do query in upgrade""" + __tablename__ = 'slices' + id = Column(Integer, primary_key=True) + datasource_type = Column(String(200)) + slice_name = Column(String(200)) + params = Column(Text) + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + + slices = session.query(Slice).all() + slice_len = len(slices) + for i, slc in enumerate(slices): + try: + d = json.loads(slc.params or '{}') + d = cast_form_data(d) + slc.params = json.dumps(d, indent=2, sort_keys=True) + session.merge(slc) + session.commit() + print('Upgraded ({}/{}): {}'.format(i, slice_len, slc.slice_name)) + except Exception as e: + print(slc.slice_name + ' error: ' + str(e)) + + session.close() + + +def downgrade(): + pass diff --git a/superset/models.py b/superset/models.py index 99af95499d..4e5b55697a 100644 --- a/superset/models.py +++ b/superset/models.py @@ -4,7 +4,6 @@ from __future__ import division from __future__ import print_function from __future__ import 
unicode_literals -import ast from collections import OrderedDict import functools import json @@ -13,6 +12,9 @@ import numpy import pickle import re import textwrap +from future.standard_library import install_aliases +install_aliases() +from urllib import parse from copy import deepcopy, copy from datetime import timedelta, datetime, date @@ -24,7 +26,7 @@ from sqlalchemy.engine.url import make_url from sqlalchemy.orm import subqueryload import sqlparse -from dateutil.parser import parse +from dateutil.parser import parse as dparse from flask import escape, g, Markup, request from flask_appbuilder import Model @@ -52,11 +54,10 @@ from sqlalchemy.sql import table, literal_column, text, column from sqlalchemy.sql.expression import ColumnClause, TextAsFrom from sqlalchemy_utils import EncryptedType -from werkzeug.datastructures import ImmutableMultiDict - from superset import ( - app, db, db_engine_specs, get_session, utils, sm, import_util + app, db, db_engine_specs, get_session, utils, sm, import_util, ) +from superset.legacy import cast_form_data from superset.source_registry import SourceRegistry from superset.viz import viz_types from superset.jinja_context import get_template_processor @@ -309,34 +310,37 @@ class Slice(Model, AuditMixinNullable, ImportMixin): except Exception as e: logging.exception(e) d['error'] = str(e) - d['slice_id'] = self.id - d['slice_name'] = self.slice_name - d['description'] = self.description - d['slice_url'] = self.slice_url - d['edit_url'] = self.edit_url - d['description_markeddown'] = self.description_markeddown - return d + return { + 'datasource': self.datasource_name, + 'description': self.description, + 'description_markeddown': self.description_markeddown, + 'edit_url': self.edit_url, + 'form_data': self.form_data, + 'slice_id': self.id, + 'slice_name': self.slice_name, + 'slice_url': self.slice_url, + } @property def json_data(self): return json.dumps(self.data) + @property + def form_data(self): + form_data = 
json.loads(self.params) + form_data['slice_id'] = self.id + form_data['viz_type'] = self.viz_type + form_data['datasource'] = ( + str(self.datasource_id) + '__' + self.datasource_type) + return form_data + @property def slice_url(self): """Defines the url to access the slice""" - try: - slice_params = json.loads(self.params) - except Exception as e: - logging.exception(e) - slice_params = {} - slice_params['slice_id'] = self.id - slice_params['json'] = "false" - slice_params['slice_name'] = self.slice_name - from werkzeug.urls import Href - href = Href( + return ( "/superset/explore/{obj.datasource_type}/" - "{obj.datasource_id}/".format(obj=self)) - return href(slice_params) + "{obj.datasource_id}/?form_data={params}".format( + obj=self, params=parse.quote(json.dumps(self.form_data)))) @property def slice_id_url(self): @@ -364,21 +368,15 @@ class Slice(Model, AuditMixinNullable, ImportMixin): url_params_multidict or self.params. :rtype: :py:class:viz.BaseViz """ - slice_params = json.loads(self.params) # {} + slice_params = json.loads(self.params) slice_params['slice_id'] = self.id slice_params['json'] = "false" slice_params['slice_name'] = self.slice_name slice_params['viz_type'] = self.viz_type if self.viz_type else "table" - if url_params_multidict: - slice_params.update(url_params_multidict) - to_del = [k for k in slice_params if k not in url_params_multidict] - for k in to_del: - del slice_params[k] - immutable_slice_params = ImmutableMultiDict(slice_params) - return viz_types[immutable_slice_params.get('viz_type')]( + return viz_types[slice_params.get('viz_type')]( self.datasource, - form_data=immutable_slice_params, + form_data=slice_params, slice_=self ) @@ -651,10 +649,13 @@ class Dashboard(Model, AuditMixinNullable, ImportMixin): }) -class Queryable(object): +class Datasource(object): """A common interface to objects that are queryable (tables and datasources)""" + # Used to do code highlighting when displaying the query in the UI + query_language = None 
+ @property def column_names(self): return sorted([c.column_name for c in self.columns]) @@ -686,33 +687,40 @@ class Queryable(object): else: return "/superset/explore/{obj.type}/{obj.id}/".format(obj=self) + @property + def column_formats(self): + return { + m.metric_name: m.d3format + for m in self.metrics + if m.d3format + } + @property def data(self): """data representation of the datasource sent to the frontend""" - gb_cols = [(col, col) for col in self.groupby_column_names] - all_cols = [(c, c) for c in self.column_names] - filter_cols = [(c, c) for c in self.filterable_column_names] order_by_choices = [] for s in sorted(self.column_names): order_by_choices.append((json.dumps([s, True]), s + ' [asc]')) order_by_choices.append((json.dumps([s, False]), s + ' [desc]')) d = { - 'id': self.id, - 'type': self.type, - 'name': self.name, - 'metrics_combo': self.metrics_combo, - 'order_by_choices': order_by_choices, - 'gb_cols': gb_cols, - 'all_cols': all_cols, - 'filterable_cols': filter_cols, + 'all_cols': utils.choicify(self.column_names), + 'column_formats': self.column_formats, + 'edit_url' : self.url, 'filter_select': self.filter_select_enabled, + 'filterable_cols': utils.choicify(self.filterable_column_names), + 'gb_cols': utils.choicify(self.groupby_column_names), + 'id': self.id, + 'metrics_combo': self.metrics_combo, + 'name': self.name, + 'order_by_choices': order_by_choices, + 'type': self.type, } if self.type == 'table': grains = self.database.grains() or [] if grains: grains = [(g.name, g.name) for g in grains] - d['granularity_sqla'] = [(c, c) for c in self.dttm_cols] + d['granularity_sqla'] = utils.choicify(self.dttm_cols) d['time_grain_sqla'] = grains return d @@ -1094,11 +1102,12 @@ class SqlMetric(Model, AuditMixinNullable, ImportMixin): return import_util.import_simple_obj(db.session, i_metric, lookup_obj) -class SqlaTable(Model, Queryable, AuditMixinNullable, ImportMixin): +class SqlaTable(Model, Datasource, AuditMixinNullable, ImportMixin): """An 
ORM object for SqlAlchemy table references""" type = "table" + query_language = 'sql' __tablename__ = 'tables' id = Column(Integer, primary_key=True) @@ -1172,7 +1181,7 @@ class SqlaTable(Model, Queryable, AuditMixinNullable, ImportMixin): @property def dttm_cols(self): l = [c.column_name for c in self.columns if c.is_dttm] - if self.main_dttm_col not in l: + if self.main_dttm_col and self.main_dttm_col not in l: l.append(self.main_dttm_col) return l @@ -1261,8 +1270,9 @@ class SqlaTable(Model, Queryable, AuditMixinNullable, ImportMixin): con=engine ) - def query( # sqla - self, groupby, metrics, + def get_query_str( # sqla + self, engine, qry_start_dttm, + groupby, metrics, granularity, from_dttm, to_dttm, filter=None, # noqa @@ -1285,7 +1295,6 @@ class SqlaTable(Model, Queryable, AuditMixinNullable, ImportMixin): cols = {col.column_name: col for col in self.columns} metrics_dict = {m.metric_name: m for m in self.metrics} - qry_start_dttm = datetime.now() if not granularity and is_timeseries: raise Exception(_( @@ -1374,20 +1383,16 @@ class SqlaTable(Model, Queryable, AuditMixinNullable, ImportMixin): where_clause_and = [] having_clause_and = [] - for col, op, eq in filter: + for flt in filter: + if not all([flt.get(s) for s in ['col', 'op', 'val']]): + continue + col = flt['col'] + op = flt['op'] + eq = ','.join(flt['val']) col_obj = cols[col] if op in ('in', 'not in'): - split = FilterPattern.split(eq)[1::2] - values = [types.strip() for types in split] - # attempt to get the values type if they are not in quotes - if not col_obj.is_string: - try: - values = [ast.literal_eval(v) for v in values] - except Exception as e: - logging.info(utils.error_msg_from_exception(e)) - values = [v.replace("'", '').strip() for v in values] - else: - values = [v.replace("'", '').strip() for v in values] + splitted = FilterPattern.split(eq)[1::2] + values = [types.strip("'").strip('"') for types in splitted] cond = col_obj.sqla_col.in_(values) if op == 'not in': cond = ~cond @@ 
-1443,12 +1448,18 @@ class SqlaTable(Model, Queryable, AuditMixinNullable, ImportMixin): qry = qry.select_from(tbl) - engine = self.database.get_sqla_engine() sql = "{}".format( qry.compile( engine, compile_kwargs={"literal_binds": True},), ) + logging.info(sql) sql = sqlparse.format(sql, reindent=True) + return sql + + def query(self, query_obj): + qry_start_dttm = datetime.now() + engine = self.database.get_sqla_engine() + sql = self.get_query_str(engine, qry_start_dttm, **query_obj) status = QueryStatus.SUCCESS error_message = None df = None @@ -1873,11 +1884,12 @@ class DruidMetric(Model, AuditMixinNullable, ImportMixin): return import_util.import_simple_obj(db.session, i_metric, lookup_obj) -class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): +class DruidDatasource(Model, AuditMixinNullable, Datasource, ImportMixin): """ORM object referencing Druid datasources (tables)""" type = "druid" + query_langtage = "json" baselink = "druiddatasourcemodelview" @@ -2045,7 +2057,7 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): if not results: return max_time = results[0]['result']['maxTime'] - max_time = parse(max_time) + max_time = dparse(max_time) # Query segmentMetadata for 7 days back. However, due to a bug, # we need to set this interval to more than 1 day ago to exclude # realtime segments, which triggered a bug (fixed in druid 0.8.2). @@ -2286,8 +2298,9 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): return df - def query( # druid - self, groupby, metrics, + def get_query_str( # druid + self, client, qry_start_dttm, + groupby, metrics, granularity, from_dttm, to_dttm, filter=None, # noqa @@ -2299,13 +2312,12 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): orderby=None, extras=None, # noqa select=None, # noqa - columns=None, ): + columns=None, phase=2): """Runs a query against Druid and returns a dataframe. 
This query interface is common to SqlAlchemy and Druid """ # TODO refactor into using a TBD Query object - qry_start_dttm = datetime.now() if not is_timeseries: granularity = 'all' inner_from_dttm = inner_from_dttm or from_dttm @@ -2401,7 +2413,6 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): if having_filters: qry['having'] = having_filters - client = self.cluster.get_pydruid_client() orig_filters = filters if len(groupby) == 0: del qry['dimensions'] @@ -2440,6 +2451,8 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): query_str += json.dumps( client.query_builder.last_query.query_dict, indent=2) query_str += "\n" + if phase == 1: + return query_str query_str += ( "//\nPhase 2 (built based on phase one's results)\n") df = client.export_pandas() @@ -2479,15 +2492,24 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): client.groupby(**qry) query_str += json.dumps( client.query_builder.last_query.query_dict, indent=2) + return query_str + + def query(self, query_obj): + qry_start_dttm = datetime.now() + client = self.cluster.get_pydruid_client() + query_str = self.get_query_str(client, qry_start_dttm, **query_obj) df = client.export_pandas() + if df is None or df.size == 0: raise Exception(_("No data was returned.")) df.columns = [ DTTM_ALIAS if c == 'timestamp' else c for c in df.columns] + is_timeseries = query_obj['is_timeseries'] \ + if 'is_timeseries' in query_obj else True if ( not is_timeseries and - granularity == "all" and + query_obj['granularity'] == "all" and DTTM_ALIAS in df.columns): del df[DTTM_ALIAS] @@ -2495,11 +2517,11 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): cols = [] if DTTM_ALIAS in df.columns: cols += [DTTM_ALIAS] - cols += [col for col in groupby if col in df.columns] - cols += [col for col in metrics if col in df.columns] + cols += [col for col in query_obj['groupby'] if col in df.columns] + cols += [col for col in 
query_obj['metrics'] if col in df.columns] df = df[cols] - time_offset = DruidDatasource.time_offset(granularity) + time_offset = DruidDatasource.time_offset(query_obj['granularity']) def increment_timestamp(ts): dt = utils.parse_human_datetime(ts).replace( @@ -2516,7 +2538,12 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): @staticmethod def get_filters(raw_filters): filters = None - for col, op, eq in raw_filters: + for flt in raw_filters: + if not all(f in flt for f in ['col', 'op', 'val']): + continue + col = flt['col'] + op = flt['op'] + eq = flt['val'] cond = None if op == '==': cond = Dimension(col) == eq @@ -2569,7 +2596,12 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable, ImportMixin): '<=': '>' } - for col, op, eq in raw_filters: + for flt in raw_filters: + if not all(f in flt for f in ['col', 'op', 'val']): + continue + col = flt['col'] + op = flt['op'] + eq = flt['val'] cond = None if op in ['==', '>', '<']: cond = self._get_having_obj(col, op, eq) diff --git a/superset/templates/superset/explore.html b/superset/templates/superset/explore.html deleted file mode 100644 index 8324adf4ee..0000000000 --- a/superset/templates/superset/explore.html +++ /dev/null @@ -1,307 +0,0 @@ -{% extends "superset/basic.html" %} - -{% block title %} - {% if slice %} - [slice] {{ slice.slice_name }} - {% else %} - [explore] {{ viz.datasource.table_name }} - {% endif %} -{% endblock %} - -{% block body %} - {% set datasource = viz.datasource %} - {% set form = viz.form %} - - {% macro panofield(fieldname)%} -
- {% set field = form.get_field(fieldname)%} -
- {{ field.label }} - {% if field.description %} - - {% endif %} - {{ field(class_=form.field_css_classes(field.name)) }} -
-
- {% endmacro %} - -
- -{% endblock %} - -{% block tail_js %} - {{ super() }} - {% with filename="explore" %} - {% include "superset/partials/_script_tag.html" %} - {% endwith %} -{% endblock %} diff --git a/superset/templates/superset/standalone.html b/superset/templates/superset/standalone.html deleted file mode 100644 index 276e47a62c..0000000000 --- a/superset/templates/superset/standalone.html +++ /dev/null @@ -1,33 +0,0 @@ - - - {{ viz.token }} - - - - - {% set CSS_THEME = appbuilder.get_app.config.get("CSS_THEME") %} - {% set height = request.args.get("height", 700) %} - {% if CSS_THEME %} - - {% endif %} - - - -
- loading -
-
- - {% with filename="css-theme" %} - {% include "superset/partials/_script_tag.html" %} - {% endwith %} - {% with filename="standalone" %} - {% include "superset/partials/_script_tag.html" %} - {% endwith %} - - diff --git a/superset/utils.py b/superset/utils.py index 7e6da2a1ca..5dea8a4ef0 100644 --- a/superset/utils.py +++ b/superset/utils.py @@ -529,8 +529,6 @@ def get_email_address_list(address_string): return address_string -# Forked from the flask_appbuilder.security.decorators -# TODO(bkyryliuk): contribute it back to FAB def has_access(f): """ Use this decorator to enable granular security permissions to your @@ -538,6 +536,9 @@ def has_access(f): associated to users. By default the permission's name is the methods name. + + Forked from the flask_appbuilder.security.decorators + TODO(bkyryliuk): contribute it back to FAB """ if hasattr(f, '_permission_name'): permission_str = f._permission_name @@ -559,3 +560,8 @@ def has_access(f): next=request.path)) f._permission_name = permission_str return functools.update_wrapper(wraps, f) + + +def choicify(values): + """Takes an iterable and makes an iterable of tuples with it""" + return [(v, v) for v in values] diff --git a/superset/views.py b/superset/views.py index ce98b96719..c2803131aa 100755 --- a/superset/views.py +++ b/superset/views.py @@ -18,7 +18,7 @@ import functools import sqlalchemy as sqla from flask import ( - g, request, redirect, flash, Response, render_template, Markup, url_for) + g, request, redirect, flash, Response, render_template, Markup) from flask_appbuilder import ModelView, CompactCRUDMixin, BaseView, expose from flask_appbuilder.actions import action from flask_appbuilder.models.sqla.interface import SQLAInterface @@ -32,7 +32,6 @@ from flask_babel import lazy_gettext as _ from sqlalchemy import create_engine from werkzeug.routing import BaseConverter -from wtforms.validators import ValidationError import superset from superset import ( @@ -183,15 +182,17 @@ def get_error_msg(): return 
error_msg -def json_error_response(msg, status=None): +def json_error_response(msg, status=None, stacktrace=None): data = {'error': msg} + if stacktrace: + data['stacktrace'] = stacktrace status = status if status else 500 return Response( - json.dumps(data), status=status, mimetype="application/json") + json.dumps(data), + status=status, mimetype="application/json") -def json_success(json_msg, status=None): - status = status if status else 200 +def json_success(json_msg, status=200): return Response(json_msg, status=status, mimetype="application/json") @@ -209,6 +210,9 @@ def api(f): return functools.update_wrapper(wraps, f) +def is_owner(obj, user): + """ Check if user is owner of the slice """ + return obj and obj.owners and user in obj.owners def check_ownership(obj, raise_if_false=True): """Meant to be used in `pre_update` hooks on models to enforce ownership @@ -355,7 +359,7 @@ def validate_json(form, field): # noqa json.loads(field.data) except Exception as e: logging.exception(e) - raise ValidationError("json isn't valid") + raise Exception("json isn't valid") def generate_download_headers(extension): @@ -1262,12 +1266,25 @@ class Superset(BaseSupersetView): role = sm.find_role(role_name) role.user = existing_users sm.get_session.commit() - return Response(json.dumps({ + return self.json_response({ 'role': role_name, '# missing users': len(missing_users), '# granted': len(existing_users), 'created_users': created_users, - }), status=201) + }, status=201) + + def json_response(self, obj, status=200): + return Response( + json.dumps(obj, default=utils.json_int_dttm_ser), + status=status, + mimetype="application/json") + + @has_access_api + @expose("/datasources/") + def datasources(self): + datasources = SourceRegistry.get_all_datasources(db.session) + datasources = [(str(o.id) + '__' + o.type, repr(o)) for o in datasources] + return self.json_response(datasources) @has_access_api @expose("/override_role_permissions/", methods=['POST']) @@ -1317,10 +1334,10 
@@ class Superset(BaseSupersetView): role.permissions.append(view_menu_perm) granted_perms.append(view_menu_perm.view_menu.name) db.session.commit() - return Response(json.dumps({ + return self.json_response({ 'granted': granted_perms, 'requested': list(db_ds_names) - }), status=201) + }, status=201) @log_this @has_access @@ -1446,6 +1463,20 @@ class Superset(BaseSupersetView): session.commit() return redirect('/accessrequestsmodelview/list/') + def get_form_data(self): + form_data = request.args.get("form_data") + if not form_data: + form_data = request.form.get("form_data") + if not form_data: + form_data = '{}' + d = json.loads(form_data) + extra_filters = request.args.get("extra_filters") + filters = d.get('filters', []) + if extra_filters: + extra_filters = json.loads(extra_filters) + d['filters'] = filters + extra_filters + return d + def get_viz( self, slice_id=None, @@ -1453,21 +1484,38 @@ class Superset(BaseSupersetView): datasource_type=None, datasource_id=None): if slice_id: - slc = db.session.query(models.Slice).filter_by(id=slice_id).one() + slc = ( + db.session.query(models.Slice) + .filter_by(id=slice_id) + .one() + ) return slc.get_viz() else: - viz_type = args.get('viz_type', 'table') + form_data=self.get_form_data() + viz_type = form_data.get('viz_type', 'table') datasource = SourceRegistry.get_datasource( datasource_type, datasource_id, db.session) viz_obj = viz.viz_types[viz_type]( - datasource, request.args if request.args else args) + datasource, + form_data=form_data, + ) return viz_obj @has_access @expose("/slice//") def slice(self, slice_id): viz_obj = self.get_viz(slice_id) - return redirect(viz_obj.get_url(**request.args)) + endpoint = ( + '/superset/explore/{}/{}?form_data={}' + .format( + viz_obj.datasource.type, + viz_obj.datasource.id, + json.dumps(viz_obj.form_data) + ) + ) + if request.args.get("standalone") == "true": + endpoint += '&standalone=true' + return redirect(endpoint) @log_this @has_access_api @@ -1480,21 +1528,54 @@ 
class Superset(BaseSupersetView): args=request.args) except Exception as e: logging.exception(e) - return json_error_response(utils.error_msg_from_exception(e)) + return json_error_response( + utils.error_msg_from_exception(e), + stacktrace=traceback.format_exc()) + if not self.datasource_access(viz_obj.datasource): return json_error_response(DATASOURCE_ACCESS_ERR, status=404) + if request.args.get("csv") == "true": + return Response( + viz_obj.get_csv(), + status=200, + headers=generate_download_headers("csv"), + mimetype="application/csv") + + if request.args.get("query") == "true": + try: + query_obj = viz_obj.query_obj() + engine = viz_obj.datasource.database.get_sqla_engine() \ + if datasource_type == 'table' \ + else viz_obj.datasource.cluster.get_pydruid_client() + if datasource_type == 'druid': + # only retrive first phase query for druid + query_obj['phase'] = 1 + query = viz_obj.datasource.get_query_str( + engine, datetime.now(), **query_obj) + except Exception as e: + return json_error_response(e) + return Response( + json.dumps({ + 'query': query, + 'language': viz_obj.datasource.query_language, + }), + status=200, + mimetype="application/json") + payload = {} try: payload = viz_obj.get_payload() except Exception as e: logging.exception(e) return json_error_response(utils.error_msg_from_exception(e)) - if payload.get('status') == QueryStatus.FAILED: - return json_error_response(viz_obj.json_dumps(payload)) - return json_success(viz_obj.json_dumps(payload)) + status = 200 + if payload.get('status') == QueryStatus.FAILED: + status = 400 + + return json_success(viz_obj.json_dumps(payload), status=status) @expose("/import_dashboards", methods=['GET', 'POST']) @log_this @@ -1523,35 +1604,31 @@ class Superset(BaseSupersetView): @has_access @expose("/explore///") def explore(self, datasource_type, datasource_id): - viz_type = request.args.get("viz_type") - slice_id = request.args.get('slice_id') - slc = None + form_data = self.get_form_data() + + datasource_id 
= int(datasource_id) + viz_type = form_data.get("viz_type") + slice_id = form_data.get('slice_id') user_id = g.user.get_id() if g.user else None + slc = None if slice_id: slc = db.session.query(models.Slice).filter_by(id=slice_id).first() error_redirect = '/slicemodelview/list/' - datasource_class = SourceRegistry.sources[datasource_type] - datasources = db.session.query(datasource_class).all() - datasources = sorted(datasources, key=lambda ds: ds.full_name) + datasource = ( + db.session.query(SourceRegistry.sources[datasource_type]) + .filter_by(id=datasource_id) + .one() + ) - try: - viz_obj = self.get_viz( - datasource_type=datasource_type, - datasource_id=datasource_id, - args=request.args) - except Exception as e: - flash('{}'.format(e), "alert") + if not datasource: + flash(DATASOURCE_MISSING_ERR, "danger") return redirect(error_redirect) - if not viz_obj.datasource: - flash(DATASOURCE_MISSING_ERR, "alert") - return redirect(error_redirect) - - if not self.datasource_access(viz_obj.datasource): + if not self.datasource_access(datasource): flash( - __(get_datasource_access_error_msg(viz_obj.datasource.name)), + __(get_datasource_access_error_msg(datasource.name)), "danger") return redirect( 'superset/request_access/?' 
@@ -1559,65 +1636,49 @@ class Superset(BaseSupersetView): 'datasource_id={datasource_id}&' ''.format(**locals())) - if not viz_type and viz_obj.datasource.default_endpoint: - return redirect(viz_obj.datasource.default_endpoint) + if not viz_type and datasource.default_endpoint: + return redirect(datasource.default_endpoint) # slc perms slice_add_perm = self.can_access('can_add', 'SliceModelView') - slice_edit_perm = check_ownership(slc, raise_if_false=False) + slice_overwrite_perm = is_owner(slc, g.user) slice_download_perm = self.can_access('can_download', 'SliceModelView') # handle save or overwrite action = request.args.get('action') if action in ('saveas', 'overwrite'): return self.save_or_overwrite_slice( - request.args, slc, slice_add_perm, slice_edit_perm) + request.args, + slc, slice_add_perm, + slice_overwrite_perm, + datasource_id, + datasource_type) - # find out if user is in explore v2 beta group - # and set flag `is_in_explore_v2_beta` - is_in_explore_v2_beta = sm.find_role('explore-v2-beta') in get_user_roles() - - # handle different endpoints - if request.args.get("csv") == "true": - payload = viz_obj.get_csv() - return Response( - payload, - status=200, - headers=generate_download_headers("csv"), - mimetype="application/csv") - elif request.args.get("standalone") == "true": - return self.render_template("superset/standalone.html", viz=viz_obj, standalone_mode=True) - elif request.args.get("V2") == "true" or is_in_explore_v2_beta: - # bootstrap data for explore V2 - bootstrap_data = { - "can_add": slice_add_perm, - "can_download": slice_download_perm, - "can_edit": slice_edit_perm, - # TODO: separate endpoint for fetching datasources - "datasources": [(d.id, d.full_name) for d in datasources], - "datasource_id": datasource_id, - "datasource_name": viz_obj.datasource.name, - "datasource_type": datasource_type, - "user_id": user_id, - "viz": json.loads(viz_obj.json_data), - "filter_select": viz_obj.datasource.filter_select_enabled - } - table_name = 
viz_obj.datasource.table_name \ - if datasource_type == 'table' \ - else viz_obj.datasource.datasource_name - return self.render_template( - "superset/explorev2.html", - bootstrap_data=json.dumps(bootstrap_data), - slice=slc, - table_name=table_name) - else: - return self.render_template( - "superset/explore.html", - viz=viz_obj, slice=slc, datasources=datasources, - can_add=slice_add_perm, can_edit=slice_edit_perm, - can_download=slice_download_perm, - userid=g.user.get_id() if g.user else '' - ) + form_data['datasource'] = str(datasource_id) + '__' + datasource_type + standalone = request.args.get("standalone") == "true" + bootstrap_data = { + "can_add": slice_add_perm, + "can_download": slice_download_perm, + "can_overwrite": slice_overwrite_perm, + "datasource": datasource.data, + # TODO: separate endpoint for fetching datasources + "form_data": form_data, + "datasource_id": datasource_id, + "datasource_type": datasource_type, + "slice": slc.data if slc else None, + "standalone": standalone, + "user_id": user_id, + "forced_height": request.args.get('height'), + } + table_name = datasource.table_name \ + if datasource_type == 'table' \ + else datasource.datasource_name + return self.render_template( + "superset/explorev2.html", + bootstrap_data=json.dumps(bootstrap_data), + slice=slc, + standalone_mode=standalone, + table_name=table_name) @api @has_access_api @@ -1662,44 +1723,28 @@ class Superset(BaseSupersetView): return json_success(obj.get_values_for_column(column)) def save_or_overwrite_slice( - self, args, slc, slice_add_perm, slice_edit_perm): + self, args, slc, slice_add_perm, slice_overwrite_perm, + datasource_id, datasource_type): """Save or overwrite a slice""" slice_name = args.get('slice_name') action = args.get('action') - - # TODO use form processing form wtforms - d = args.to_dict(flat=False) - del d['action'] - if 'previous_viz_type' in d: - del d['previous_viz_type'] - - as_list = ('metrics', 'groupby', 'columns', 'all_columns', - 
'mapbox_label', 'order_by_cols') - for k in d: - v = d.get(k) - if k in as_list and not isinstance(v, list): - d[k] = [v] if v else [] - if k not in as_list and isinstance(v, list): - d[k] = v[0] - - datasource_type = args.get('datasource_type') - datasource_id = args.get('datasource_id') + form_data = self.get_form_data() if action in ('saveas'): - if 'slice_id' in d: - d.pop('slice_id') # don't save old slice_id + if 'slice_id' in form_data: + form_data.pop('slice_id') # don't save old slice_id slc = models.Slice(owners=[g.user] if g.user else []) - slc.params = json.dumps(d, indent=4, sort_keys=True) + slc.params = json.dumps(form_data) slc.datasource_name = args.get('datasource_name') - slc.viz_type = args.get('viz_type') + slc.viz_type = form_data['viz_type'] slc.datasource_type = datasource_type slc.datasource_id = datasource_id slc.slice_name = slice_name if action in ('saveas') and slice_add_perm: self.save_slice(slc) - elif action == 'overwrite' and slice_edit_perm: + elif action == 'overwrite' and slice_overwrite_perm: self.overwrite_slice(slc) # Adding slice to a dashboard if requested @@ -1731,13 +1776,9 @@ class Superset(BaseSupersetView): db.session.commit() if request.args.get('goto_dash') == 'true': - if request.args.get('V2') == 'true': - return dash.url - return redirect(dash.url) + return dash.url else: - if request.args.get('V2') == 'true': - return slc.slice_url - return redirect(slc.slice_url) + return slc.slice_url def save_slice(self, slc): session = db.session() @@ -1747,15 +1788,11 @@ class Superset(BaseSupersetView): flash(msg, "info") def overwrite_slice(self, slc): - can_update = check_ownership(slc, raise_if_false=False) - if not can_update: - flash("You cannot overwrite [{}]".format(slc), "danger") - else: - session = db.session() - session.merge(slc) - session.commit() - msg = "Slice [{}] has been overwritten".format(slc.slice_name) - flash(msg, "info") + session = db.session() + session.merge(slc) + session.commit() + msg = "Slice 
[{}] has been overwritten".format(slc.slice_name) + flash(msg, "info") @api @has_access_api @@ -2603,11 +2640,12 @@ class Superset(BaseSupersetView): @expose("/fetch_datasource_metadata") @log_this def fetch_datasource_metadata(self): - datasource_type = request.args.get('datasource_type') + datasource_id, datasource_type = ( + request.args.get('datasourceKey').split('__')) datasource_class = SourceRegistry.sources[datasource_type] datasource = ( db.session.query(datasource_class) - .filter_by(id=request.args.get('datasource_id')) + .filter_by(id=int(datasource_id)) .first() ) diff --git a/superset/viz.py b/superset/viz.py index 6d7532359a..0186ff5433 100755 --- a/superset/viz.py +++ b/superset/viz.py @@ -11,6 +11,7 @@ from __future__ import unicode_literals import copy import hashlib import logging +import traceback import uuid import zlib @@ -29,8 +30,7 @@ from werkzeug.urls import Href from dateutil import relativedelta as rdelta from superset import app, utils, cache -from superset.forms import FormFactory -from superset.utils import flasher, DTTM_ALIAS +from superset.utils import DTTM_ALIAS config = app.config @@ -43,13 +43,6 @@ class BaseViz(object): verbose_name = "Base Viz" credits = "" is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'metrics', 'groupby', - ) - },) - form_overrides = {} def __init__(self, datasource, form_data, slice_=None): self.orig_form_data = form_data @@ -59,101 +52,17 @@ class BaseViz(object): self.request = request self.viz_type = form_data.get("viz_type") self.slice = slice_ + self.form_data = form_data - # TODO refactor all form related logic out of here and into forms.py - ff = FormFactory(self) - form_class = ff.get_form() - defaults = form_class().data.copy() - previous_viz_type = form_data.get('previous_viz_type') - if isinstance(form_data, (MultiDict, ImmutableMultiDict)): - form = form_class(form_data) - else: - form = form_class(**form_data) - data = form.data.copy() - - if not form.validate(): - for 
k, v in form.errors.items(): - if not data.get('json') and not data.get('async'): - flasher("{}: {}".format(k, " ".join(v)), 'danger') - if previous_viz_type != self.viz_type: - data = { - k: form.data[k] - for k in form_data.keys() - if k in form.data} - defaults.update(data) - self.form_data = defaults self.query = "" - self.form_data['previous_viz_type'] = self.viz_type self.token = self.form_data.get( 'token', 'token_' + uuid.uuid4().hex[:8]) self.metrics = self.form_data.get('metrics') or [] self.groupby = self.form_data.get('groupby') or [] - self.reassignments() self.status = None self.error_message = None - @classmethod - def flat_form_fields(cls): - l = set() - for d in cls.fieldsets: - for obj in d['fields']: - if obj and isinstance(obj, (tuple, list)): - l |= {a for a in obj if a} - elif obj: - l.add(obj) - return tuple(l) - - def reassignments(self): - pass - - def get_url(self, for_cache_key=False, json_endpoint=False, **kwargs): - """Returns the URL for the viz - - :param for_cache_key: when getting the url as the identifier to hash - for the cache key - :type for_cache_key: boolean - """ - d = self.orig_form_data.copy() - if 'json' in d: - del d['json'] - if 'action' in d: - del d['action'] - if 'slice_id' in d: - del d['slice_id'] - d.update(kwargs) - # Remove unchecked checkboxes because HTML is weird like that - od = MultiDict() - for key in sorted(d.keys()): - # if MultiDict is initialized with MD({key:[emptyarray]}), - # key is included in d.keys() but accessing it throws - try: - if d[key] is False: - del d[key] - continue - except IndexError: - pass - - if isinstance(d, (MultiDict, ImmutableMultiDict)): - v = d.getlist(key) - else: - v = d.get(key) - if not isinstance(v, list): - v = [v] - for item in v: - od.add(key, item) - - base_endpoint = '/superset/explore' - if json_endpoint: - base_endpoint = '/superset/explore_json' - - href = Href( - '{base_endpoint}/{self.datasource.type}/' - '{self.datasource.id}/'.format(**locals())) - if 
for_cache_key and 'force' in od: - del od['force'] - return href(od) - def get_filter_url(self): """Returns the URL to retrieve column values used in the filter""" data = self.orig_form_data.copy() @@ -197,10 +106,10 @@ class BaseViz(object): timestamp_format = dttm_col.python_date_format # The datasource here can be different backend but the interface is common - self.results = self.datasource.query(**query_obj) + self.results = self.datasource.query(query_obj) + self.query = self.results.query self.status = self.results.status self.error_message = self.results.error_message - self.query = self.results.query df = self.results.df # Transform the timestamp we received from database to pandas supported @@ -225,46 +134,12 @@ class BaseViz(object): df = df.fillna(0) return df - @property - def form(self): - return self.form_class(**self.form_data) - - @property - def form_class(self): - return FormFactory(self).get_form() - def get_extra_filters(self): extra_filters = self.form_data.get('extra_filters') if not extra_filters: return {} return json.loads(extra_filters) - def query_filters(self, is_having_filter=False): - """Processes the filters for the query""" - form_data = self.form_data - # Building filters - filters = [] - field_prefix = 'flt' if not is_having_filter else 'having' - for i in range(1, 10): - col = form_data.get(field_prefix + "_col_" + str(i)) - op = form_data.get(field_prefix + "_op_" + str(i)) - eq = form_data.get(field_prefix + "_eq_" + str(i)) - if col and op and eq is not None: - filters.append((col, op, eq)) - - if is_having_filter: - return filters - - # Extra filters (coming from dashboard) - for col, vals in self.get_extra_filters().items(): - if not (col and vals): - continue - elif col in self.datasource.filterable_column_names: - # Quote values with comma to avoid conflict - vals = ["'{}'".format(x) if "," in x else x for x in vals] - filters += [(col, 'in', ",".join(vals))] - return filters - def query_obj(self): """Building a query 
object""" form_data = self.form_data @@ -274,10 +149,10 @@ class BaseViz(object): granularity = ( form_data.get("granularity") or form_data.get("granularity_sqla") ) - limit = int(form_data.get("limit", 0)) + limit = int(form_data.get("limit") or 0) timeseries_limit_metric = form_data.get("timeseries_limit_metric") row_limit = int( - form_data.get("row_limit", config.get("ROW_LIMIT"))) + form_data.get("row_limit") or config.get("ROW_LIMIT")) since = ( extra_filters.get('__from') or form_data.get("since", "1 year ago") ) @@ -288,18 +163,31 @@ class BaseViz(object): until = extra_filters.get('__to') or form_data.get("until", "now") to_dttm = utils.parse_human_datetime(until) if from_dttm > to_dttm: - flasher("The date range doesn't seem right.", "danger") - from_dttm = to_dttm # Making them identical to not raise + raise Exception("From date cannot be larger than to date") # extras are used to query elements specific to a datasource type # for instance the extra where clause that applies only to Tables extras = { 'where': form_data.get("where", ''), 'having': form_data.get("having", ''), - 'having_druid': self.query_filters(is_having_filter=True), + 'having_druid': form_data.get('having_filters') \ + if 'having_filters' in form_data else [], 'time_grain_sqla': form_data.get("time_grain_sqla", ''), 'druid_time_origin': form_data.get("druid_time_origin", ''), } + filters = form_data['filters'] if 'filters' in form_data \ + else [] + for col, vals in self.get_extra_filters().items(): + if not (col and vals): + continue + elif col in self.datasource.filterable_column_names: + # Quote values with comma to avoid conflict + vals = ["'{}'".format(x) if "," in x else x for x in vals] + filters += [{ + 'col': col, + 'op': 'in', + 'val': ",".join(vals), + }] d = { 'granularity': granularity, 'from_dttm': from_dttm, @@ -308,7 +196,7 @@ class BaseViz(object): 'groupby': groupby, 'metrics': metrics, 'row_limit': row_limit, - 'filter': self.query_filters(), + 'filter': filters, 
'timeseries_limit': limit, 'extras': extras, 'timeseries_limit_metric': timeseries_limit_metric, @@ -333,6 +221,11 @@ class BaseViz(object): self.get_payload(force), default=utils.json_int_dttm_ser, ignore_nan=True) + @property + def cache_key(self): + s = str((k, self.form_data[k]) for k in sorted(self.form_data.keys())) + return hashlib.md5(s.encode('utf-8')).hexdigest() + def get_payload(self, force=False): """Handles caching around the json payload retrieval""" cache_key = self.cache_key @@ -355,29 +248,31 @@ class BaseViz(object): logging.info("Serving from cache") if not payload: + data = None is_cached = False cache_timeout = self.cache_timeout + stacktrace = None try: - data = self.get_data() + df = self.get_df() + if not self.error_message: + data = self.get_data(df) except Exception as e: logging.exception(e) if not self.error_message: self.error_message = str(e) self.status = utils.QueryStatus.FAILED data = None + stacktrace = traceback.format_exc() payload = { 'cache_key': cache_key, 'cache_timeout': cache_timeout, - 'column_formats': self.data['column_formats'], - 'csv_endpoint': self.csv_endpoint, 'data': data, 'error': self.error_message, 'filter_endpoint': self.filter_endpoint, 'form_data': self.form_data, - 'json_endpoint': self.json_endpoint, 'query': self.query, - 'standalone_endpoint': self.standalone_endpoint, 'status': self.status, + 'stacktrace': stacktrace, } payload['cached_dttm'] = datetime.now().isoformat().split('.')[0] logging.info("Caching for the next {} seconds".format( @@ -406,19 +301,11 @@ class BaseViz(object): def data(self): """This is the data object serialized to the js layer""" content = { - 'csv_endpoint': self.csv_endpoint, 'form_data': self.form_data, - 'json_endpoint': self.json_endpoint, 'filter_endpoint': self.filter_endpoint, - 'standalone_endpoint': self.standalone_endpoint, 'token': self.token, 'viz_name': self.viz_type, 'filter_select_enabled': self.datasource.filter_select_enabled, - 'column_formats': { - 
m.metric_name: m.d3format - for m in self.datasource.metrics - if m.d3format - }, } return content @@ -444,8 +331,7 @@ class BaseViz(object): until = form_data.get("until", "now") to_dttm = utils.parse_human_datetime(until) if from_dttm > to_dttm: - flasher("The date range doesn't seem right.", "danger") - from_dttm = to_dttm # Making them identical to not raise + raise Exception("From date cannot be larger than to date") kwargs = dict( column_name=column, @@ -455,30 +341,13 @@ class BaseViz(object): df = self.datasource.values_for_column(**kwargs) return df[column].to_json() - def get_data(self): + def get_data(self, df): return [] - @property - def json_endpoint(self): - return self.get_url(json_endpoint=True) - @property def filter_endpoint(self): return self.get_filter_url() - @property - def cache_key(self): - url = self.get_url(for_cache_key=True, json="true", force="false") - return hashlib.md5(url.encode('utf-8')).hexdigest() - - @property - def csv_endpoint(self): - return self.get_url(csv="true") - - @property - def standalone_endpoint(self): - return self.get_url(standalone="true") - @property def json_data(self): return json.dumps(self.data) @@ -491,28 +360,6 @@ class TableViz(BaseViz): viz_type = "table" verbose_name = _("Table View") credits = 'a Superset original' - fieldsets = ({ - 'label': _("GROUP BY"), - 'description': _('Use this section if you want a query that aggregates'), - 'fields': ('groupby', 'metrics') - }, { - 'label': _("NOT GROUPED BY"), - 'description': _('Use this section if you want to query atomic rows'), - 'fields': ('all_columns', 'order_by_cols'), - }, { - 'label': _("Options"), - 'fields': ( - 'table_timestamp_format', - 'row_limit', - 'page_length', - ('include_search', 'table_filter'), - ) - }) - form_overrides = ({ - 'metrics': { - 'default': [], - }, - }) is_timeseries = False def query_obj(self): @@ -529,8 +376,7 @@ class TableViz(BaseViz): d['orderby'] = [json.loads(t) for t in order_by_cols] return d - def 
get_data(self): - df = self.get_df() + def get_data(self, df): if ( self.form_data.get("granularity") == "all" and DTTM_ALIAS in df): @@ -553,15 +399,6 @@ class PivotTableViz(BaseViz): verbose_name = _("Pivot Table") credits = 'a Superset original' is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'groupby', - 'columns', - 'metrics', - 'pandas_aggfunc', - ) - },) def query_obj(self): d = super(PivotTableViz, self).query_obj() @@ -584,8 +421,7 @@ class PivotTableViz(BaseViz): d['groupby'] = list(set(groupby) | set(columns)) return d - def get_data(self): - df = self.get_df() + def get_data(self, df): if ( self.form_data.get("granularity") == "all" and DTTM_ALIAS in df): @@ -610,13 +446,12 @@ class MarkupViz(BaseViz): viz_type = "markup" verbose_name = _("Markup") - fieldsets = ({ - 'label': None, - 'fields': ('markup_type', 'code') - },) is_timeseries = False - def get_data(self): + def get_df(self): + return True + + def get_data(self, df): markup_type = self.form_data.get("markup_type") code = self.form_data.get("code", '') if markup_type == "markdown": @@ -630,17 +465,6 @@ class SeparatorViz(MarkupViz): viz_type = "separator" verbose_name = _("Separator") - form_overrides = { - 'code': { - 'default': ( - "####Section Title\n" - "A paragraph describing the section" - "of the dashboard, right before the separator line " - "\n\n" - "---------------" - ), - } - } class WordCloudViz(BaseViz): @@ -654,14 +478,6 @@ class WordCloudViz(BaseViz): viz_type = "word_cloud" verbose_name = _("Word Cloud") is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'series', 'metric', 'limit', - ('size_from', 'size_to'), - 'rotation', - ) - },) def query_obj(self): d = super(WordCloudViz, self).query_obj() @@ -670,8 +486,7 @@ class WordCloudViz(BaseViz): d['groupby'] = [self.form_data.get('series')] return d - def get_data(self): - df = self.get_df() + def get_data(self, df): # Ordering the columns df = df[[self.form_data.get('series'), 
self.form_data.get('metric')]] # Labeling the columns for uniform json schema @@ -687,19 +502,6 @@ class TreemapViz(BaseViz): verbose_name = _("Treemap") credits = 'd3.js' is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'metrics', - 'groupby', - ), - }, { - 'label': _('Chart Options'), - 'fields': ( - 'treemap_ratio', - 'number_format', - ) - },) def _nest(self, metric, df): nlevels = df.index.nlevels @@ -711,8 +513,7 @@ class TreemapViz(BaseViz): for l in df.index.levels[0]] return result - def get_data(self): - df = self.get_df() + def get_data(self, df): df = df.set_index(self.form_data.get("groupby")) chart_data = [{"name": metric, "children": self._nest(metric, df)} for metric in df.columns] @@ -728,17 +529,8 @@ class CalHeatmapViz(BaseViz): credits = ( 'cal-heatmap') is_timeseries = True - fieldsets = ({ - 'label': None, - 'fields': ( - 'metric', - 'domain_granularity', - 'subdomain_granularity', - ), - },) - def get_data(self): - df = self.get_df() + def get_data(self, df): form_data = self.form_data df.columns = ["timestamp", "metric"] @@ -794,23 +586,32 @@ class BoxPlotViz(NVD3Viz): verbose_name = _("Box Plot") sort_series = False is_timeseries = True - fieldsets = ({ - 'label': None, - 'fields': ( - 'metrics', - 'groupby', 'limit', - ), - }, { - 'label': _('Chart Options'), - 'fields': ( - 'whisker_options', - ) - },) - def get_df(self, query_obj=None): + def to_series(self, df, classed='', title_suffix=''): + label_sep = " - " + chart_data = [] + for index_value, row in zip(df.index, df.to_dict(orient="records")): + if isinstance(index_value, tuple): + index_value = label_sep.join(index_value) + boxes = defaultdict(dict) + for (label, key), value in row.items(): + if key == "median": + key = "Q2" + boxes[label][key] = value + for label, box in boxes.items(): + if len(self.form_data.get("metrics")) > 1: + # need to render data labels with metrics + chart_label = label_sep.join([index_value, label]) + else: + chart_label = 
index_value + chart_data.append({ + "label": chart_label, + "values": box, + }) + return chart_data + + def get_data(self, df): form_data = self.form_data - df = super(BoxPlotViz, self).get_df(query_obj) - df = df.fillna(0) # conform to NVD3 names @@ -862,33 +663,6 @@ class BoxPlotViz(NVD3Viz): aggregate = [Q1, np.median, Q3, whisker_high, whisker_low, outliers] df = df.groupby(form_data.get('groupby')).agg(aggregate) - return df - - def to_series(self, df, classed='', title_suffix=''): - label_sep = " - " - chart_data = [] - for index_value, row in zip(df.index, df.to_dict(orient="records")): - if isinstance(index_value, tuple): - index_value = label_sep.join(index_value) - boxes = defaultdict(dict) - for (label, key), value in row.items(): - if key == "median": - key = "Q2" - boxes[label][key] = value - for label, box in boxes.items(): - if len(self.form_data.get("metrics")) > 1: - # need to render data labels with metrics - chart_label = label_sep.join([index_value, label]) - else: - chart_label = index_value - chart_data.append({ - "label": chart_label, - "values": box, - }) - return chart_data - - def get_data(self): - df = self.get_df() chart_data = self.to_series(df) return chart_data @@ -900,22 +674,6 @@ class BubbleViz(NVD3Viz): viz_type = "bubble" verbose_name = _("Bubble Chart") is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'series', 'entity', - 'x', 'y', - 'size', 'limit', - ) - }, { - 'label': _('Chart Options'), - 'fields': ( - ('x_log_scale', 'y_log_scale'), - ('show_legend', None), - 'max_bubble_size', - ('x_axis_label', 'y_axis_label'), - ) - },) def query_obj(self): form_data = self.form_data @@ -939,18 +697,13 @@ class BubbleViz(NVD3Viz): raise Exception("Pick a metric for x, y and size") return d - def get_df(self, query_obj=None): - df = super(BubbleViz, self).get_df(query_obj) - df = df.fillna(0) + def get_data(self, df): df['x'] = df[[self.x_metric]] df['y'] = df[[self.y_metric]] df['size'] = df[[self.z_metric]] 
df['shape'] = 'circle' df['group'] = df[[self.series]] - return df - def get_data(self): - df = self.get_df() series = defaultdict(list) for row in df.to_dict(orient='records'): series[row['group']].append(row) @@ -969,15 +722,6 @@ class BulletViz(NVD3Viz): viz_type = "bullet" verbose_name = _("Bullet Chart") is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'metric', - 'ranges', 'range_labels', - 'markers', 'marker_labels', - 'marker_lines', 'marker_line_labels', - ) - },) def query_obj(self): form_data = self.form_data @@ -1005,14 +749,9 @@ class BulletViz(NVD3Viz): raise Exception("Pick a metric to display") return d - def get_df(self, query_obj=None): - df = super(BulletViz, self).get_df(query_obj) + def get_data(self, df): df = df.fillna(0) df['metric'] = df[[self.metric]] - return df - - def get_data(self): - df = self.get_df() values = df['metric'].values return { 'measures': values.tolist(), @@ -1033,25 +772,6 @@ class BigNumberViz(BaseViz): verbose_name = _("Big Number with Trendline") credits = 'a Superset original' is_timeseries = True - fieldsets = ({ - 'label': None, - 'fields': ( - 'metric', - 'compare_lag', - 'compare_suffix', - 'y_axis_format', - ) - },) - form_overrides = { - 'y_axis_format': { - 'label': _('Number format'), - } - } - - def reassignments(self): - metric = self.form_data.get('metric') - if not metric: - self.form_data['metric'] = self.orig_form_data.get('metrics') def query_obj(self): d = super(BigNumberViz, self).query_obj() @@ -1062,12 +782,10 @@ class BigNumberViz(BaseViz): self.form_data['metric'] = metric return d - def get_data(self): + def get_data(self, df): form_data = self.form_data - df = self.get_df() df.sort_values(by=df.columns[0], inplace=True) - compare_lag = form_data.get("compare_lag", "") - compare_lag = int(compare_lag) if compare_lag and compare_lag.isdigit() else 0 + compare_lag = form_data.get("compare_lag") return { 'data': df.values.tolist(), 'compare_lag': compare_lag, @@ -1083,24 
+801,6 @@ class BigNumberTotalViz(BaseViz): verbose_name = _("Big Number") credits = 'a Superset original' is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'metric', - 'subheader', - 'y_axis_format', - ) - },) - form_overrides = { - 'y_axis_format': { - 'label': _('Number format'), - } - } - - def reassignments(self): - metric = self.form_data.get('metric') - if not metric: - self.form_data['metric'] = self.orig_form_data.get('metrics') def query_obj(self): d = super(BigNumberTotalViz, self).query_obj() @@ -1111,9 +811,8 @@ class BigNumberTotalViz(BaseViz): self.form_data['metric'] = metric return d - def get_data(self): + def get_data(self, df): form_data = self.form_data - df = self.get_df() df.sort_values(by=df.columns[0], inplace=True) return { 'data': df.values.tolist(), @@ -1129,96 +828,6 @@ class NVD3TimeSeriesViz(NVD3Viz): verbose_name = _("Time Series - Line Chart") sort_series = False is_timeseries = True - fieldsets = ({ - 'label': None, - 'fields': ( - 'metrics', - 'groupby', - ('limit', 'timeseries_limit_metric'), - ), - }, { - 'label': _('Chart Options'), - 'fields': ( - ('show_brush', 'show_legend'), - ('rich_tooltip', 'y_axis_zero'), - ('y_log_scale', 'contribution'), - ('show_markers', 'x_axis_showminmax'), - ('line_interpolation', None), - ('x_axis_format', 'y_axis_format'), - ('x_axis_label', 'y_axis_label'), - ), - }, { - 'label': _('Advanced Analytics'), - 'description': _( - "This section contains options " - "that allow for advanced analytical post processing " - "of query results"), - 'fields': ( - ('rolling_type', 'rolling_periods'), - 'time_compare', - ('num_period_compare', 'period_ratio_type'), - None, - ('resample_how', 'resample_rule',), 'resample_fillmethod' - ), - },) - - def get_df(self, query_obj=None): - form_data = self.form_data - df = super(NVD3TimeSeriesViz, self).get_df(query_obj) - df = df.fillna(0) - if form_data.get("granularity") == "all": - raise Exception("Pick a time granularity for your time 
series") - - df = df.pivot_table( - index=DTTM_ALIAS, - columns=form_data.get('groupby'), - values=form_data.get('metrics')) - - fm = form_data.get("resample_fillmethod") - if not fm: - fm = None - how = form_data.get("resample_how") - rule = form_data.get("resample_rule") - if how and rule: - df = df.resample(rule, how=how, fill_method=fm) - if not fm: - df = df.fillna(0) - - if self.sort_series: - dfs = df.sum() - dfs.sort_values(ascending=False, inplace=True) - df = df[dfs.index] - - if form_data.get("contribution"): - dft = df.T - df = (dft / dft.sum()).T - - rolling_periods = form_data.get("rolling_periods") - rolling_type = form_data.get("rolling_type") - - if rolling_type in ('mean', 'std', 'sum') and rolling_periods: - if rolling_type == 'mean': - df = pd.rolling_mean(df, int(rolling_periods), min_periods=0) - elif rolling_type == 'std': - df = pd.rolling_std(df, int(rolling_periods), min_periods=0) - elif rolling_type == 'sum': - df = pd.rolling_sum(df, int(rolling_periods), min_periods=0) - elif rolling_type == 'cumsum': - df = df.cumsum() - - num_period_compare = form_data.get("num_period_compare") - if num_period_compare: - num_period_compare = int(num_period_compare) - prt = form_data.get('period_ratio_type') - if prt and prt == 'growth': - df = (df / df.shift(num_period_compare)) - 1 - elif prt and prt == 'value': - df = df - df.shift(num_period_compare) - else: - df = df / df.shift(num_period_compare) - - df = df[num_period_compare:] - return df def to_series(self, df, classed='', title_suffix=''): cols = [] @@ -1237,7 +846,6 @@ class NVD3TimeSeriesViz(NVD3Viz): ys = series[name] if df[name].dtype.kind not in "biufc": continue - df[DTTM_ALIAS] = pd.to_datetime(df.index, utc=False) if isinstance(name, string_types): series_title = name else: @@ -1254,17 +862,71 @@ class NVD3TimeSeriesViz(NVD3Viz): "classed": classed, "values": [ {'x': ds, 'y': ys[ds] if ds in ys else None} - for ds in df[DTTM_ALIAS] + for ds in df.index ], } chart_data.append(d) 
return chart_data - def get_data(self): - df = self.get_df() + def get_data(self, df): + fd = self.form_data + df = df.fillna(0) + if fd.get("granularity") == "all": + raise Exception("Pick a time granularity for your time series") + + df = df.pivot_table( + index=DTTM_ALIAS, + columns=fd.get('groupby'), + values=fd.get('metrics')) + + fm = fd.get("resample_fillmethod") + if not fm: + fm = None + how = fd.get("resample_how") + rule = fd.get("resample_rule") + if how and rule: + df = df.resample(rule, how=how, fill_method=fm) + if not fm: + df = df.fillna(0) + + if self.sort_series: + dfs = df.sum() + dfs.sort_values(ascending=False, inplace=True) + df = df[dfs.index] + + if fd.get("contribution"): + dft = df.T + df = (dft / dft.sum()).T + + rolling_periods = fd.get("rolling_periods") + rolling_type = fd.get("rolling_type") + + if rolling_type in ('mean', 'std', 'sum') and rolling_periods: + if rolling_type == 'mean': + df = pd.rolling_mean(df, int(rolling_periods), min_periods=0) + elif rolling_type == 'std': + df = pd.rolling_std(df, int(rolling_periods), min_periods=0) + elif rolling_type == 'sum': + df = pd.rolling_sum(df, int(rolling_periods), min_periods=0) + elif rolling_type == 'cumsum': + df = df.cumsum() + + num_period_compare = fd.get("num_period_compare") + if num_period_compare: + num_period_compare = int(num_period_compare) + prt = fd.get('period_ratio_type') + if prt and prt == 'growth': + df = (df / df.shift(num_period_compare)) - 1 + elif prt and prt == 'value': + df = df - df.shift(num_period_compare) + else: + df = df / df.shift(num_period_compare) + + df = df[num_period_compare:] + chart_data = self.to_series(df) - time_compare = self.form_data.get('time_compare') + time_compare = fd.get('time_compare') if time_compare: query_object = self.query_obj() delta = utils.parse_human_timedelta(time_compare) @@ -1274,7 +936,11 @@ class NVD3TimeSeriesViz(NVD3Viz): query_object['to_dttm'] -= delta df2 = self.get_df(query_object) - df2.index += delta + 
df2[DTTM_ALIAS] += delta + df2 = df2.pivot_table( + index=DTTM_ALIAS, + columns=fd.get('groupby'), + values=fd.get('metrics')) chart_data += self.to_series( df2, classed='superset', title_suffix="---") chart_data = sorted(chart_data, key=lambda x: x['key']) @@ -1289,54 +955,17 @@ class NVD3DualLineViz(NVD3Viz): verbose_name = _("Time Series - Dual Axis Line Chart") sort_series = False is_timeseries = True - fieldsets = ({ - 'label': _('Chart Options'), - 'fields': ('x_axis_format',), - }, { - 'label': _('Y Axis 1'), - 'fields': ( - 'metric', - 'y_axis_format' - ), - }, { - 'label': _('Y Axis 2'), - 'fields': ( - 'metric_2', - 'y_axis_2_format' - ), - },) - form_overrides = { - 'y_axis_format': { - 'label': _('Left Axis Format'), - 'description': _("Select the numeric column to draw the histogram"), - }, - 'metric': { - 'label': _("Left Axis Metric"), - } - } - - def get_df(self, query_obj=None): - if not query_obj: - query_obj = super(NVD3DualLineViz, self).query_obj() - metrics = [ - self.form_data.get('metric'), - self.form_data.get('metric_2') - ] - query_obj['metrics'] = metrics - df = super(NVD3DualLineViz, self).get_df(query_obj) - df = df.fillna(0) - if self.form_data.get("granularity") == "all": - raise Exception("Pick a time granularity for your time series") - - df = df.pivot_table( - index=DTTM_ALIAS, - values=metrics) - - return df def query_obj(self): d = super(NVD3DualLineViz, self).query_obj() - if self.form_data.get('metric') == self.form_data.get('metric_2'): + m1 = self.form_data.get('metric') + m2 = self.form_data.get('metric_2') + d['metrics'] = [m1, m2] + if not m1: + raise Exception("Pick a metric for left axis!") + if not m2: + raise Exception("Pick a metric for right axis!") + if m1 == m2: raise Exception("Please choose different metrics" " on left and right axis") return d @@ -1353,7 +982,6 @@ class NVD3DualLineViz(NVD3Viz): df.columns = cols series = df.to_dict('series') chart_data = [] - index_list = df.T.index.tolist() metrics = [ 
self.form_data.get('metric'), self.form_data.get('metric_2') @@ -1362,7 +990,6 @@ class NVD3DualLineViz(NVD3Viz): ys = series[m] if df[m].dtype.kind not in "biufc": continue - df[DTTM_ALIAS] = pd.to_datetime(df.index, utc=False) series_title = m d = { "key": series_title, @@ -1377,16 +1004,19 @@ class NVD3DualLineViz(NVD3Viz): chart_data.append(d) return chart_data - def get_data(self): - form_data = self.form_data - metric = form_data.get('metric') - metric_2 = form_data.get('metric_2') - if not metric: - raise Exception("Pick a metric for left axis!") - if not metric_2: - raise Exception("Pick a metric for right axis!") + def get_data(self, df): + fd = self.form_data + df = df.fillna(0) + + if self.form_data.get("granularity") == "all": + raise Exception("Pick a time granularity for your time series") + + metric = fd.get('metric') + metric_2 = fd.get('metric_2') + df = df.pivot_table( + index=DTTM_ALIAS, + values=[metric, metric_2]) - df = self.get_df() chart_data = self.to_series(df) return chart_data @@ -1398,18 +1028,6 @@ class NVD3TimeSeriesBarViz(NVD3TimeSeriesViz): viz_type = "bar" sort_series = True verbose_name = _("Time Series - Bar Chart") - fieldsets = [NVD3TimeSeriesViz.fieldsets[0]] + [{ - 'label': _('Chart Options'), - 'fields': ( - ('show_brush', 'show_legend', 'show_bar_value'), - ('rich_tooltip', 'y_axis_zero'), - ('y_log_scale', 'contribution'), - ('x_axis_format', 'y_axis_format'), - ('line_interpolation', 'bar_stacked'), - ('x_axis_showminmax', 'bottom_margin'), - ('x_axis_label', 'y_axis_label'), - ('reduce_x_ticks', 'show_controls'), - ), }] + [NVD3TimeSeriesViz.fieldsets[2]] class NVD3CompareTimeSeriesViz(NVD3TimeSeriesViz): @@ -1427,16 +1045,6 @@ class NVD3TimeSeriesStackedViz(NVD3TimeSeriesViz): viz_type = "area" verbose_name = _("Time Series - Stacked") sort_series = True - fieldsets = [NVD3TimeSeriesViz.fieldsets[0]] + [{ - 'label': _('Chart Options'), - 'fields': ( - ('show_brush', 'show_legend'), - ('rich_tooltip', 'y_axis_zero'), - 
('y_log_scale', 'contribution'), - ('x_axis_format', 'y_axis_format'), - ('x_axis_showminmax', 'show_controls'), - ('line_interpolation', 'stacked_style'), - ), }] + [NVD3TimeSeriesViz.fieldsets[2]] class DistributionPieViz(NVD3Viz): @@ -1446,33 +1054,13 @@ class DistributionPieViz(NVD3Viz): viz_type = "pie" verbose_name = _("Distribution - NVD3 - Pie Chart") is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'metrics', 'groupby', - 'limit', - 'pie_label_type', - ('donut', 'show_legend'), - 'labels_outside', - ) - },) - def query_obj(self): - d = super(DistributionPieViz, self).query_obj() - d['is_timeseries'] = False - return d - - def get_df(self, query_obj=None): - df = super(DistributionPieViz, self).get_df(query_obj) + def get_data(self, df): df = df.pivot_table( index=self.groupby, values=[self.metrics[0]]) df.sort_values(by=self.metrics[0], ascending=False, inplace=True) - return df - - def get_data(self): df = self.get_df() - df = df.reset_index() df.columns = ['x', 'y'] return df.to_dict(orient="records") @@ -1484,30 +1072,6 @@ class HistogramViz(BaseViz): viz_type = "histogram" verbose_name = _("Histogram") is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - ('all_columns_x',), - 'row_limit', - ) - }, { - 'label': _("Histogram Options"), - 'fields': ( - 'link_length', - ) - },) - - form_overrides = { - 'all_columns_x': { - 'label': _('Numeric Column'), - 'description': _("Select the numeric column to draw the histogram"), - }, - 'link_length': { - 'label': _("No of Bins"), - 'description': _("Select number of bins for the histogram"), - 'default': 5 - } - } def query_obj(self): """Returns the query object for this visualization""" @@ -1520,9 +1084,8 @@ class HistogramViz(BaseViz): d['columns'] = [numeric_column] return d - def get_data(self): + def get_data(self, df): """Returns the chart data""" - df = self.get_df() chart_data = df[df.columns[0]].values.tolist() return chart_data @@ -1534,34 +1097,10 @@ class 
DistributionBarViz(DistributionPieViz): viz_type = "dist_bar" verbose_name = _("Distribution - Bar Chart") is_timeseries = False - fieldsets = ({ - 'label': _('Chart Options'), - 'fields': ( - 'groupby', - 'columns', - 'metrics', - 'row_limit', - ('show_legend', 'show_bar_value', 'bar_stacked'), - ('y_axis_format', 'bottom_margin'), - ('x_axis_label', 'y_axis_label'), - ('reduce_x_ticks', 'contribution'), - ('show_controls', 'order_bars'), - ) - },) - form_overrides = { - 'groupby': { - 'label': _('Series'), - }, - 'columns': { - 'label': _('Breakdowns'), - 'description': _("Defines how each series is broken down"), - }, - } def query_obj(self): d = super(DistributionPieViz, self).query_obj() # noqa fd = self.form_data - d['is_timeseries'] = False gb = fd.get('groupby') or [] cols = fd.get('columns') or [] d['groupby'] = set(gb + cols) @@ -1573,8 +1112,7 @@ class DistributionBarViz(DistributionPieViz): raise Exception("Pick at least one field for [Series]") return d - def get_df(self, query_obj=None): - df = super(DistributionPieViz, self).get_df(query_obj) # noqa + def get_data(self, df): fd = self.form_data row = df.groupby(self.groupby).sum()[self.metrics[0]].copy() @@ -1589,13 +1127,9 @@ class DistributionBarViz(DistributionPieViz): pt = pt.T pt = (pt / pt.sum()).T pt = pt.reindex(row.index) - return pt - - def get_data(self): - df = self.get_df() chart_data = [] for name, ys in df.iteritems(): - if df[name].dtype.kind not in "biufc": + if df[name].dtype.kind not in "biufc" or name in self.groupby: continue if isinstance(name, string_types): series_title = name @@ -1607,7 +1141,7 @@ class DistributionBarViz(DistributionPieViz): d = { "key": series_title, "values": [ - {'x': str(i), 'y': v} + {'x': str(row.index[i]), 'y': v} for i, v in ys.iteritems()] } chart_data.append(d) @@ -1624,36 +1158,8 @@ class SunburstViz(BaseViz): credits = ( 'Kerry Rodden ' '@bl.ocks.org') - fieldsets = ({ - 'label': None, - 'fields': ( - 'groupby', - 'metric', 'secondary_metric', - 
'row_limit', - ) - },) - form_overrides = { - 'metric': { - 'label': _('Primary Metric'), - 'description': _( - "The primary metric is used to " - "define the arc segment sizes"), - }, - 'secondary_metric': { - 'label': _('Secondary Metric'), - 'description': _( - "This secondary metric is used to " - "define the color as a ratio against the primary metric. " - "If the two metrics match, color is mapped level groups"), - }, - 'groupby': { - 'label': _('Hierarchy'), - 'description': _("This defines the level of the hierarchy"), - }, - } - def get_data(self): - df = self.get_df() + def get_data(self, df): # if m1 == m2 duplicate the metric column cols = self.form_data.get('groupby') @@ -1683,20 +1189,6 @@ class SankeyViz(BaseViz): verbose_name = _("Sankey") is_timeseries = False credits = 'd3-sankey on npm' - fieldsets = ({ - 'label': None, - 'fields': ( - 'groupby', - 'metric', - 'row_limit', - ) - },) - form_overrides = { - 'groupby': { - 'label': _('Source / Target'), - 'description': _("Choose a source and a target"), - }, - } def query_obj(self): qry = super(SankeyViz, self).query_obj() @@ -1706,8 +1198,7 @@ class SankeyViz(BaseViz): self.form_data['metric']] return qry - def get_data(self): - df = self.get_df() + def get_data(self, df): df.columns = ['source', 'target', 'value'] recs = df.to_dict(orient='records') @@ -1747,26 +1238,6 @@ class DirectedForceViz(BaseViz): verbose_name = _("Directed Force Layout") credits = 'd3noob @bl.ocks.org' is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'groupby', - 'metric', - 'row_limit', - ) - }, { - 'label': _('Force Layout'), - 'fields': ( - 'link_length', - 'charge', - ) - },) - form_overrides = { - 'groupby': { - 'label': _('Source / Target'), - 'description': _("Choose a source and a target"), - }, - } def query_obj(self): qry = super(DirectedForceViz, self).query_obj() @@ -1775,8 +1246,7 @@ class DirectedForceViz(BaseViz): qry['metrics'] = [self.form_data['metric']] return qry - def 
get_data(self): - df = self.get_df() + def get_data(self, df): df.columns = ['source', 'target', 'value'] return df.to_dict(orient='records') @@ -1789,35 +1259,6 @@ class WorldMapViz(BaseViz): verbose_name = _("World Map") is_timeseries = False credits = 'datamaps on npm' - fieldsets = ({ - 'label': None, - 'fields': ( - 'entity', - 'country_fieldtype', - 'metric', - ) - }, { - 'label': _('Bubbles'), - 'fields': ( - ('show_bubbles', None), - 'secondary_metric', - 'max_bubble_size', - ) - }) - form_overrides = { - 'entity': { - 'label': _('Country Field'), - 'description': _("3 letter code of the country"), - }, - 'metric': { - 'label': _('Metric for color'), - 'description': _("Metric that defines the color of the country"), - }, - 'secondary_metric': { - 'label': _('Bubble size'), - 'description': _("Metric that defines the size of the bubble"), - }, - } def query_obj(self): qry = super(WorldMapViz, self).query_obj() @@ -1826,9 +1267,8 @@ class WorldMapViz(BaseViz): qry['groupby'] = [self.form_data['entity']] return qry - def get_data(self): + def get_data(self, df): from superset.data import countries - df = self.get_df() cols = [self.form_data.get('entity')] metric = self.form_data.get('metric') secondary_metric = self.form_data.get('secondary_metric') @@ -1868,21 +1308,6 @@ class FilterBoxViz(BaseViz): verbose_name = _("Filters") is_timeseries = False credits = 'a Superset original' - fieldsets = ({ - 'label': None, - 'fields': ( - ('date_filter', None), - 'groupby', - 'metric', - ) - },) - form_overrides = { - 'groupby': { - 'label': _('Filter fields'), - 'description': _("The fields you want to filter on"), - 'default': [], - }, - } def query_obj(self): qry = super(FilterBoxViz, self).query_obj() @@ -1893,7 +1318,7 @@ class FilterBoxViz(BaseViz): self.form_data['metric']] return qry - def get_data(self): + def get_data(self, df): qry = self.query_obj() filters = [g for g in self.form_data['groupby']] d = {} @@ -1918,10 +1343,6 @@ class IFrameViz(BaseViz): 
verbose_name = _("iFrame") credits = 'a Superset original' is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ('url',) - },) class ParallelCoordinatesViz(BaseViz): @@ -1938,16 +1359,6 @@ class ParallelCoordinatesViz(BaseViz): '' 'Syntagmatic\'s library') is_timeseries = False - fieldsets = ({ - 'label': None, - 'fields': ( - 'series', - 'metrics', - 'secondary_metric', - 'limit', - ('show_datatable', 'include_series'), - ) - },) def query_obj(self): d = super(ParallelCoordinatesViz, self).query_obj() @@ -1959,8 +1370,7 @@ class ParallelCoordinatesViz(BaseViz): d['groupby'] = [fd.get('series')] return d - def get_data(self): - df = self.get_df() + def get_data(self, df): return df.to_dict(orient="records") @@ -1974,22 +1384,6 @@ class HeatmapViz(BaseViz): credits = ( 'inspired from mbostock @' 'bl.ocks.org') - fieldsets = ({ - 'label': None, - 'fields': ( - 'all_columns_x', - 'all_columns_y', - 'metric', - ) - }, { - 'label': _('Heatmap Options'), - 'fields': ( - 'linear_color_scheme', - ('xscale_interval', 'yscale_interval'), - 'canvas_image_rendering', - 'normalize_across', - ) - },) def query_obj(self): d = super(HeatmapViz, self).query_obj() @@ -1998,8 +1392,7 @@ class HeatmapViz(BaseViz): d['groupby'] = [fd.get('all_columns_x'), fd.get('all_columns_y')] return d - def get_data(self): - df = self.get_df() + def get_data(self, df): fd = self.form_data x = fd.get('all_columns_x') y = fd.get('all_columns_y') @@ -2041,11 +1434,6 @@ class HorizonViz(NVD3TimeSeriesViz): credits = ( '' 'd3-horizon-chart') - fieldsets = [NVD3TimeSeriesViz.fieldsets[0]] + [{ - 'label': _('Chart Options'), - 'fields': ( - ('series_height', 'horizon_color_scale'), - ), }] class MapboxViz(BaseViz): @@ -2057,70 +1445,6 @@ class MapboxViz(BaseViz): is_timeseries = False credits = ( 'Mapbox GL JS') - fieldsets = ({ - 'label': None, - 'fields': ( - ('all_columns_x', 'all_columns_y'), - 'clustering_radius', - 'row_limit', - 'groupby', - 'render_while_dragging', - ) - }, { - 
'label': _('Points'), - 'fields': ( - 'point_radius', - 'point_radius_unit', - ) - }, { - 'label': _('Labelling'), - 'fields': ( - 'mapbox_label', - 'pandas_aggfunc', - ) - }, { - 'label': _('Visual Tweaks'), - 'fields': ( - 'mapbox_style', - 'global_opacity', - 'mapbox_color', - ) - }, { - 'label': _('Viewport'), - 'fields': ( - 'viewport_longitude', - 'viewport_latitude', - 'viewport_zoom', - ) - },) - - form_overrides = { - 'all_columns_x': { - 'label': _('Longitude'), - 'description': _("Column containing longitude data"), - }, - 'all_columns_y': { - 'label': _('Latitude'), - 'description': _("Column containing latitude data"), - }, - 'pandas_aggfunc': { - 'label': _('Cluster label aggregator'), - 'description': _( - "Aggregate function applied to the list of points " - "in each cluster to produce the cluster label."), - }, - 'rich_tooltip': { - 'label': _('Tooltip'), - 'description': _( - "Show a tooltip when hovering over points and clusters " - "describing the label"), - }, - 'groupby': { - 'description': _( - "One or many fields to group by. 
If grouping, latitude " - "and longitude columns must be present."), - }, - } def query_obj(self): d = super(MapboxViz, self).query_obj() @@ -2159,8 +1483,7 @@ class MapboxViz(BaseViz): "[Longitude] and [Latitude] columns must be present in [Group By]") return d - def get_data(self): - df = self.get_df() + def get_data(self, df): fd = self.form_data label_col = fd.get('mapbox_label') custom_metric = label_col and len(label_col) >= 1 diff --git a/tests/core_tests.py b/tests/core_tests.py index 7df53c567e..2634b9eb0d 100644 --- a/tests/core_tests.py +++ b/tests/core_tests.py @@ -78,14 +78,22 @@ class CoreTests(SupersetTestCase): self.login(username='admin') slc = self.get_slice("Girls", db.session) - resp = self.get_resp(slc.viz.json_endpoint) + json_endpoint = ( + '/superset/explore_json/{}/{}?form_data={}' + .format(slc.datasource_type, slc.datasource_id, json.dumps(slc.viz.form_data)) + ) + resp = self.get_resp(json_endpoint) assert '"Jennifer"' in resp def test_slice_csv_endpoint(self): self.login(username='admin') slc = self.get_slice("Girls", db.session) - resp = self.get_resp(slc.viz.csv_endpoint) + csv_endpoint = ( + '/superset/explore_json/{}/{}?csv=true&form_data={}' + .format(slc.datasource_type, slc.datasource_id, json.dumps(slc.viz.form_data)) + ) + resp = self.get_resp(csv_endpoint) assert 'Jennifer,' in resp def test_admin_only_permissions(self): @@ -122,24 +130,55 @@ class CoreTests(SupersetTestCase): db.session.commit() copy_name = "Test Sankey Save" tbl_id = self.table_ids.get('energy_usage') + new_slice_name = "Test Sankey Overwrite" + url = ( - "/superset/explore/table/{}/?viz_type=sankey&groupby=source&" - "groupby=target&metric=sum__value&row_limit=5000&where=&having=&" - "flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id={}&slice_name={}&" - "collapsed_fieldsets=&action={}&datasource_name=energy_usage&" - "datasource_id=1&datasource_type=table&previous_viz_type=sankey") + "/superset/explore/table/{}/?slice_name={}&"
"action={}&datasource_name=energy_usage&form_data={}") - # Changing name - resp = self.get_resp(url.format(tbl_id, slice_id, copy_name, 'save')) - assert copy_name in resp + form_data = { + 'viz_type': 'sankey', + 'groupby': ['source', + 'target'], + 'metric': 'sum__value', + 'row_limit': 5000, + 'slice_id': slice_id, + } + # Changing name and save as a new slice + resp = self.get_resp( + url.format( + tbl_id, + copy_name, + 'saveas', + json.dumps(form_data) + ) + ) + slices = db.session.query(models.Slice) \ + .filter_by(slice_name=copy_name).all() + assert len(slices) == 1 + new_slice_id = slices[0].id - # Setting the name back to its original name - resp = self.get_resp(url.format(tbl_id, slice_id, slice_name, 'save')) - assert slice_name in resp + form_data = { + 'viz_type': 'sankey', + 'groupby': ['source', + 'target'], + 'metric': 'sum__value', + 'row_limit': 5000, + 'slice_id': new_slice_id, + } + # Setting the name back to its original name by overwriting new slice + resp = self.get_resp( + url.format( + tbl_id, + new_slice_name, + 'overwrite', + json.dumps(form_data) + ) + ) + slc = db.session.query(models.Slice).filter_by(id=new_slice_id).first() + assert slc.slice_name == new_slice_name + db.session.delete(slc) - # Doing a basic overwrite - assert 'Energy' in self.get_resp( - url.format(tbl_id, slice_id, copy_name, 'overwrite')) def test_filter_endpoint(self): self.login(username='admin') @@ -168,8 +207,6 @@ class CoreTests(SupersetTestCase): for slc in db.session.query(Slc).all(): urls += [ (slc.slice_name, 'slice_url', slc.slice_url), - (slc.slice_name, 'json_endpoint', slc.viz.json_endpoint), - (slc.slice_name, 'csv_endpoint', slc.viz.csv_endpoint), (slc.slice_name, 'slice_id_url', slc.slice_id_url), ] for name, method, url in urls: @@ -544,8 +581,7 @@ class CoreTests(SupersetTestCase): self.login(username='admin') url = ( '/superset/fetch_datasource_metadata?'
- 'datasource_type=table&' - 'datasource_id=1' + + 'datasourceKey=1__table' ) resp = self.get_json_resp(url) keys = [ diff --git a/tests/druid_tests.py b/tests/druid_tests.py index 7a1b8da59c..8d026234d2 100644 --- a/tests/druid_tests.py +++ b/tests/druid_tests.py @@ -116,30 +116,44 @@ class DruidTests(SupersetTestCase): resp = self.get_resp('/superset/explore/druid/{}/'.format( datasource_id)) - self.assertIn("[test_cluster].[test_datasource]", resp) - + self.assertIn("test_datasource", resp) + form_data = { + 'viz_type': 'table', + 'granularity': 'one+day', + 'druid_time_origin': '', + 'since': '7+days+ago', + 'until': 'now', + 'row_limit': 5000, + 'include_search': 'false', + 'metrics': ['count'], + 'groupby': ['dim1'], + 'force': 'true', + } # One groupby url = ( - '/superset/explore_json/druid/{}/?viz_type=table&granularity=one+day&' - 'druid_time_origin=&since=7+days+ago&until=now&row_limit=5000&' - 'include_search=false&metrics=count&groupby=dim1&flt_col_0=dim1&' - 'flt_op_0=in&flt_eq_0=&slice_id=&slice_name=&collapsed_fieldsets=&' - 'action=&datasource_name=test_datasource&datasource_id={}&' - 'datasource_type=druid&previous_viz_type=table&' - 'force=true'.format(datasource_id, datasource_id)) + '/superset/explore_json/druid/{}/?form_data={}'.format( + datasource_id, json.dumps(form_data)) + ) resp = self.get_json_resp(url) self.assertEqual("Canada", resp['data']['records'][0]['dim1']) + form_data = { + 'viz_type': 'table', + 'granularity': 'one+day', + 'druid_time_origin': '', + 'since': '7+days+ago', + 'until': 'now', + 'row_limit': 5000, + 'include_search': 'false', + 'metrics': ['count'], + 'groupby': ['dim1', 'dim2d'], + 'force': 'true', + } # two groupby url = ( - '/superset/explore_json/druid/{}/?viz_type=table&granularity=one+day&' - 'druid_time_origin=&since=7+days+ago&until=now&row_limit=5000&' - 'include_search=false&metrics=count&groupby=dim1&' - 'flt_col_0=dim1&groupby=dim2d&' - 
'flt_op_0=in&flt_eq_0=&slice_id=&slice_name=&collapsed_fieldsets=&' - 'action=&datasource_name=test_datasource&datasource_id={}&' - 'datasource_type=druid&previous_viz_type=table&' - 'force=true'.format(datasource_id, datasource_id)) + '/superset/explore_json/druid/{}/?form_data={}'.format( + datasource_id, json.dumps(form_data)) + ) resp = self.get_json_resp(url) self.assertEqual("Canada", resp['data']['records'][0]['dim1'])