[explore] show the broken query when failing (#1871)

* Return query when failing

* Linting

* sjson -> simplejson
Maxime Beauchemin 2017-01-05 10:00:39 -08:00 committed by GitHub
parent e3b296c558
commit c14c7edc5e
9 changed files with 132 additions and 116 deletions
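In short: when a chart's query fails, the backend now catches the database error, keeps the compiled SQL, and returns both to the frontend instead of a bare error string, so the UI can show the broken query. A rough sketch of the failure payload, with key names taken from the BaseViz.get_payload() dict later in this diff; the error text and SQL are illustrative:

    # Sketch of the JSON body served when a query fails.
    payload = {
        'status': 'failed',     # utils.QueryStatus.FAILED
        'error': 'relation "foo" does not exist',  # illustrative DB error
        'query': 'SELECT ...',  # the compiled SQL that broke
        'data': None,           # no rows on failure
    }
    # views.py serves this with HTTP 500, so the frontend error() handler
    # fires; chartUpdateFailed(payload.error, payload.query) then stores
    # the query in the explore state for display.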

View File

@@ -129,8 +129,8 @@ export function chartUpdateSucceeded(query) {
 }
 
 export const CHART_UPDATE_FAILED = 'CHART_UPDATE_FAILED';
-export function chartUpdateFailed(error) {
-  return { type: CHART_UPDATE_FAILED, error };
+export function chartUpdateFailed(error, query) {
+  return { type: CHART_UPDATE_FAILED, error, query };
 }
 
 export const UPDATE_EXPLORE_ENDPOINTS = 'UPDATE_EXPLORE_ENDPOINTS';

View File

@@ -142,7 +142,13 @@ class ChartContainer extends React.Component {
       },
       error(msg) {
-        props.actions.chartUpdateFailed(msg);
+        let payload = { error: msg };
+        try {
+          payload = JSON.parse(msg);
+        } catch (e) {
+          // pass
+        }
+        props.actions.chartUpdateFailed(payload.error, payload.query);
       },
       d3format: (col, number) => {
View File

@@ -108,16 +108,14 @@ export const exploreReducer = function (state, action) {
       );
     },
     [actions.CHART_UPDATE_SUCCEEDED]() {
-      const vizUpdates = {
-        query: action.query,
-      };
       return Object.assign(
         {},
         state,
         {
           chartStatus: 'success',
-          viz: Object.assign({}, state.viz, vizUpdates),
-        });
+          viz: Object.assign({}, state.viz, { query: action.query }),
+        }
+      );
     },
     [actions.CHART_UPDATE_STARTED]() {
       const chartUpdateStartTime = now();
@@ -138,9 +136,12 @@ export const exploreReducer = function (state, action) {
       });
     },
     [actions.CHART_UPDATE_FAILED]() {
-      const chartUpdateEndTime = now();
-      return Object.assign({}, state,
-        { chartStatus: 'failed', chartAlert: action.error, chartUpdateEndTime });
+      return Object.assign({}, state, {
+        chartStatus: 'failed',
+        chartAlert: action.error,
+        chartUpdateEndTime: now(),
+        viz: Object.assign({}, state.viz, { query: action.query }),
+      });
     },
     [actions.UPDATE_CHART_STATUS]() {
       const newState = Object.assign({}, state, { chartStatus: action.status });

View File

@@ -68,11 +68,6 @@ const px = function () {
     $('#timer').text(num.toFixed(2) + ' sec');
   };
   let qrystr = '';
-  const always = function () {
-    // Private f, runs after done and error
-    clearInterval(timer);
-    $('#timer').removeClass('btn-warning');
-  };
   slice = {
     data,
     container,
@@ -123,6 +118,13 @@ const px = function () {
       return utils.d3format(format, number);
     },
     /* eslint no-shadow: 0 */
+    always(data) {
+      clearInterval(timer);
+      $('#timer').removeClass('btn-warning');
+      if (data && data.query) {
+        slice.viewSqlQuery = data.query;
+      }
+    },
     done(payload) {
       Object.assign(data, payload);
@@ -131,14 +133,10 @@ const px = function () {
       container.fadeTo(0.5, 1);
       container.show();
-      if (data !== undefined) {
-        slice.viewSqlQuery = data.query;
-      }
-      $('#timer').removeClass('label-warning label-danger');
       $('#timer').addClass('label-success');
+      $('#timer').removeClass('label-warning label-danger');
       $('.query-and-save button').removeAttr('disabled');
-      always(data);
+      this.always(data);
       controller.done(this);
     },
     getErrorMsg(xhr) {
@@ -161,8 +159,9 @@ const px = function () {
       token.find('img.loading').hide();
       container.fadeTo(0.5, 1);
       let errHtml = '';
+      let o;
       try {
-        const o = JSON.parse(msg);
+        o = JSON.parse(msg);
         if (o.error) {
           errorMsg = o.error;
         }
@@ -181,7 +180,7 @@ const px = function () {
       $('span.query').removeClass('disabled');
       $('#timer').addClass('btn-danger');
       $('.query-and-save button').removeAttr('disabled');
-      always(data);
+      this.always(o);
       controller.error(this);
     },
     clearError() {

View File

@@ -11,7 +11,6 @@ import logging
 import pickle
 import re
 import textwrap
-from collections import namedtuple
 from copy import deepcopy, copy
 from datetime import timedelta, datetime, date
@@ -59,13 +58,30 @@ from superset.viz import viz_types
 from superset.jinja_context import get_template_processor
 from superset.utils import (
     flasher, MetricPermException, DimSelector, wrap_clause_in_parens,
-    DTTM_ALIAS,
+    DTTM_ALIAS, QueryStatus,
 )
 
 config = app.config
 
-QueryResult = namedtuple('namedtuple', ['df', 'query', 'duration'])
+
+class QueryResult(object):
+
+    """Object returned by the query interface"""
+
+    def __init__(  # noqa
+            self,
+            df,
+            query,
+            duration,
+            status=QueryStatus.SUCCESS,
+            error_message=None):
+        self.df = df
+        self.query = query
+        self.duration = duration
+        self.status = status
+        self.error_message = error_message
+
 
 FillterPattern = re.compile(r'''((?:[^,"']|"[^"]*"|'[^']*')+)''')
@@ -1195,13 +1211,22 @@ class SqlaTable(Model, Queryable, AuditMixinNullable, ImportMixin):
             qry.compile(
                 engine, compile_kwargs={"literal_binds": True},),
         )
-        df = pd.read_sql_query(
-            sql=sql,
-            con=engine
-        )
         sql = sqlparse.format(sql, reindent=True)
+        status = QueryStatus.SUCCESS
+        error_message = None
+        df = None
+        try:
+            df = pd.read_sql_query(sql, con=engine)
+        except Exception as e:
+            status = QueryStatus.FAILED
+            error_message = str(e)
         return QueryResult(
-            df=df, duration=datetime.now() - qry_start_dttm, query=sql)
+            status=status,
+            df=df,
+            duration=datetime.now() - qry_start_dttm,
+            query=sql,
+            error_message=error_message)
 
     def get_sqla_table_object(self):
         return self.database.get_table(self.table_name, schema=self.schema)
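Since QueryResult now carries status and error_message alongside the dataframe, callers check the outcome instead of catching an exception. A minimal sketch of the consuming pattern, mirroring what BaseViz.get_df does later in this diff (datasource is a stand-in and report_error is a hypothetical helper):

    from superset.utils import QueryStatus

    result = datasource.query(**query_obj)  # returns a QueryResult
    if result.status == QueryStatus.FAILED:
        # result.query still holds the compiled SQL for display
        report_error(result.error_message, result.query)  # hypothetical helper
    else:
        df = result.df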
@@ -2548,16 +2573,6 @@ class FavStar(Model):
     dttm = Column(DateTime, default=datetime.utcnow)
 
 
-class QueryStatus:
-    CANCELLED = 'cancelled'
-    FAILED = 'failed'
-    PENDING = 'pending'
-    RUNNING = 'running'
-    SCHEDULED = 'scheduled'
-    SUCCESS = 'success'
-    TIMED_OUT = 'timed_out'
-
-
 class Query(Model):
 
     """ORM model for SQL query"""

View File

@@ -405,3 +405,16 @@ def pessimistic_connection_handling(target):
         except:
             raise exc.DisconnectionError()
         cursor.close()
+
+
+class QueryStatus:
+
+    """Enum-type class for query statuses"""
+
+    CANCELLED = 'cancelled'
+    FAILED = 'failed'
+    PENDING = 'pending'
+    RUNNING = 'running'
+    SCHEDULED = 'scheduled'
+    SUCCESS = 'success'
+    TIMED_OUT = 'timed_out'
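Hoisting QueryStatus from models.py into utils.py lets models.py and viz.py share it without importing each other, which is presumably the point of the move. The members are bare strings, so they compare directly against the serialized payload on either side of the wire:

    from superset.utils import QueryStatus

    payload = {'status': QueryStatus.FAILED}
    payload.get('status') == QueryStatus.FAILED  # True
    QueryStatus.FAILED == 'failed'               # also True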

View File

@@ -5,6 +5,7 @@ from __future__ import unicode_literals
 
 from datetime import datetime, timedelta
 import json
+import simplejson
 import logging
 import pickle
 import re
@@ -1390,16 +1391,22 @@ class Superset(BaseSupersetView):
                 status=404,
                 mimetype="application/json")
 
-        payload = ""
+        payload = {}
+        status = 200
         try:
-            payload = viz_obj.get_json()
+            payload = viz_obj.get_payload()
         except Exception as e:
             logging.exception(e)
+            status = 500
             return json_error_response(utils.error_msg_from_exception(e))
 
+        if payload.get('status') == QueryStatus.FAILED:
+            status = 500
+
         return Response(
-            payload,
-            status=200,
+            simplejson.dumps(
+                payload, default=utils.json_int_dttm_ser, ignore_nan=True),
+            status=status,
             mimetype="application/json")
 
     @expose("/import_dashboards", methods=['GET', 'POST'])
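The json -> simplejson switch above (the "sjson -> simplejson" bullet in the commit message) is load-bearing, not cosmetic: the stdlib json module serializes float('nan') as a bare NaN token, which is invalid JSON and makes the browser's JSON.parse throw, whereas simplejson with ignore_nan=True emits null. A quick illustration (values illustrative):

    import json
    import simplejson

    row = {'metric': float('nan')}
    json.dumps(row)                         # '{"metric": NaN}'  -- JSON.parse rejects this
    simplejson.dumps(row, ignore_nan=True)  # '{"metric": null}' -- parses cleanly

This matters here because a pandas result set routinely contains NaN.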
@@ -2261,13 +2268,13 @@ class Superset(BaseSupersetView):
         db.session.commit()
         params = {
             'viz_type': viz_type,
-            'groupby': dims[0].column_name if dims else '',
-            'metrics': metrics[0].metric_name if metrics else '',
-            'metric': metrics[0].metric_name if metrics else '',
+            'groupby': dims[0].column_name if dims else None,
+            'metrics': metrics[0].metric_name if metrics else None,
+            'metric': metrics[0].metric_name if metrics else None,
             'since': '100 years ago',
             'limit': '0',
         }
-        params = "&".join([k + '=' + v for k, v in params.items()])
+        params = "&".join([k + '=' + v for k, v in params.items() if v])
         url = '/superset/explore/table/{table.id}/?{params}'.format(**locals())
         return redirect(url)

View File

@@ -90,6 +90,9 @@ class BaseViz(object):
         self.groupby = self.form_data.get('groupby') or []
         self.reassignments()
 
+        self.status = None
+        self.error_message = None
+
     @classmethod
     def flat_form_fields(cls):
         l = set()
@@ -195,7 +198,10 @@ class BaseViz(object):
         # The datasource here can be different backend but the interface is common
         self.results = self.datasource.query(**query_obj)
+        self.status = self.results.status
+        self.error_message = self.results.error_message
         self.query = self.results.query
+
         df = self.results.df
 
         # Transform the timestamp we received from database to pandas supported
         # datetime format. If no python_date_format is specified, the pattern will
@@ -203,7 +209,9 @@ class BaseViz(object):
         # If the datetime format is unix, the parse will use the corresponding
         # parsing logic.
         if df is None or df.empty:
-            raise utils.NoDataException("No data.")
+            self.status = utils.QueryStatus.FAILED
+            self.error_message = "No data."
+            return pd.DataFrame()
         else:
             if DTTM_ALIAS in df.columns:
                 if timestamp_format in ("epoch_s", "epoch_ms"):
@@ -213,8 +221,8 @@ class BaseViz(object):
                         df[DTTM_ALIAS], utc=False, format=timestamp_format)
                 if self.datasource.offset:
                     df[DTTM_ALIAS] += timedelta(hours=self.datasource.offset)
-        df.replace([np.inf, -np.inf], np.nan)
-        df = df.fillna(0)
+            df.replace([np.inf, -np.inf], np.nan)
+            df = df.fillna(0)
         return df
 
     @property
@@ -321,6 +329,11 @@ class BaseViz(object):
         return config.get("CACHE_DEFAULT_TIMEOUT")
 
     def get_json(self, force=False):
+        return json.dumps(
+            self.get_payload(force),
+            default=utils.json_int_dttm_ser, ignore_nan=True)
+
+    def get_payload(self, force=False):
         """Handles caching around the json payload retrieval"""
         cache_key = self.cache_key
         payload = None
@@ -344,18 +357,24 @@ class BaseViz(object):
         if not payload:
             is_cached = False
             cache_timeout = self.cache_timeout
+            try:
+                data = self.get_data()
+            except Exception as e:
+                data = None
             payload = {
-                'cache_timeout': cache_timeout,
                 'cache_key': cache_key,
+                'cache_timeout': cache_timeout,
+                'column_formats': self.data['column_formats'],
                 'csv_endpoint': self.csv_endpoint,
-                'data': self.get_data(),
+                'data': data,
+                'error': self.error_message,
+                'filter_endpoint': self.filter_endpoint,
                 'form_data': self.form_data,
                 'json_endpoint': self.json_endpoint,
                 'query': self.query,
-                'filter_endpoint': self.filter_endpoint,
                 'standalone_endpoint': self.standalone_endpoint,
-                'column_formats': self.data['column_formats'],
+                'status': self.status,
             }
             payload['cached_dttm'] = datetime.now().isoformat().split('.')[0]
             logging.info("Caching for the next {} seconds".format(
@@ -375,11 +394,7 @@ class BaseViz(object):
                 logging.exception(e)
                 cache.delete(cache_key)
         payload['is_cached'] = is_cached
-        return self.json_dumps(payload)
-
-    def json_dumps(self, obj):
-        """Used by get_json, can be overridden to use specific switches"""
-        return json.dumps(obj, default=utils.json_int_dttm_ser, ignore_nan=True)
+        return payload
 
     @property
     def data(self):
@@ -508,24 +523,18 @@ class TableViz(BaseViz):
             d['orderby'] = [json.loads(t) for t in order_by_cols]
         return d
 
-    def get_df(self, query_obj=None):
-        df = super(TableViz, self).get_df(query_obj)
+    def get_data(self):
+        df = self.get_df()
         if (
                 self.form_data.get("granularity") == "all" and
                 DTTM_ALIAS in df):
             del df[DTTM_ALIAS]
-        return df
-
-    def get_data(self):
-        df = self.get_df()
         return dict(
             records=df.to_dict(orient="records"),
             columns=list(df.columns),
         )
 
-    def json_dumps(self, obj):
-        return json.dumps(obj, default=utils.json_iso_dttm_ser)
-
 
 class PivotTableViz(BaseViz):
@@ -566,8 +575,8 @@ class PivotTableViz(BaseViz):
             d['groupby'] = list(set(groupby) | set(columns))
         return d
 
-    def get_df(self, query_obj=None):
-        df = super(PivotTableViz, self).get_df(query_obj)
+    def get_data(self):
+        df = self.get_df()
         if (
                 self.form_data.get("granularity") == "all" and
                 DTTM_ALIAS in df):
@@ -579,10 +588,7 @@ class PivotTableViz(BaseViz):
             aggfunc=self.form_data.get('pandas_aggfunc'),
             margins=True,
         )
-        return df
-
-    def get_data(self):
-        return self.get_df().to_html(
+        return df.to_html(
             na_rep='',
             classes=(
                 "dataframe table table-striped table-bordered "
@@ -601,16 +607,12 @@ class MarkupViz(BaseViz):
         },)
 
     is_timeseries = False
 
-    def rendered(self):
+    def get_data(self):
         markup_type = self.form_data.get("markup_type")
         code = self.form_data.get("code", '')
         if markup_type == "markdown":
-            return markdown(code)
-        elif markup_type == "html":
-            return code
-
-    def get_data(self):
-        return dict(html=self.rendered())
+            code = markdown(code)
+        return dict(html=code)
 
 
 class SeparatorViz(MarkupViz):
@@ -690,11 +692,6 @@ class TreemapViz(BaseViz):
             )
         },)
 
-    def get_df(self, query_obj=None):
-        df = super(TreemapViz, self).get_df(query_obj)
-        df = df.set_index(self.form_data.get("groupby"))
-        return df
-
     def _nest(self, metric, df):
         nlevels = df.index.nlevels
         if nlevels == 1:
@@ -707,6 +704,7 @@ class TreemapViz(BaseViz):
     def get_data(self):
         df = self.get_df()
+        df = df.set_index(self.form_data.get("groupby"))
         chart_data = [{"name": metric, "children": self._nest(metric, df)}
                       for metric in df.columns]
         return chart_data
@@ -730,10 +728,6 @@ class CalHeatmapViz(BaseViz):
             ),
         },)
 
-    def get_df(self, query_obj=None):
-        df = super(CalHeatmapViz, self).get_df(query_obj)
-        return df
-
     def get_data(self):
         df = self.get_df()
         form_data = self.form_data
@@ -1406,22 +1400,6 @@ class HistogramViz(BaseViz):
             d['columns'] = [numeric_column]
         return d
 
-    def get_df(self, query_obj=None):
-        """Returns a pandas dataframe based on the query object"""
-        if not query_obj:
-            query_obj = self.query_obj()
-
-        self.results = self.datasource.query(**query_obj)
-        self.query = self.results.query
-
-        df = self.results.df
-        if df is None or df.empty:
-            raise Exception("No data, to build histogram")
-
-        df.replace([np.inf, -np.inf], np.nan)
-        df = df.fillna(0)
-        return df
-
     def get_data(self):
         """Returns the chart data"""
         df = self.get_df()
@@ -1554,10 +1532,6 @@ class SunburstViz(BaseViz):
         },
     }
 
-    def get_df(self, query_obj=None):
-        df = super(SunburstViz, self).get_df(query_obj)
-        return df
-
     def get_data(self):
         df = self.get_df()
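The net effect across the viz classes above: get_df() is no longer an override point. Querying, status and error bookkeeping, timestamp conversion, and NaN cleanup happen once in BaseViz.get_df, and each subclass only implements get_data() to shape the dataframe for its chart. A minimal sketch of the resulting contract (class name and shaping logic hypothetical):

    class MyViz(BaseViz):  # hypothetical subclass
        viz_type = 'my_viz'

        def get_data(self):
            df = self.get_df()  # BaseViz owns querying and error status
            # shape the frame into whatever the chart consumes
            return df.to_dict(orient='records')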

View File

@@ -229,10 +229,11 @@ class CoreTests(SupersetTestCase):
         self.assertEqual(sqlalchemy_uri_decrypted, database.sqlalchemy_uri_decrypted)
 
     def test_warm_up_cache(self):
-        slice = db.session.query(models.Slice).first()
+        slc = self.get_slice("Girls", db.session)
         data = self.get_json_resp(
-            '/superset/warm_up_cache?slice_id={}'.format(slice.id))
-        assert data == [{'slice_id': slice.id, 'slice_name': slice.slice_name}]
+            '/superset/warm_up_cache?slice_id={}'.format(slc.id))
+        assert data == [{'slice_id': slc.id, 'slice_name': slc.slice_name}]
 
         data = self.get_json_resp(
             '/superset/warm_up_cache?table_name=energy_usage&db_name=main')