Minor improvements

This commit is contained in:
Maxime 2015-09-03 22:23:44 +00:00
parent e1e298044e
commit d419279d70
3 changed files with 38 additions and 17 deletions

View File

@ -265,8 +265,9 @@ class Table(Model, Queryable, AuditMixin):
try:
table = self.database.get_table(self.table_name)
except Exception as e:
flash(str(e))
flash(
"Table doesn't see to exist in the specified database, "
"Table doesn't seem to exist in the specified database, "
"couldn't fetch column information", "danger")
return
@ -275,6 +276,10 @@ class Table(Model, Queryable, AuditMixin):
metrics = []
any_date_col = None
for col in table.columns:
try:
datatype = str(col.type)
except Exception as e:
datatype = "UNKNOWN"
dbcol = (
db.session
.query(TC)
@ -285,15 +290,16 @@ class Table(Model, Queryable, AuditMixin):
db.session.flush()
if not dbcol:
dbcol = TableColumn(column_name=col.name)
if (
str(col.type).startswith('VARCHAR') or
str(col.type).startswith('STRING')):
str(datatype).startswith('VARCHAR') or
str(datatype).startswith('STRING')):
dbcol.groupby = True
dbcol.filterable = True
db.session.merge(self)
self.columns.append(dbcol)
if not any_date_col and 'date' in str(col.type).lower():
if not any_date_col and 'date' in datatype.lower():
any_date_col = dbcol
if dbcol.sum:
@ -324,7 +330,7 @@ class Table(Model, Queryable, AuditMixin):
metric_type='count_distinct',
expression="COUNT(DISTINCT {})".format(dbcol.column_name)
))
dbcol.type = str(col.type)
dbcol.type = datatype
db.session.merge(self)
db.session.commit()
@ -509,13 +515,14 @@ class Datasource(Model, AuditMixin, Queryable):
#session.commit()
def query(
self, groupby, metrics,
granularity,
from_dttm, to_dttm,
limit_spec=None,
filter=None,
is_timeseries=True,
timeseries_limit=15, row_limit=None):
self, groupby, metrics,
granularity,
from_dttm, to_dttm,
limit_spec=None,
filter=None,
is_timeseries=True,
timeseries_limit=None,
row_limit=None):
qry_start_dttm = datetime.now()
query_str = ""
@ -565,7 +572,7 @@ class Datasource(Model, AuditMixin, Queryable):
client = self.cluster.get_pydruid_client()
orig_filters = filters
if timeseries_limit:
if timeseries_limit and is_timeseries:
# Limit on the number of timeseries, doing a two-phases query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
@ -605,7 +612,15 @@ class Datasource(Model, AuditMixin, Queryable):
Filter.build_filter(ff),
Filter.build_filter(orig_filters)])
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": metrics[0] if metrics else self.metrics[0],
"direction": "descending",
}],
}
client.groupby(**qry)
query_str += json.dumps(client.query_dict, indent=2)
df = client.export_pandas()

View File

@ -93,7 +93,7 @@ form input.form-control {
<h3>{{ viz.verbose_name }}
{% if results %}
<span class="label label-success">
{{ "{0:0.2f}".format(results.duration.total_seconds()) }} s
{{ "{0:0.4f}".format(results.duration.total_seconds()) }} s
</span>
<span class="label label-info btn"
data-toggle="modal" data-target="#query_modal">query</span>

View File

@ -120,8 +120,7 @@ class BaseViz(object):
if granularity != "all":
granularity = utils.parse_human_timedelta(
granularity).total_seconds() * 1000
limit = int(
args.get("limit", config.ROW_LIMIT))
limit = int(args.get("limit", 0))
row_limit = int(
args.get("row_limit", config.ROW_LIMIT))
since = args.get("since", "1 year ago")
@ -137,6 +136,7 @@ class BaseViz(object):
'granularity': granularity,
'from_dttm': from_dttm,
'to_dttm': to_dttm,
'is_timeseries': True,
'groupby': groupby,
'metrics': metrics,
'row_limit': row_limit,
@ -161,6 +161,12 @@ class TableViz(BaseViz):
verbose_name = "Table View"
template = 'panoramix/viz_table.html'
def query_obj(self):
d = super(TableViz, self).query_obj()
d['is_timeseries'] = False
d['timeseries_limit'] = None
return d
def render(self):
if self.error_msg:
return super(TableViz, self).render(error_msg=self.error_msg)