2016-03-29 00:55:58 -04:00
|
|
|
"""Utility functions used across Caravel"""
|
2016-04-07 11:39:08 -04:00
|
|
|
from __future__ import absolute_import
|
|
|
|
from __future__ import division
|
|
|
|
from __future__ import print_function
|
|
|
|
from __future__ import unicode_literals
|
2016-03-18 02:44:58 -04:00
|
|
|
|
2016-06-09 21:05:58 -04:00
|
|
|
from datetime import datetime
|
2016-06-10 00:12:44 -04:00
|
|
|
import decimal
|
2016-03-18 02:44:58 -04:00
|
|
|
import functools
|
|
|
|
import json
|
|
|
|
import logging
|
2016-05-20 14:10:29 -04:00
|
|
|
import numpy
|
2016-06-09 21:05:58 -04:00
|
|
|
import time
|
2016-03-18 02:44:58 -04:00
|
|
|
|
|
|
|
import parsedatetime
|
2016-06-03 12:47:51 -04:00
|
|
|
import sqlalchemy as sa
|
2016-04-08 02:01:40 -04:00
|
|
|
from dateutil.parser import parse
|
2016-04-20 20:36:37 -04:00
|
|
|
from flask import flash, Markup
|
2016-03-18 02:44:58 -04:00
|
|
|
from flask_appbuilder.security.sqla import models as ab_models
|
2016-04-08 02:01:40 -04:00
|
|
|
from markdown import markdown as md
|
|
|
|
from sqlalchemy.types import TypeDecorator, TEXT
|
2016-03-18 02:44:58 -04:00
|
|
|
|
|
|
|
|
2016-06-02 22:17:34 -04:00
|
|
|
class CaravelException(Exception):
    """Base class for all Caravel-specific exceptions."""
    pass
|
|
|
|
|
|
|
|
|
|
|
|
class CaravelSecurityException(CaravelException):
    """Caravel exception for security-related failures."""
    pass
|
|
|
|
|
|
|
|
|
2016-06-10 18:49:33 -04:00
|
|
|
# NOTE(review): unlike CaravelSecurityException this derives from the bare
# Exception rather than CaravelException — consider rebasing it on
# CaravelException for a unified exception hierarchy (verify no caller
# depends on it NOT being a CaravelException first).
class MetricPermException(Exception):
    """Exception related to metric permissions.

    Presumably raised when 'metric_access' is denied; the raising sites
    are outside this module.
    """
    pass
|
|
|
|
|
|
|
|
|
2016-04-20 20:36:37 -04:00
|
|
|
def flasher(msg, severity=None):
    """Flask's flash if available, logging call if not"""
    try:
        flash(msg, severity)
    except RuntimeError:
        # flash() raises RuntimeError outside of a request context;
        # degrade gracefully to a logging call of matching severity.
        log = logging.error if severity == 'danger' else logging.info
        log(msg)
|
|
|
|
|
|
|
|
|
2016-03-16 23:25:41 -04:00
|
|
|
class memoized(object):  # noqa

    """Decorator that caches a function's return value each time it is called

    If called later with the same arguments, the cached value is returned, and
    not re-evaluated.
    """

    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        # Unhashable arguments (e.g. a list) cannot be used as a dict key;
        # in that case skip caching entirely rather than blow up.
        try:
            hash(args)
        except TypeError:
            return self.func(*args)
        if args not in self.cache:
            self.cache[args] = self.func(*args)
        return self.cache[args]

    def __repr__(self):
        """Return the function's docstring."""
        return self.func.__doc__

    def __get__(self, obj, objtype):
        """Support instance methods."""
        return functools.partial(self.__call__, obj)
|
|
|
|
|
2016-03-16 23:25:41 -04:00
|
|
|
|
2016-03-18 02:44:58 -04:00
|
|
|
def list_minus(l, minus):
    """Returns l without what is in minus

    >>> list_minus([1, 2, 3], [2])
    [1, 3]
    """
    return list(filter(lambda item: item not in minus, l))
|
|
|
|
|
2016-03-16 23:25:41 -04:00
|
|
|
|
2016-03-18 02:44:58 -04:00
|
|
|
def parse_human_datetime(s):
    """
    Returns ``datetime.datetime`` from human readable strings

    >>> from datetime import date, timedelta
    >>> from dateutil.relativedelta import relativedelta
    >>> parse_human_datetime('2015-04-03')
    datetime.datetime(2015, 4, 3, 0, 0)
    >>> parse_human_datetime('2/3/1969')
    datetime.datetime(1969, 2, 3, 0, 0)
    >>> parse_human_datetime("now") <= datetime.now()
    True
    >>> parse_human_datetime("yesterday") <= datetime.now()
    True
    >>> date.today() - timedelta(1) == parse_human_datetime('yesterday').date()
    True
    >>> year_ago_1 = parse_human_datetime('one year ago').date()
    >>> year_ago_2 = (datetime.now() - relativedelta(years=1) ).date()
    >>> year_ago_1 == year_ago_2
    True
    """
    # First give dateutil a shot at strict-ish date strings; fall back to
    # parsedatetime for natural language ("yesterday", "one year ago", ...).
    try:
        return parse(s)
    except Exception:
        try:
            cal = parsedatetime.Calendar()
            return dttm_from_timtuple(cal.parse(s)[0])
        except Exception as e:
            logging.exception(e)
            raise ValueError("Couldn't parse date string [{}]".format(s))
|
|
|
|
|
|
|
|
|
|
|
|
def dttm_from_timtuple(d):
    """Build a ``datetime`` from a ``time.struct_time``-like tuple."""
    return datetime(
        year=d.tm_year, month=d.tm_mon, day=d.tm_mday,
        hour=d.tm_hour, minute=d.tm_min, second=d.tm_sec)
|
|
|
|
|
|
|
|
|
|
|
|
def merge_perm(sm, permission_name, view_menu_name):
    """Create the permission/view-menu pair unless it already exists."""
    existing = sm.find_permission_view_menu(permission_name, view_menu_name)
    if not existing:
        sm.add_permission_view_menu(permission_name, view_menu_name)
|
|
|
|
|
|
|
|
|
|
|
|
def parse_human_timedelta(s):
    """
    Returns ``datetime.timedelta`` from natural language time deltas

    >>> parse_human_timedelta('now')
    datetime.timedelta(0)
    """
    # Fixed: the docstring previously claimed a ``datetime.datetime`` return
    # and its doctest exercised parse_human_datetime instead of this function.
    cal = parsedatetime.Calendar()
    # Truncate "now" to whole seconds so the subtraction below is exact.
    dttm = dttm_from_timtuple(datetime.now().timetuple())
    # Reuse dttm_from_timtuple instead of duplicating its datetime(...) call.
    d = dttm_from_timtuple(cal.parse(s, dttm)[0])
    return d - dttm
|
|
|
|
|
|
|
|
|
|
|
|
class JSONEncodedDict(TypeDecorator):

    """Represents an immutable structure as a json-encoded string."""

    impl = TEXT

    def process_bind_param(self, value, dialect):
        # Serialize on the way into the database; None passes through.
        if value is None:
            return None
        return json.dumps(value)

    def process_result_value(self, value, dialect):
        # Deserialize on the way out of the database; None passes through.
        if value is None:
            return None
        return json.loads(value)
|
|
|
|
|
|
|
|
|
2016-03-29 00:55:58 -04:00
|
|
|
def init(caravel):
    """Inits the Caravel application with security roles and such"""
    db = caravel.db
    models = caravel.models
    sm = caravel.appbuilder.sm  # Flask-AppBuilder security manager
    alpha = sm.add_role("Alpha")
    admin = sm.add_role("Admin")
    config = caravel.app.config

    # Ensure the catch-all datasource permission exists before assigning.
    merge_perm(sm, 'all_datasource_access', 'all_datasource_access')

    perms = db.session.query(ab_models.PermissionView).all()
    # Alpha and Admin receive every permission-view pair except
    # per-datasource grants and the user/role management views.
    for perm in perms:
        if perm.permission.name == 'datasource_access':
            continue
        if perm.view_menu and perm.view_menu.name not in (
                'UserDBModelView', 'RoleModelView', 'ResetPasswordView',
                'Security'):
            sm.add_permission_role(alpha, perm)
            sm.add_permission_role(admin, perm)
    gamma = sm.add_role("Gamma")
    public_role = sm.find_role("Public")
    # Optionally mirror Gamma's grants onto the Public role when the
    # PUBLIC_ROLE_LIKE_GAMMA config flag is set (defaults to False).
    public_role_like_gamma = \
        public_role and config.get('PUBLIC_ROLE_LIKE_GAMMA', False)
    # Gamma is the restricted role: no security views, and none of the
    # mutating/bulk permissions or datasource grants listed below.
    for perm in perms:
        if (
                perm.view_menu and perm.view_menu.name not in (
                    'ResetPasswordView',
                    'RoleModelView',
                    'UserDBModelView',
                    'Security') and
                perm.permission.name not in (
                    'all_datasource_access',
                    'can_add',
                    'can_download',
                    'can_delete',
                    'can_edit',
                    'can_save',
                    'datasource_access',
                    'muldelete',
                )):
            sm.add_permission_role(gamma, perm)
            if public_role_like_gamma:
                sm.add_permission_role(public_role, perm)
    session = db.session()
    # Create a 'datasource_access' permission for every SQLA table and
    # Druid datasource currently registered.
    table_perms = [
        table.perm for table in session.query(models.SqlaTable).all()]
    table_perms += [
        table.perm for table in session.query(models.DruidDatasource).all()]
    for table_perm in table_perms:
        merge_perm(sm, 'datasource_access', table_perm)

    init_metrics_perm(caravel)
|
|
|
|
|
|
|
|
|
|
|
|
def init_metrics_perm(caravel, metrics=None):
    """Create 'metric_access' permissions for the given metrics.

    When *metrics* is None, every SqlMetric and DruidMetric in the
    database is processed.
    """
    db = caravel.db
    models = caravel.models
    sm = caravel.appbuilder.sm

    if metrics is None:
        metrics = []
        for model in (models.SqlMetric, models.DruidMetric):
            metrics.extend(db.session.query(model).all())

    # Skip metrics with an empty/None perm string.
    for metric in metrics:
        if metric.perm:
            merge_perm(sm, 'metric_access', metric.perm)
|
|
|
|
|
2016-03-18 02:44:58 -04:00
|
|
|
|
|
|
|
def datetime_f(dttm):
    """Formats datetime to take less room when it is recent"""
    if not dttm:
        return "<nobr>{}</nobr>".format(dttm)
    iso = dttm.isoformat()
    now_iso = datetime.now().isoformat()
    if now_iso[:10] == iso[:10]:
        # Same calendar day: keep only the time portion
        iso = iso[11:]
    elif now_iso[:4] == iso[:4]:
        # Same year: drop the year prefix
        iso = iso[5:]
    return "<nobr>{}</nobr>".format(iso)
|
|
|
|
|
|
|
|
|
2016-06-09 21:05:58 -04:00
|
|
|
def base_json_conv(obj):
    """Convert types the stdlib JSON encoder can't handle.

    Returns the converted value, or None when no conversion applies.
    """
    if isinstance(obj, numpy.int64):
        return int(obj)
    if isinstance(obj, set):
        return list(obj)
    if isinstance(obj, decimal.Decimal):
        return float(obj)
|
2016-06-09 21:05:58 -04:00
|
|
|
|
|
|
|
|
2016-03-18 02:44:58 -04:00
|
|
|
def json_iso_dttm_ser(obj):
    """
    json serializer that deals with dates

    >>> dttm = datetime(1970, 1, 1)
    >>> json.dumps({'dttm': dttm}, default=json_iso_dttm_ser)
    '{"dttm": "1970-01-01T00:00:00"}'
    """
    converted = base_json_conv(obj)
    if converted is not None:
        return converted
    if not isinstance(obj, datetime):
        raise TypeError(
            "Unserializable object {} of type {}".format(obj, type(obj))
        )
    return obj.isoformat()
|
|
|
|
|
|
|
|
|
|
|
|
def json_int_dttm_ser(obj):
    """json serializer that deals with dates"""
    converted = base_json_conv(obj)
    if converted is not None:
        return converted
    if not isinstance(obj, datetime):
        raise TypeError(
            "Unserializable object {} of type {}".format(obj, type(obj))
        )
    # Epoch milliseconds; mktime interprets the timetuple as local time
    # and the result has second resolution only.
    return int(time.mktime(obj.timetuple())) * 1000
|
|
|
|
|
|
|
|
|
2016-04-04 19:13:08 -04:00
|
|
|
def markdown(s, markup_wrap=False):
    """Render *s* as markdown HTML, optionally wrapped in a safe Markup.

    None/empty input renders as the empty string.
    """
    rendered = md(s or '', [
        'markdown.extensions.tables',
        'markdown.extensions.fenced_code',
        'markdown.extensions.codehilite',
    ])
    return Markup(rendered) if markup_wrap else rendered
|
2016-03-18 02:44:58 -04:00
|
|
|
|
|
|
|
|
|
|
|
def readfile(filepath):
    """Return the entire contents of the file at *filepath* as a string."""
    with open(filepath) as f:
        return f.read()
|
2016-06-03 12:47:51 -04:00
|
|
|
|
|
|
|
|
2016-06-09 21:05:58 -04:00
|
|
|
def generic_find_constraint_name(table, columns, referenced, db):
    """Utility to find a constraint name in alembic migrations"""
    # Reflect the live table so we can inspect its FK constraints.
    # NOTE: *columns* is compared against a set, so callers should pass
    # a set of column names.
    reflected = sa.Table(
        table, db.metadata, autoload=True, autoload_with=db.engine)

    for fk in reflected.foreign_key_constraints:
        points_at_target = fk.referred_table.name == referenced
        if points_at_target and set(fk.column_keys) == columns:
            return fk.name
|