TOX / Travis build matrix (#593)

* Building on many dbs

* Fixing some translation-related error messages
Authored by Maxime Beauchemin on 2016-06-09 18:05:58 -07:00; committed by GitHub
parent dd662eaca3
commit 60ed3e4050
16 changed files with 298 additions and 172 deletions


@@ -1,23 +1,30 @@
language: python
python:
- "2.7"
- "3.4"
- "3.5"
addons:
apt:
sources:
- deadsnakes
packages:
- python3.5
cache:
directories:
- $HOME/.wheelhouse/
env:
global:
- TRAVIS_CACHE=$HOME/.travis_cache/
matrix:
#- TOX_ENV=py27-mysql
- TOX_ENV=py27-sqlite
#- TOX_ENV=py27-postgres
- TOX_ENV=py34-sqlite
- TOX_ENV=py34-mysql
- TOX_ENV=javascript
before_install:
- npm install -g npm@'>=2.7.1'
before_script:
- mysql -e 'drop database if exists caravel; create database caravel DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci' -u root
- psql -c 'create database caravel;' -U postgres
- export PATH=${PATH}:/tmp/hive/bin
install:
- pip wheel -w $HOME/.wheelhouse -f $HOME/.wheelhouse .
- pip install --find-links=$HOME/.wheelhouse --no-index .
- pip install -r dev-reqs.txt
- cd caravel/assets
- npm --version
- npm install
- npm run lint
- npm run prod
- cd $TRAVIS_BUILD_DIR
script: bash run_tests.sh
after_success:
- coveralls
- pip install --upgrade pip
- pip install tox tox-travis
script: tox -e $TOX_ENV
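
Each matrix entry above exports a single TOX_ENV, and the Travis script collapses to tox -e $TOX_ENV, so per-database setup now lives in tox.ini. The selected database reaches the test suite through the CARAVEL__SQLALCHEMY_DATABASE_URI variable (see the test config change further down). A minimal smoke check, not part of this commit, that could confirm the wiring, assuming SQLAlchemy and the relevant driver are installed:

# Hypothetical smoke check (not in this commit): confirm the URI injected
# by the tox env yields a working SQLAlchemy engine before tests run.
import os

from sqlalchemy import create_engine, text

uri = os.environ.get(
    'CARAVEL__SQLALCHEMY_DATABASE_URI',
    'sqlite:////tmp/caravel_unittests.db')  # default from the test config
engine = create_engine(uri)
with engine.connect() as conn:
    assert conn.execute(text('SELECT 1')).scalar() == 1  # backend reachable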

caravel/assets/js_build.sh (new executable file, +6 lines)

@@ -0,0 +1,6 @@
#!/bin/bash
cd "$(dirname "$0")"
npm --version
npm install
npm run lint
npm run prod


@@ -13,11 +13,15 @@ function nvd3Vis(slice) {
var colorKey = 'key';
var render = function () {
$.getJSON(slice.jsonEndpoint(), function (payload) {
d3.json(slice.jsonEndpoint(), function (error, payload) {
slice.container.html('');
if (error) {
slice.error(error.responseText);
return '';
}
var fd = payload.form_data;
var viz_type = fd.viz_type;
var f = d3.format('.3s');
slice.container.html('');
nv.addGraph(function () {
switch (viz_type) {
@@ -25,8 +29,7 @@ function nvd3Vis(slice) {
if (fd.show_brush) {
chart = nv.models.lineWithFocusChart();
chart.lines2.xScale(d3.time.scale.utc());
chart
.x2Axis
chart.x2Axis
.showMaxMin(fd.x_axis_showminmax)
.staggerLabels(false);
} else {
@@ -133,103 +136,100 @@ function nvd3Vis(slice) {
default:
throw new Error("Unrecognized visualization for nvd3" + viz_type);
}
if ("showLegend" in chart && typeof fd.show_legend !== 'undefined') {
chart.showLegend(fd.show_legend);
}
var height = slice.height();
height -= 15; // accounting for the staggered xAxis
chart.height(height);
slice.container.css('height', height + 'px');
if ((viz_type === "line" || viz_type === "area") && fd.rich_tooltip) {
chart.useInteractiveGuideline(true);
}
if (fd.y_axis_zero) {
chart.forceY([0]);
} else if (fd.y_log_scale) {
chart.yScale(d3.scale.log());
}
if (fd.x_log_scale) {
chart.xScale(d3.scale.log());
}
var xAxisFormatter = null;
if (viz_type === 'bubble') {
xAxisFormatter = d3.format('.3s');
} else if (fd.x_axis_format === 'smart_date') {
xAxisFormatter = px.formatDate;
chart.xAxis.tickFormat(xAxisFormatter);
} else if (fd.x_axis_format !== undefined) {
xAxisFormatter = px.timeFormatFactory(fd.x_axis_format);
chart.xAxis.tickFormat(xAxisFormatter);
}
if (chart.hasOwnProperty("x2Axis")) {
chart.x2Axis.tickFormat(xAxisFormatter);
height += 30;
}
if (viz_type === 'bubble') {
chart.xAxis.tickFormat(d3.format('.3s'));
} else if (fd.x_axis_format === 'smart_date') {
chart.xAxis.tickFormat(px.formatDate);
} else if (fd.x_axis_format !== undefined) {
chart.xAxis.tickFormat(px.timeFormatFactory(fd.x_axis_format));
}
if (chart.yAxis !== undefined) {
chart.yAxis.tickFormat(d3.format('.3s'));
}
if (fd.contribution || fd.num_period_compare || viz_type === 'compare') {
chart.yAxis.tickFormat(d3.format('.3p'));
if (chart.y2Axis !== undefined) {
chart.y2Axis.tickFormat(d3.format('.3p'));
}
} else if (fd.y_axis_format) {
chart.yAxis.tickFormat(d3.format(fd.y_axis_format));
if (chart.y2Axis !== undefined) {
chart.y2Axis.tickFormat(d3.format(fd.y_axis_format));
if ("showLegend" in chart && typeof fd.show_legend !== 'undefined') {
chart.showLegend(fd.show_legend);
}
}
chart.color(function (d, i) {
return px.color.category21(d[colorKey]);
var height = slice.height();
height -= 15; // accounting for the staggered xAxis
chart.height(height);
slice.container.css('height', height + 'px');
if ((viz_type === "line" || viz_type === "area") && fd.rich_tooltip) {
chart.useInteractiveGuideline(true);
}
if (fd.y_axis_zero) {
chart.forceY([0]);
} else if (fd.y_log_scale) {
chart.yScale(d3.scale.log());
}
if (fd.x_log_scale) {
chart.xScale(d3.scale.log());
}
var xAxisFormatter = null;
if (viz_type === 'bubble') {
xAxisFormatter = d3.format('.3s');
} else if (fd.x_axis_format === 'smart_date') {
xAxisFormatter = px.formatDate;
chart.xAxis.tickFormat(xAxisFormatter);
} else if (fd.x_axis_format !== undefined) {
xAxisFormatter = px.timeFormatFactory(fd.x_axis_format);
chart.xAxis.tickFormat(xAxisFormatter);
}
if (chart.hasOwnProperty("x2Axis")) {
chart.x2Axis.tickFormat(xAxisFormatter);
height += 30;
}
if (viz_type === 'bubble') {
chart.xAxis.tickFormat(d3.format('.3s'));
} else if (fd.x_axis_format === 'smart_date') {
chart.xAxis.tickFormat(px.formatDate);
} else if (fd.x_axis_format !== undefined) {
chart.xAxis.tickFormat(px.timeFormatFactory(fd.x_axis_format));
}
if (chart.yAxis !== undefined) {
chart.yAxis.tickFormat(d3.format('.3s'));
}
if (fd.contribution || fd.num_period_compare || viz_type === 'compare') {
chart.yAxis.tickFormat(d3.format('.3p'));
if (chart.y2Axis !== undefined) {
chart.y2Axis.tickFormat(d3.format('.3p'));
}
} else if (fd.y_axis_format) {
chart.yAxis.tickFormat(d3.format(fd.y_axis_format));
if (chart.y2Axis !== undefined) {
chart.y2Axis.tickFormat(d3.format(fd.y_axis_format));
}
}
chart.color(function (d, i) {
return px.color.category21(d[colorKey]);
});
var svg = d3.select(slice.selector).select("svg");
if (svg.empty()) {
svg = d3.select(slice.selector).append("svg");
}
svg
.datum(payload.data)
.transition().duration(500)
.attr('height', height)
.call(chart);
return chart;
});
var svg = d3.select(slice.selector).select("svg");
if (svg.empty()) {
svg = d3.select(slice.selector).append("svg");
}
svg
.datum(payload.data)
.transition().duration(500)
.attr('height', height)
.call(chart);
return chart;
slice.done(payload);
});
};
slice.done(payload);
})
.fail(function (xhr) {
slice.error(xhr.responseText);
});
};
var update = function () {
if (chart && chart.update) {
chart.update();
}
};
var update = function () {
if (chart && chart.update) {
chart.update();
}
};
return {
render: render,
resize: update
};
return {
render: render,
resize: update
};
}
module.exports = nvd3Vis;


@@ -58,16 +58,20 @@ def init():
"""Inits the Caravel application"""
utils.init(caravel)
@manager.command
def version():
@manager.option(
'-v', '--verbose', action='store_true',
help="Show extra information")
def version(verbose):
"""Prints the current version number"""
s = (
"\n{boat}\n\n"
"-----------------------\n"
"Caravel {version}\n"
"-----------------------\n").format(
"-----------------------").format(
boat=ascii_art.boat, version=caravel.VERSION)
print(s)
if verbose:
print("[DB] : " + "{}".format(db.engine))
@manager.option(
'-t', '--load-test-data', action='store_true',
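
For context on the reworked version command: Flask-Script's @manager.option forwards its arguments to argparse, so action='store_true' turns -v/--verbose into a boolean that arrives as the function's verbose parameter. A self-contained sketch of the same pattern; the app and banner text are placeholders, not Caravel's actual values:

# Standalone Flask-Script sketch of the @manager.option pattern above.
from flask import Flask
from flask_script import Manager

app = Flask(__name__)
manager = Manager(app)

@manager.option(
    '-v', '--verbose', action='store_true',
    help="Show extra information")
def version(verbose):
    """Prints a version banner, plus extra detail when verbose."""
    print("Caravel x.y.z")  # placeholder banner
    if verbose:
        print("[DB] : <engine repr would go here>")

if __name__ == '__main__':
    manager.run()  # e.g. python manage.py version -v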


@@ -4,7 +4,7 @@ import logging
from logging.config import fileConfig
from alembic import context
from flask.ext.appbuilder import Base
from flask_appbuilder import Base
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
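
The import swap above tracks Flask's deprecation of the flask.ext namespace (deprecated as of Flask 0.11, later removed); extensions are now imported under their own package names:

# New-style extension import; the flask.ext.* form emits a
# deprecation warning on Flask 0.11+.
from flask_appbuilder import Base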


@@ -11,28 +11,41 @@ revision = '1226819ee0e3'
down_revision = '956a063c52b3'
from alembic import op
import sqlalchemy as sa
from caravel import db, models
from caravel.utils import generic_find_constraint_name
import logging
naming_convention = {
"fk":
"fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
}
def find_constraint_name(upgrade=True):
cols = {'column_name'} if upgrade else {'datasource_name'}
return generic_find_constraint_name(table='columns', columns=cols, referenced='datasources')
return generic_find_constraint_name(
table='columns', columns=cols, referenced='datasources', db=db)
def upgrade():
constraint = find_constraint_name() or 'fk_columns_column_name_datasources'
with op.batch_alter_table("columns",
naming_convention=naming_convention) as batch_op:
batch_op.drop_constraint(constraint, type_="foreignkey")
batch_op.create_foreign_key('fk_columns_datasource_name_datasources', 'datasources', ['datasource_name'], ['datasource_name'])
try:
constraint = find_constraint_name() or 'fk_columns_column_name_datasources'
with op.batch_alter_table("columns",
naming_convention=naming_convention) as batch_op:
batch_op.drop_constraint(constraint, type_="foreignkey")
batch_op.create_foreign_key(
'fk_columns_datasource_name_datasources',
'datasources',
['datasource_name'], ['datasource_name'])
except:
logging.warning(
"Could not find or drop constraint on `columns`")
def downgrade():
constraint = find_constraint_name(False) or 'fk_columns_datasource_name_datasources'
with op.batch_alter_table("columns",
with op.batch_alter_table("columns",
naming_convention=naming_convention) as batch_op:
batch_op.drop_constraint(constraint, type_="foreignkey")
batch_op.create_foreign_key('fk_columns_column_name_datasources', 'datasources', ['column_name'], ['datasource_name'])
batch_op.create_foreign_key(
'fk_columns_column_name_datasources',
'datasources',
['column_name'], ['datasource_name'])
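
Two things changed in this migration: generic_find_constraint_name now takes the db handle explicitly rather than reaching for the engine via alembic's op.get_bind() (see the utils.py hunk below), and the upgrade is wrapped in try/except so databases where the old constraint never existed log a warning instead of failing the migration. The naming_convention is the usual batch-migration device for giving otherwise-anonymous constraints (common on SQLite) a predictable name to drop. A sketch of the lookup as the migration now performs it:

# Sketch of the constraint lookup with the new explicit-db signature.
import logging

from caravel import db
from caravel.utils import generic_find_constraint_name

constraint = generic_find_constraint_name(
    table='columns', columns={'column_name'},
    referenced='datasources', db=db) or 'fk_columns_column_name_datasources'
logging.info("Constraint to drop: %s", constraint)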


@@ -734,7 +734,6 @@ class SqlaTable(Model, Queryable, AuditMixinNullable):
qry.compile(
engine, compile_kwargs={"literal_binds": True},),
)
print(sql)
df = pd.read_sql_query(
sql=sql,
con=engine
@@ -1040,7 +1039,7 @@ class DruidDatasource(Model, AuditMixinNullable, Queryable):
@classmethod
def sync_to_db(cls, name, cluster):
"""Fetches metadata for that datasource and merges the Caravel db"""
print("Syncing Druid datasource [{}]".format(name))
logging.info("Syncing Druid datasource [{}]".format(name))
session = get_session()
datasource = session.query(cls).filter_by(datasource_name=name).first()
if not datasource:
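
Swapping print for logging.info routes the message through standard log configuration, where deployments can filter, format, or redirect it. A minimal standard-library sketch (the datasource name is a placeholder):

import logging

# Without basicConfig the root logger only emits WARNING and above,
# so the INFO line below would otherwise be dropped.
logging.basicConfig(level=logging.INFO)
logging.info("Syncing Druid datasource [%s]", "example_datasource")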


@@ -4,16 +4,16 @@ from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime
import functools
import json
import logging
import numpy
from datetime import datetime
import time
import parsedatetime
import sqlalchemy as sa
from dateutil.parser import parse
from alembic import op
from flask import flash, Markup
from flask_appbuilder.security.sqla import models as ab_models
from markdown import markdown as md
@@ -183,21 +183,22 @@ def init(caravel):
public_role_like_gamma = \
public_role and config.get('PUBLIC_ROLE_LIKE_GAMMA', False)
for perm in perms:
if (perm.view_menu and perm.view_menu.name not in (
'ResetPasswordView',
'RoleModelView',
'UserDBModelView',
'Security') and
perm.permission.name not in (
'all_datasource_access',
'can_add',
'can_download',
'can_delete',
'can_edit',
'can_save',
'datasource_access',
'muldelete',
)):
if (
perm.view_menu and perm.view_menu.name not in (
'ResetPasswordView',
'RoleModelView',
'UserDBModelView',
'Security') and
perm.permission.name not in (
'all_datasource_access',
'can_add',
'can_download',
'can_delete',
'can_edit',
'can_save',
'datasource_access',
'muldelete',
)):
sm.add_permission_role(gamma, perm)
if public_role_like_gamma:
sm.add_permission_role(public_role, perm)
@@ -222,6 +223,14 @@ def datetime_f(dttm):
return "<nobr>{}</nobr>".format(dttm)
def base_json_conv(obj):
if isinstance(obj, numpy.int64):
return int(obj)
elif isinstance(obj, set):
return list(obj)
def json_iso_dttm_ser(obj):
"""
json serializer that deals with dates
@@ -230,10 +239,25 @@ def json_iso_dttm_ser(obj):
>>> json.dumps({'dttm': dttm}, default=json_iso_dttm_ser)
'{"dttm": "1970-01-01T00:00:00"}'
"""
val = base_json_conv(obj)
if val is not None:
return val
if isinstance(obj, datetime):
obj = obj.isoformat()
elif isinstance(obj, numpy.int64):
obj = int(obj)
else:
raise TypeError(
"Unserializable object {} of type {}".format(obj, type(obj))
)
return obj
def json_int_dttm_ser(obj):
"""json serializer that deals with dates"""
val = base_json_conv(obj)
if val is not None:
return val
if isinstance(obj, datetime):
obj = int(time.mktime(obj.timetuple())) * 1000
else:
raise TypeError(
"Unserializable object {} of type {}".format(obj, type(obj))
@@ -259,16 +283,12 @@ def readfile(filepath):
return content
def generic_find_constraint_name(table, columns, referenced):
"""
Utility to find a constraint name in alembic migrations
"""
engine = op.get_bind().engine
m = sa.MetaData({})
t = sa.Table(table, m, autoload=True, autoload_with=engine)
def generic_find_constraint_name(table, columns, referenced, db):
"""Utility to find a constraint name in alembic migrations"""
t = sa.Table(table, db.metadata, autoload=True, autoload_with=db.engine)
for fk in t.foreign_key_constraints:
if fk.referred_table.name == referenced and \
set(fk.column_keys) == columns:
if (
fk.referred_table.name == referenced and
set(fk.column_keys) == columns):
return fk.name
return None
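
The serializer refactor pulls the shared coercions (numpy.int64 to int, set to list) into base_json_conv, so json_iso_dttm_ser and json_int_dttm_ser now differ only in how they render datetimes. A quick usage sketch against the functions above, assuming numpy is installed (the module already imports it):

import json
from datetime import datetime

import numpy

from caravel.utils import json_int_dttm_ser

payload = {'n': numpy.int64(7), 'tags': {'a', 'b'}, 'dttm': datetime(1970, 1, 2)}
# datetimes become epoch milliseconds here; json_iso_dttm_ser would
# render them as ISO 8601 strings instead.
print(json.dumps(payload, default=json_int_dttm_ser, sort_keys=True))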


@@ -99,7 +99,6 @@ class FilterDashboard(CaravelFilter):
.query(Slice.id)
.filter(Slice.perm.in_(self.get_perms()))
)
print([r for r in slice_ids_qry.all()])
query = query.filter(
Dash.id.in_(
db.session.query(Dash.id)
@@ -108,7 +107,6 @@ class FilterDashboard(CaravelFilter):
.filter(Slice.id.in_(slice_ids_qry))
)
)
print(query)
return query
@@ -727,7 +725,6 @@ class Caravel(BaseView):
resp = Response(
payload,
status=status,
headers=generate_download_headers("json"),
mimetype="application/json")
return resp
elif request.args.get("csv") == "true":


@@ -10,7 +10,6 @@ from __future__ import unicode_literals
import copy
import hashlib
import json
import logging
import uuid
from collections import OrderedDict, defaultdict
@@ -20,7 +19,7 @@ import numpy as np
from flask import request
from flask_babelpkg import lazy_gettext as _
from markdown import markdown
from pandas.io.json import dumps
import json
from six import string_types
from werkzeug.datastructures import ImmutableMultiDict
from werkzeug.urls import Href
@@ -143,6 +142,7 @@ class BaseViz(object):
df.timestamp = pd.to_datetime(df.timestamp, utc=False)
if self.datasource.offset:
df.timestamp += timedelta(hours=self.datasource.offset)
df.replace([np.inf, -np.inf], np.nan)
df = df.fillna(0)
return df
@@ -262,7 +262,7 @@ class BaseViz(object):
def json_dumps(self, obj):
"""Used by get_json, can be overridden to use specific switches"""
return dumps(obj)
return json.dumps(obj, default=utils.json_int_dttm_ser)
@property
def data(self):
@@ -303,7 +303,7 @@ class BaseViz(object):
@property
def json_data(self):
return dumps(self.data)
return json.dumps(self.data)
class TableViz(BaseViz):
@@ -824,7 +824,7 @@ class BigNumberViz(BaseViz):
def get_data(self):
form_data = self.form_data
df = self.get_df()
df.sort(columns=df.columns[0], inplace=True)
df.sort_values(by=df.columns[0], inplace=True)
compare_lag = form_data.get("compare_lag", "")
compare_lag = int(compare_lag) if compare_lag and compare_lag.isdigit() else 0
return {
@@ -873,7 +873,7 @@ class BigNumberTotalViz(BaseViz):
def get_data(self):
form_data = self.form_data
df = self.get_df()
df = df.sort(columns=df.columns[0])
df.sort_values(by=df.columns[0], inplace=True)
return {
'data': df.values.tolist(),
'subheader': form_data.get('subheader', ''),
@@ -975,7 +975,7 @@ class NVD3TimeSeriesViz(NVD3Viz):
for col in df.columns:
if col == '':
cols.append('N/A')
elif col == None:
elif col is None:
cols.append('NULL')
else:
cols.append(col)
@@ -1103,7 +1103,7 @@ class DistributionPieViz(NVD3Viz):
df = df.pivot_table(
index=self.groupby,
values=[self.metrics[0]])
df.sort(self.metrics[0], ascending=False, inplace=True)
df.sort_values(by=self.metrics[0], ascending=False, inplace=True)
return df
def get_data(self):
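
The pandas changes replace df.sort(columns=...), deprecated in pandas 0.17 and removed in 0.20, with sort_values(by=...). One caveat on the new df.replace([np.inf, -np.inf], np.nan) line: DataFrame.replace returns a new frame unless inplace=True is passed or the result is assigned, so the infinities only actually reach fillna(0) when the call is written as an assignment. An illustration of both points:

import numpy as np
import pandas as pd

df = pd.DataFrame({'metric': [3.0, 1.0, np.inf]})
df = df.replace([np.inf, -np.inf], np.nan)  # assignment matters: not in-place
df = df.fillna(0)
df.sort_values(by='metric', ascending=False, inplace=True)
print(df)  # metric column ordered 3.0, 1.0, 0.0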


@@ -1,6 +1,8 @@
coveralls
mock
mysqlclient
nose
psycopg2
sphinx
sphinx_bootstrap_theme
sphinxcontrib.youtube


@@ -1,7 +1,9 @@
#!/usr/bin/env bash
echo $DB
rm /tmp/caravel_unittests.db
rm -f .coverage
export CARAVEL_CONFIG=tests.caravel_test_config
set -e
caravel/bin/caravel db upgrade
caravel/bin/caravel version -v
python setup.py nosetests


@@ -1,4 +1,5 @@
import imp, os
import imp
import os
from setuptools import setup, find_packages
version = imp.load_source(


@@ -4,3 +4,9 @@ AUTH_USER_REGISTRATION_ROLE = 'alpha'
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/caravel_unittests.db'
DEBUG = True
CARAVEL_WEBSERVER_PORT = 8081
# Allowing SQLALCHEMY_DATABASE_URI to be defined as an env var for
# continuous integration
if 'CARAVEL__SQLALCHEMY_DATABASE_URI' in os.environ:
SQLALCHEMY_DATABASE_URI = os.environ.get('CARAVEL__SQLALCHEMY_DATABASE_URI')


@@ -139,12 +139,12 @@ class CoreTests(CaravelTestCase):
urls = []
for slc in db.session.query(Slc).all():
urls += [
(slc.slice_name, slc.slice_url),
(slc.slice_name, slc.viz.json_endpoint),
(slc.slice_name, slc.viz.csv_endpoint),
(slc.slice_name, 'slice_url', slc.slice_url),
(slc.slice_name, 'json_endpoint', slc.viz.json_endpoint),
(slc.slice_name, 'csv_endpoint', slc.viz.csv_endpoint),
]
for name, url in urls:
print("Slice: " + name)
for name, method, url in urls:
print("[name]/[{method}]: {url}".format(**locals()))
self.client.get(url)
def test_dashboard(self):

tox.ini (new file, +69 lines)

@@ -0,0 +1,69 @@
[tox]
envlist =
py27-mysql
py27-sqlite
py27-postgres
py34-mysql
py35-mysql
skipsdist=True
[global]
wheel_dir = {homedir}/.wheelhouse
find_links =
{homedir}/.wheelhouse
{homedir}/.pip-cache
[testenv]
deps =
wheel
coveralls
passenv =
HOME
TRAVIS
TRAVIS_BRANCH
TRAVIS_BUILD_DIR
TRAVIS_JOB_ID
USER
TRAVIS_CACHE
TRAVIS_PULL_REQUEST
PATH
commands =
python --version
pip wheel -w {homedir}/.wheelhouse -f {homedir}/.wheelhouse .
pip install --find-links={homedir}/.wheelhouse --no-index .
pip install -r dev-reqs.txt
{toxinidir}/run_tests.sh
coveralls
[testenv:py27-mysql]
basepython = python2.7
setenv =
CARAVEL__SQLALCHEMY_DATABASE_URI = mysql://root@localhost/caravel
[testenv:py34-mysql]
basepython = python3.4
setenv =
CARAVEL__SQLALCHEMY_DATABASE_URI = mysql://root@localhost/caravel
[testenv:py35-mysql]
basepython = python3.5
setenv =
CARAVEL__SQLALCHEMY_DATABASE_URI = mysql://root@localhost/caravel
[testenv:py27-sqlite]
basepython = python2.7
setenv =
CARAVEL__SQLALCHEMY_DATABASE_URI = sqlite:////tmp/caravel.db
[testenv:py34-sqlite]
basepython = python3.4
setenv =
CARAVEL__SQLALCHEMY_DATABASE_URI = sqlite:////tmp/caravel.db
[testenv:py27-postgres]
basepython = python2.7
setenv =
CARAVEL__SQLALCHEMY_DATABASE_URI = postgresql+psycopg2://postgres@localhost/caravel
[testenv:javascript]
commands = {toxinidir}/caravel/assets/js_build.sh