mirror of https://github.com/apache/superset.git
parent e12d00ae71
commit a9ef0aeaf5
@@ -18,8 +18,7 @@ import werkzeug.serving
 import yaml

 from superset import (
-    app, dashboard_import_export_util, data, db,
-    dict_import_export_util, security_manager, utils,
+    app, data, db, dict_import_export_util, security_manager, utils,
 )

 config = app.config
@@ -225,53 +224,6 @@ def refresh_druid(datasource, merge):
     session.commit()


-@app.cli.command()
-@click.option(
-    '--path', '-p',
-    help='Path to a single JSON file or path containing multiple JSON files'
-    'files to import (*.json)')
-@click.option(
-    '--recursive', '-r',
-    help='recursively search the path for json files')
-def import_dashboards(path, recursive=False):
-    """Import dashboards from JSON"""
-    p = Path(path)
-    files = []
-    if p.is_file():
-        files.append(p)
-    elif p.exists() and not recursive:
-        files.extend(p.glob('*.json'))
-    elif p.exists() and recursive:
-        files.extend(p.rglob('*.json'))
-    for f in files:
-        logging.info('Importing dashboard from file %s', f)
-        try:
-            with f.open() as data_stream:
-                dashboard_import_export_util.import_dashboards(
-                    db.session, data_stream)
-        except Exception as e:
-            logging.error('Error when importing dashboard from file %s', f)
-            logging.error(e)
-
-
-@app.cli.command()
-@click.option(
-    '--dashboard-file', '-f', default=None,
-    help='Specify the the file to export to')
-@click.option(
-    '--print_stdout', '-p',
-    help='Print JSON to stdout')
-def export_dashboards(print_stdout, dashboard_file):
-    """Export dashboards to JSON"""
-    data = dashboard_import_export_util.export_dashboards(db.session)
-    if print_stdout or not dashboard_file:
-        print(data)
-    if dashboard_file:
-        logging.info('Exporting dashboards to %s', dashboard_file)
-        with open(dashboard_file, 'w') as data_stream:
-            data_stream.write(data)
-
-
 @app.cli.command()
 @click.option(
     '--path', '-p',
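Reviewer note: the two CLI commands removed above were registered through Flask's CLI integration (@app.cli.command()). For reference, a rough sketch of how they were invoked before this commit, assuming the superset console entry point wraps the Flask CLI; the paths and filenames are only examples:

    # import every *.json dashboard export found under ./dashboards
    # (the --recursive/-r switch extended the search to subdirectories)
    superset import_dashboards --path ./dashboards

    # dump all dashboards to a single JSON file
    superset export_dashboards --dashboard-file dashboards.json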
@@ -316,7 +268,7 @@ def import_datasources(path, sync, recursive=False):
     '--datasource-file', '-f', default=None,
     help='Specify the the file to export to')
 @click.option(
-    '--print_stdout', '-p',
+    '--print', '-p',
     help='Print YAML to stdout')
 @click.option(
     '--back-references', '-b',
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-# pylint: disable=C,R,W
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-from __future__ import unicode_literals
-
-import json
-import logging
-import time
-
-from superset import utils
-from superset.models.core import Dashboard
-
-
-def import_dashboards(session, data_stream, import_time=None):
-    """Imports dashboards from a stream to databases"""
-    current_tt = int(time.time())
-    import_time = current_tt if import_time is None else import_time
-    data = json.loads(data_stream.read(), object_hook=utils.decode_dashboards)
-    # TODO: import DRUID datasources
-    for table in data['datasources']:
-        type(table).import_obj(table, import_time=import_time)
-    session.commit()
-    for dashboard in data['dashboards']:
-        Dashboard.import_obj(
-            dashboard, import_time=import_time)
-    session.commit()
-
-
-def export_dashboards(session):
-    """Returns all dashboards metadata as a json dump"""
-    logging.info('Starting export')
-    dashboards = session.query(Dashboard)
-    dashboard_ids = []
-    for dashboard in dashboards:
-        dashboard_ids.append(dashboard.id)
-    data = Dashboard.export_dashboards(dashboard_ids)
-    return data
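Reviewer note: the deleted module was a thin wrapper around the model-level import/export. For anyone who scripted against the pre-commit code, a minimal round-trip sketch using only names that appear in the deleted file; the backup filename is hypothetical, and the calls assume an initialized Superset application so that db.session is usable:

    from superset import db, dashboard_import_export_util

    # Export: serialize all dashboards (and their datasources) to a JSON string.
    json_dump = dashboard_import_export_util.export_dashboards(db.session)
    with open('dashboards_backup.json', 'w') as out:
        out.write(json_dump)

    # Import: read the dump back; import_time defaults to the current timestamp
    # when omitted.
    with open('dashboards_backup.json') as data_stream:
        dashboard_import_export_util.import_dashboards(db.session, data_stream)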
@@ -34,8 +34,9 @@ from werkzeug.routing import BaseConverter
 from werkzeug.utils import secure_filename

 from superset import (
-    app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
-    security_manager, sql_lab, utils, viz)
+    app, appbuilder, cache, db, results_backend, security_manager, sql_lab, utils,
+    viz,
+)
 from superset.connectors.connector_registry import ConnectorRegistry
 from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
 from superset.exceptions import SupersetException
@@ -1237,7 +1238,16 @@ class Superset(BaseSupersetView):
         """Overrides the dashboards using json instances from the file."""
         f = request.files.get('file')
         if request.method == 'POST' and f:
-            dashboard_import_export_util.import_dashboards(db.session, f.stream)
+            current_tt = int(time.time())
+            data = json.loads(f.stream.read(), object_hook=utils.decode_dashboards)
+            # TODO: import DRUID datasources
+            for table in data['datasources']:
+                type(table).import_obj(table, import_time=current_tt)
+            db.session.commit()
+            for dashboard in data['dashboards']:
+                models.Dashboard.import_obj(
+                    dashboard, import_time=current_tt)
+            db.session.commit()
             return redirect('/dashboard/list/')
         return self.render_template('superset/import_dashboards.html')

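Reviewer note: with the helper module gone, this code path is only reachable through the view. A rough sketch of driving it with Flask's test client; the route path below is an assumption (it does not appear in this hunk), and authentication is omitted, so a live instance would likely bounce the request to the login page first. Only the multipart field name 'file' and the redirect target /dashboard/list/ come from the diff:

    from io import BytesIO
    from superset import app

    with app.test_client() as client:
        # An empty but well-formed dump: both loops in the view become no-ops.
        payload = {'file': (BytesIO(b'{"dashboards": [], "datasources": []}'),
                            'dashboards.json')}
        # URL assumed, not taken from this diff.
        client.post('/superset/import_dashboards/', data=payload,
                    content_type='multipart/form-data')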
@@ -10,7 +10,7 @@ import unittest

 from sqlalchemy.orm.session import make_transient

-from superset import dashboard_import_export_util, db, utils
+from superset import db, utils
 from superset.connectors.druid.models import (
     DruidColumn, DruidDatasource, DruidMetric,
 )
@@ -149,9 +149,6 @@ class ImportExportTests(SupersetTestCase):
         return db.session.query(SqlaTable).filter_by(
             table_name=name).first()

-    def get_num_dashboards(self):
-        return db.session.query(models.Dashboard).count()
-
     def assert_dash_equals(self, expected_dash, actual_dash,
                            check_position=True):
         self.assertEquals(expected_dash.slug, actual_dash.slug)
@@ -550,34 +547,6 @@ class ImportExportTests(SupersetTestCase):
         self.assert_datasource_equals(
             copy_datasource, self.get_datasource(imported_id))

-    def test_export_dashboards_util(self):
-        dashboards_json_dump = dashboard_import_export_util.export_dashboards(
-            db.session)
-        dashboards_objects = json.loads(
-            dashboards_json_dump,
-            object_hook=utils.decode_dashboards,
-        )
-
-        exported_dashboards = dashboards_objects['dashboards']
-        for dashboard in exported_dashboards:
-            id_ = dashboard.id
-            dash = self.get_dash(id_)
-            self.assert_dash_equals(dash, dashboard)
-            self.assertEquals(
-                dash.id, json.loads(
-                    dashboard.json_metadata,
-                    object_hook=utils.decode_dashboards,
-                )['remote_id'],
-            )
-        numDasboards = self.get_num_dashboards()
-        self.assertEquals(numDasboards, len(exported_dashboards))
-
-        exported_tables = dashboards_objects['datasources']
-        for exported_table in exported_tables:
-            id_ = exported_table.id
-            table = self.get_table(id_)
-            self.assert_table_equals(table, exported_table)
-

 if __name__ == '__main__':
     unittest.main()
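Reviewer note: one detail worth keeping in mind from the deleted test: an exported dashboard records its original database id as remote_id inside its json_metadata, which is what the removed assertEquals checked. A small stand-alone sketch of that lookup (the remote_id_of helper name is ours, not Superset's, and it is not part of the remaining suite):

    import json
    from superset import utils

    def remote_id_of(exported_dashboard):
        """Return the original database id embedded in an exported dashboard."""
        metadata = json.loads(exported_dashboard.json_metadata,
                              object_hook=utils.decode_dashboards)
        return metadata['remote_id']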