Skip to content

Commit

Permalink
feat: import/export dashboards via cli (apache#5991)
Browse files Browse the repository at this point in the history
* feat: import/export dashboards via cli

* style: fixed lint error

* test: added test for import and export util

* test: removing import test as it is causing integrity issues

Import is a wrapper around existing functionality, so we can go ahead without a test, or mock the actual db operation using https://docs.python.org/3/library/unittest.mock.html

And validate the wrapper operations only.

* test: remove test data file

* test: removed usage of reserved keyword id
  • Loading branch information
Arpit authored and betodealmeida committed Oct 12, 2018
1 parent 47ef7eb commit 072a8d3
Show file tree
Hide file tree
Showing 4 changed files with 124 additions and 16 deletions.
52 changes: 50 additions & 2 deletions superset/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@
import yaml

from superset import (
app, data, db, dict_import_export_util, security_manager, utils,
app, dashboard_import_export_util, data, db,
dict_import_export_util, security_manager, utils,
)

config = app.config
Expand Down Expand Up @@ -224,6 +225,53 @@ def refresh_druid(datasource, merge):
session.commit()


@app.cli.command()
@click.option(
    '--path', '-p',
    help='Path to a single JSON file or path containing multiple JSON '
         'files to import (*.json)')
@click.option(
    '--recursive', '-r', is_flag=True, default=False,
    help='recursively search the path for json files')
def import_dashboards(path, recursive=False):
    """Import dashboards from JSON.

    ``path`` may point at a single JSON file or at a directory; with
    ``--recursive`` the directory is searched for ``*.json`` files at any
    depth, otherwise only its top level is scanned.
    """
    p = Path(path)
    files = []
    if p.is_file():
        # Single explicit file: import it regardless of extension.
        files.append(p)
    elif p.exists() and not recursive:
        files.extend(p.glob('*.json'))
    elif p.exists() and recursive:
        files.extend(p.rglob('*.json'))
    for f in files:
        logging.info('Importing dashboard from file %s', f)
        try:
            with f.open() as data_stream:
                dashboard_import_export_util.import_dashboards(
                    db.session, data_stream)
        except Exception as e:
            # Best-effort batch import: log the failing file and keep going
            # so one bad file does not abort the rest of the batch.
            logging.error('Error when importing dashboard from file %s', f)
            logging.error(e)


@app.cli.command()
@click.option(
    '--dashboard-file', '-f', default=None,
    help='Specify the file to export to')
@click.option(
    '--print_stdout', '-p', is_flag=True, default=False,
    help='Print JSON to stdout')
def export_dashboards(print_stdout, dashboard_file):
    """Export dashboards to JSON.

    Writes the JSON dump to ``--dashboard-file`` when given; prints to
    stdout when ``--print_stdout`` is set or when no file was specified.
    """
    data = dashboard_import_export_util.export_dashboards(db.session)
    if print_stdout or not dashboard_file:
        # Default to stdout so the command always produces visible output.
        print(data)
    if dashboard_file:
        logging.info('Exporting dashboards to %s', dashboard_file)
        with open(dashboard_file, 'w') as data_stream:
            data_stream.write(data)


@app.cli.command()
@click.option(
'--path', '-p',
Expand Down Expand Up @@ -268,7 +316,7 @@ def import_datasources(path, sync, recursive=False):
'--datasource-file', '-f', default=None,
help='Specify the the file to export to')
@click.option(
'--print', '-p',
'--print_stdout', '-p',
help='Print YAML to stdout')
@click.option(
'--back-references', '-b',
Expand Down
39 changes: 39 additions & 0 deletions superset/dashboard_import_export_util.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
# pylint: disable=C,R,W
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import json
import logging
import time

from superset import utils
from superset.models.core import Dashboard


def import_dashboards(session, data_stream, import_time=None):
    """Imports dashboards from a stream to databases.

    Decodes the JSON payload from ``data_stream``, imports its datasources
    first (so dashboards can reference them), then the dashboards themselves.
    ``import_time`` defaults to the current Unix timestamp.
    """
    if import_time is None:
        import_time = int(time.time())
    payload = json.loads(
        data_stream.read(), object_hook=utils.decode_dashboards)
    # TODO: import DRUID datasources
    for datasource in payload['datasources']:
        type(datasource).import_obj(datasource, import_time=import_time)
    session.commit()
    for dashboard_obj in payload['dashboards']:
        Dashboard.import_obj(dashboard_obj, import_time=import_time)
    session.commit()


def export_dashboards(session):
    """Returns all dashboards metadata as a json dump."""
    logging.info('Starting export')
    # Collect the ids of every dashboard in the database, then delegate
    # serialization to the model's own export helper.
    dashboard_ids = [
        dashboard.id for dashboard in session.query(Dashboard)
    ]
    return Dashboard.export_dashboards(dashboard_ids)
16 changes: 3 additions & 13 deletions superset/views/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,8 @@
from werkzeug.utils import secure_filename

from superset import (
app, appbuilder, cache, db, results_backend, security_manager, sql_lab, utils,
viz,
)
app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
security_manager, sql_lab, utils, viz)
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
from superset.exceptions import SupersetException
Expand Down Expand Up @@ -1238,16 +1237,7 @@ def import_dashboards(self):
"""Overrides the dashboards using json instances from the file."""
f = request.files.get('file')
if request.method == 'POST' and f:
current_tt = int(time.time())
data = json.loads(f.stream.read(), object_hook=utils.decode_dashboards)
# TODO: import DRUID datasources
for table in data['datasources']:
type(table).import_obj(table, import_time=current_tt)
db.session.commit()
for dashboard in data['dashboards']:
models.Dashboard.import_obj(
dashboard, import_time=current_tt)
db.session.commit()
dashboard_import_export_util.import_dashboards(db.session, f.stream)
return redirect('/dashboard/list/')
return self.render_template('superset/import_dashboards.html')

Expand Down
33 changes: 32 additions & 1 deletion tests/import_export_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

from sqlalchemy.orm.session import make_transient

from superset import db, utils
from superset import dashboard_import_export_util, db, utils
from superset.connectors.druid.models import (
DruidColumn, DruidDatasource, DruidMetric,
)
Expand Down Expand Up @@ -149,6 +149,9 @@ def get_table_by_name(self, name):
return db.session.query(SqlaTable).filter_by(
table_name=name).first()

def get_num_dashboards(self):
    """Return the total number of dashboards in the test database."""
    dashboard_query = db.session.query(models.Dashboard)
    return dashboard_query.count()

def assert_dash_equals(self, expected_dash, actual_dash,
check_position=True):
self.assertEquals(expected_dash.slug, actual_dash.slug)
Expand Down Expand Up @@ -547,6 +550,34 @@ def test_import_druid_override_identical(self):
self.assert_datasource_equals(
copy_datasource, self.get_datasource(imported_id))

def test_export_dashboards_util(self):
    """Export all dashboards via the util and verify the dump round-trips.

    Checks that every exported dashboard matches its DB counterpart, that
    the export is complete (same count as the DB), and that each exported
    datasource matches its DB table.
    """
    dashboards_json_dump = dashboard_import_export_util.export_dashboards(
        db.session)
    dashboards_objects = json.loads(
        dashboards_json_dump,
        object_hook=utils.decode_dashboards,
    )

    exported_dashboards = dashboards_objects['dashboards']
    for dashboard in exported_dashboards:
        id_ = dashboard.id
        dash = self.get_dash(id_)
        self.assert_dash_equals(dash, dashboard)
        # remote_id in the exported metadata must point back at the
        # original dashboard's primary key.
        self.assertEquals(
            dash.id, json.loads(
                dashboard.json_metadata,
                object_hook=utils.decode_dashboards,
            )['remote_id'],
        )
    # The export must contain every dashboard in the database.
    num_dashboards = self.get_num_dashboards()
    self.assertEquals(num_dashboards, len(exported_dashboards))

    exported_tables = dashboards_objects['datasources']
    for exported_table in exported_tables:
        id_ = exported_table.id
        table = self.get_table(id_)
        self.assert_table_equals(table, exported_table)


# Allow running this test module directly, outside of a test runner.
if __name__ == '__main__':
    unittest.main()

0 comments on commit 072a8d3

Please sign in to comment.