Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: import/export dashboards via cli #5991

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 50 additions & 2 deletions superset/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@
import yaml

from superset import (
app, data, db, dict_import_export_util, security_manager, utils,
app, dashboard_import_export_util, data, db,
dict_import_export_util, security_manager, utils,
)

config = app.config
Expand Down Expand Up @@ -224,6 +225,53 @@ def refresh_druid(datasource, merge):
session.commit()


@app.cli.command()
@click.option(
    '--path', '-p',
    help='Path to a single JSON file or path containing multiple JSON '
         'files to import (*.json)')
@click.option(
    '--recursive', '-r',
    is_flag=True, default=False,
    help='recursively search the path for json files')
def import_dashboards(path, recursive=False):
    """Import dashboards from JSON.

    Accepts either a single .json file or a directory; with --recursive,
    the directory is searched for *.json files at any depth.
    """
    p = Path(path)
    files = []
    if p.is_file():
        files.append(p)
    elif p.exists() and not recursive:
        files.extend(p.glob('*.json'))
    elif p.exists() and recursive:
        files.extend(p.rglob('*.json'))
    else:
        # Nothing matched; make the failure visible instead of exiting silently.
        logging.error('Path %s does not exist, no dashboards imported', path)
    for f in files:
        logging.info('Importing dashboard from file %s', f)
        try:
            with f.open() as data_stream:
                dashboard_import_export_util.import_dashboards(
                    db.session, data_stream)
        # Broad catch is deliberate: one bad file should not abort the
        # remaining imports; the error is logged per file instead.
        except Exception as e:
            logging.error('Error when importing dashboard from file %s', f)
            logging.error(e)


@app.cli.command()
@click.option(
    '--dashboard-file', '-f', default=None,
    help='Specify the file to export to')
@click.option(
    '--print_stdout', '-p',
    is_flag=True, default=False,
    help='Print JSON to stdout')
def export_dashboards(print_stdout, dashboard_file):
    """Export dashboards to JSON.

    Writes to --dashboard-file when given; prints to stdout when
    --print_stdout is set or no output file was specified.
    """
    data = dashboard_import_export_util.export_dashboards(db.session)
    # Default to stdout so the command is never a silent no-op.
    if print_stdout or not dashboard_file:
        print(data)
    if dashboard_file:
        logging.info('Exporting dashboards to %s', dashboard_file)
        with open(dashboard_file, 'w') as data_stream:
            data_stream.write(data)


@app.cli.command()
@click.option(
'--path', '-p',
Expand Down Expand Up @@ -268,7 +316,7 @@ def import_datasources(path, sync, recursive=False):
'--datasource-file', '-f', default=None,
help='Specify the the file to export to')
@click.option(
'--print', '-p',
'--print_stdout', '-p',
help='Print YAML to stdout')
@click.option(
'--back-references', '-b',
Expand Down
39 changes: 39 additions & 0 deletions superset/dashboard_import_export_util.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
# pylint: disable=C,R,W
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import json
import logging
import time

from superset import utils
from superset.models.core import Dashboard


def import_dashboards(session, data_stream, import_time=None):
    """Imports dashboards from a stream to databases.

    Decodes the JSON payload with utils.decode_dashboards, imports its
    'datasources' first and then its 'dashboards', committing after each
    phase. If no import_time is supplied, the current epoch time is used
    so every imported object shares one timestamp.
    """
    if import_time is None:
        import_time = int(time.time())
    payload = json.loads(
        data_stream.read(), object_hook=utils.decode_dashboards)
    # TODO: import DRUID datasources
    for datasource in payload['datasources']:
        # Dispatch on the concrete datasource class's own import_obj.
        type(datasource).import_obj(datasource, import_time=import_time)
    session.commit()
    for dashboard in payload['dashboards']:
        Dashboard.import_obj(dashboard, import_time=import_time)
    session.commit()


def export_dashboards(session):
    """Returns all dashboards metadata as a json dump.

    :param session: SQLAlchemy session used to query Dashboard rows
    :returns: JSON string produced by Dashboard.export_dashboards
    """
    logging.info('Starting export')
    # Collect ids with a comprehension instead of a manual append loop;
    # Dashboard.export_dashboards does the actual serialization.
    dashboard_ids = [dashboard.id for dashboard in session.query(Dashboard)]
    return Dashboard.export_dashboards(dashboard_ids)
16 changes: 3 additions & 13 deletions superset/views/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,8 @@
from werkzeug.utils import secure_filename

from superset import (
app, appbuilder, cache, db, results_backend, security_manager, sql_lab, utils,
viz,
)
app, appbuilder, cache, dashboard_import_export_util, db, results_backend,
security_manager, sql_lab, utils, viz)
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.sqla.models import AnnotationDatasource, SqlaTable
from superset.exceptions import SupersetException
Expand Down Expand Up @@ -1238,16 +1237,7 @@ def import_dashboards(self):
"""Overrides the dashboards using json instances from the file."""
f = request.files.get('file')
if request.method == 'POST' and f:
current_tt = int(time.time())
data = json.loads(f.stream.read(), object_hook=utils.decode_dashboards)
# TODO: import DRUID datasources
for table in data['datasources']:
type(table).import_obj(table, import_time=current_tt)
db.session.commit()
for dashboard in data['dashboards']:
models.Dashboard.import_obj(
dashboard, import_time=current_tt)
db.session.commit()
dashboard_import_export_util.import_dashboards(db.session, f.stream)
return redirect('/dashboard/list/')
return self.render_template('superset/import_dashboards.html')

Expand Down
33 changes: 32 additions & 1 deletion tests/import_export_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

from sqlalchemy.orm.session import make_transient

from superset import db, utils
from superset import dashboard_import_export_util, db, utils
from superset.connectors.druid.models import (
DruidColumn, DruidDatasource, DruidMetric,
)
Expand Down Expand Up @@ -149,6 +149,9 @@ def get_table_by_name(self, name):
return db.session.query(SqlaTable).filter_by(
table_name=name).first()

def get_num_dashboards(self):
    """Return the number of Dashboard rows currently in the database."""
    dashboard_query = db.session.query(models.Dashboard)
    return dashboard_query.count()

def assert_dash_equals(self, expected_dash, actual_dash,
check_position=True):
self.assertEquals(expected_dash.slug, actual_dash.slug)
Expand Down Expand Up @@ -547,6 +550,34 @@ def test_import_druid_override_identical(self):
self.assert_datasource_equals(
copy_datasource, self.get_datasource(imported_id))

def test_export_dashboards_util(self):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@mistercrunch @arpit-agarwal I've noticed this test failing the py36-postgres tests when a pr has not made any changes to this code. For example https://travis-ci.org/apache/incubator-superset/jobs/436740957 Can you take a look?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@mistercrunch @arpit-agarwal this is blocking merging all PRs into master

Copy link
Contributor

@williaster williaster Oct 4, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@mistercrunch @arpit-agarwal this flaky test is still failing most builds for py36-postgres therefore blocking merges. Can you either PTAL or we will revert this PR if it's not fixed by noon today PDT?

cc @john-bodley @kristw @michellethomas @graceguo-supercat

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd say revert.

Copy link

@graceguo-supercat graceguo-supercat Oct 4, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

i made a test branch which reverted this whole PR, and it passed all CI tests. So i created #6035 to revert this. Thank you!

Copy link
Contributor Author

@arpit-agarwal arpit-agarwal Oct 5, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks, guys.
@graceguo-supercat @williaster @mistercrunch
However I don't understand how this PR can halt postgres.

I had seen stalled Postgres builds even before this PR was merged. Look at this job for an instance of a commit made before this PR was merged.

I also see build not passing after reverting the PR. see

We may need to find the first stalled build to see the root cause. I am out in a pycon India i will grok the logs once back

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@arpit-agarwal Thank you very much for investigation. After I reverted this PR, what we observed is, most PRs passed all CI tests, includes postgres one. But master branch still failed at postgres test. Right now i am totally confused what exactly happened :(

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As suggested, we can look at the content of PR #5693 to find the root cause.

I do see some test-related changes in that PR.
Any recent configuration changes on Travis, or to the Postgres version, or some Python lib?

dashboards_json_dump = dashboard_import_export_util.export_dashboards(
db.session)
dashboards_objects = json.loads(
dashboards_json_dump,
object_hook=utils.decode_dashboards,
)

exported_dashboards = dashboards_objects['dashboards']
for dashboard in exported_dashboards:
id_ = dashboard.id
dash = self.get_dash(id_)
self.assert_dash_equals(dash, dashboard)
self.assertEquals(
dash.id, json.loads(
dashboard.json_metadata,
object_hook=utils.decode_dashboards,
)['remote_id'],
)
numDasboards = self.get_num_dashboards()
self.assertEquals(numDasboards, len(exported_dashboards))

exported_tables = dashboards_objects['datasources']
for exported_table in exported_tables:
id_ = exported_table.id
table = self.get_table(id_)
self.assert_table_equals(table, exported_table)


# Allow running this test module directly (outside the test runner).
if __name__ == '__main__':
    unittest.main()