From 6b5f0adf417572b0e47f57f35c306849c00abfae Mon Sep 17 00:00:00 2001
From: Bogdan
Date: Fri, 13 Jan 2017 09:55:45 -0800
Subject: [PATCH 1/8] Permissions cleanup: remove none and duplicates. (#1967)

---
 superset/__init__.py                          |  42 ++++++-
 superset/assets/javascripts/SqlLab/actions.js |  12 +-
 .../SqlLab/components/SqlEditorLeftBar.jsx    | 107 ++++++++++++++----
 superset/config.py                            |   5 +-
 superset/db_engine_specs.py                   |  27 +++++
 superset/models.py                            |   8 +-
 superset/utils.py                             |  22 ++++
 superset/views.py                             |  63 ++++++-----
 8 files changed, 233 insertions(+), 53 deletions(-)

diff --git a/superset/__init__.py b/superset/__init__.py
index 8ab8ded5e80db..ef45c438f515a 100644
--- a/superset/__init__.py
+++ b/superset/__init__.py
@@ -4,11 +4,12 @@ from __future__ import print_function
 from __future__ import unicode_literals
 
+import functools
 import logging
 import os
 from logging.handlers import TimedRotatingFileHandler
 
-from flask import Flask, redirect
+from flask import Flask, redirect, request
 from flask_appbuilder import SQLA, AppBuilder, IndexView
 from flask_appbuilder.baseviews import expose
 from flask_cache import Cache
@@ -37,6 +38,45 @@
 cache = Cache(app, config=app.config.get('CACHE_CONFIG'))
+
+
+def cached_cls_func(timeout=5 * 60, key=None):
+    """Use this decorator to cache class functions.
+
+    Key is a callable function that takes function arguments and
+    returns the caching key.
+    """
+    def wrap(f):
+        def wrapped_f(cls, *args, **kwargs):
+            cache_key = key(*args)
+            rv = cache.get(cache_key)
+            if rv is not None:
+                return rv
+            rv = f(cls, *args, **kwargs)
+            cache.set(cache_key, rv, timeout=timeout)
+            return rv
+        return wrapped_f
+    return wrap
+
+
+def cached_view(timeout=5 * 60, key='view/{}/{}'):
+    """Use this decorator to cache the view functions.
+
+    Function uses the request context to generate the key from the
+    URI and GET attributes.
+ """ + def wrap(f): + def wrapped_f(self, *args, **kwargs): + cache_key = key.format( + request.path, hash(frozenset(request.args.items()))) + rv = cache.get(cache_key) + if rv is not None: + return rv + rv = f(self, *args, **kwargs) + cache.set(cache_key, rv, timeout=timeout) + return rv + return wrapped_f + return wrap + migrate = Migrate(app, db, directory=APP_DIR + "/migrations") # Logging configuration diff --git a/superset/assets/javascripts/SqlLab/actions.js b/superset/assets/javascripts/SqlLab/actions.js index 0e72c209a3944..398e599987ff0 100644 --- a/superset/assets/javascripts/SqlLab/actions.js +++ b/superset/assets/javascripts/SqlLab/actions.js @@ -213,8 +213,18 @@ export function mergeTable(table, query) { return { type: MERGE_TABLE, table, query }; } -export function addTable(query, tableName) { +export function addTable(query, tableOpt) { return function (dispatch) { + const namePieces = tableOpt.value.split('.'); + let tableName; + let schemaName; + if (namePieces.length === 1) { + schemaName = query.schema; + tableName = namePieces[0]; + } else { + schemaName = namePieces[0]; + tableName = namePieces[1]; + } let url = `/superset/table/${query.dbId}/${tableName}/${query.schema}/`; $.get(url, (data) => { const dataPreviewQuery = { diff --git a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx index 21d5f2bfceecc..544336fda16e7 100644 --- a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx +++ b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx @@ -27,11 +27,12 @@ class SqlEditorLeftBar extends React.PureComponent { tableLoading: false, tableOptions: [], networkOn: true, + tableLength: 0, }; } componentWillMount() { - this.fetchSchemas(); - this.fetchTables(); + this.fetchSchemas(this.props.queryEditor.dbId); + this.fetchTables(this.props.queryEditor.dbId); } onChange(db) { const val = (db) ? 
db.value : null; @@ -40,8 +41,8 @@ class SqlEditorLeftBar extends React.PureComponent { if (!(db)) { this.setState({ tableOptions: [] }); } else { - this.fetchTables(val, this.props.queryEditor.schema); this.fetchSchemas(val); + this.fetchTables(val, this.props.queryEditor.schema); } } dbMutator(data) { @@ -58,19 +59,30 @@ class SqlEditorLeftBar extends React.PureComponent { resetState() { this.props.actions.resetState(); } - fetchTables(dbId, schema) { - const actualDbId = dbId || this.props.queryEditor.dbId; - if (actualDbId) { - const actualSchema = schema || this.props.queryEditor.schema; - this.setState({ tableLoading: true }); - this.setState({ tableOptions: [] }); + getTableNamesBySubStr(input, callback) { + this.fetchTables( + this.props.queryEditor.dbId, + this.props.queryEditor.schema, + input); + callback(null, { options: this.state.tableOptions }); + } + fetchTables(dbId, schema, substr) { + if (!dbId) { + this.setState({ + tableLoading: true, + tableOptions: [], + }); + const url = `/caravel/tables/${dbId}/${schema}?substr=${substr}`; const url = `/superset/tables/${actualDbId}/${actualSchema}`; $.get(url, (data) => { - let tableOptions = data.tables.map((s) => ({ value: s, label: s })); - const views = data.views.map((s) => ({ value: s, label: '[view] ' + s })); - tableOptions = [...tableOptions, ...views]; - this.setState({ tableOptions }); - this.setState({ tableLoading: false }); + let tableOptions = data.tables.map((s) => ({value: s, label: s})); + const views = data.views.map((s) => ({value: s, label: '[view] ' + s})); + this.setState({ + tableOptions: [...tables, ...views], + tableLength: data.views_length + data.tables_length, + tableLoading: false + }); + return; }); } } @@ -80,12 +92,11 @@ class SqlEditorLeftBar extends React.PureComponent { this.fetchTables(this.props.queryEditor.dbId, schema); } fetchSchemas(dbId) { - const actualDbId = dbId || this.props.queryEditor.dbId; - if (actualDbId) { + if (dbId) { this.setState({ schemaLoading: true }); - const url = `/databasetablesasync/api/read?_flt_0_id=${actualDbId}`; + const url = `/caravel/schemas/${dbId}`; $.get(url, (data) => { - const schemas = data.result[0].all_schema_names; + const schemas = data.schemas; const schemaOptions = schemas.map((s) => ({ value: s, label: s })); this.setState({ schemaOptions }); this.setState({ schemaLoading: false }); @@ -96,12 +107,14 @@ class SqlEditorLeftBar extends React.PureComponent { this.refs[ref].hide(); } changeTable(tableOpt) { - const tableName = tableOpt.value; + // tableOpt.value is schema.tableName or tableName const qe = this.props.queryEditor; - this.setState({ tableLoading: true }); - this.props.actions.addTable(qe, tableName); + this.props.actions.addTable(qe, tableOpt); this.setState({ tableLoading: false }); + + // reset the list of tables + this.fetchTables(qe.dbId, qe.schema); } render() { let networkAlert = null; @@ -118,6 +131,56 @@ class SqlEditorLeftBar extends React.PureComponent { dataEndpoint="/databaseasync/api/read?_flt_0_expose_in_sqllab=1" onChange={this.onChange.bind(this)} value={this.props.queryEditor.dbId} + databaseId={this.props.queryEditor.dbId} +
+        {networkAlert}
+        [JSX tags lost in extraction: the database AsyncSelect, whose
+         option renderer labels each entry "Database: {o.label}",
+         followed by the schema and table pickers]
+ ") - def all_tables(self, db_id): - """Endpoint that returns all tables and views from the database""" + @expose("/schemas/") + @cached_view(timeout=600) + def schemas(self, db_id): + # db_id = request.args.get('db_id') database = ( db.session .query(models.Database) .filter_by(id=db_id) .one() ) - all_tables = [] - all_views = [] - schemas = database.all_schema_names() - for schema in schemas: - all_tables.extend(database.all_table_names(schema=schema)) - all_views.extend(database.all_view_names(schema=schema)) - if not schemas: - all_tables.extend(database.all_table_names()) - all_views.extend(database.all_view_names()) - return Response( - json.dumps({"tables": all_tables, "views": all_views}), + json.dumps({'schemas': database.all_schema_names()}), mimetype="application/json") @api @has_access_api - @expose("/tables//") + @expose("/tables///") + @cached_view(timeout=600) def tables(self, db_id, schema): """endpoint to power the calendar heatmap on the welcome page""" - schema = None if schema in ('null', 'undefined') else schema + schema = utils.js_string_to_python(schema) + substr = utils.js_string_to_python(request.args.get('substr')) database = ( db.session .query(models.Database) .filter_by(id=db_id) .one() ) - tables = [t for t in database.all_table_names(schema) if - self.datasource_access_by_name(database, t, schema=schema)] - views = [v for v in database.all_view_names(schema) if - self.datasource_access_by_name(database, v, schema=schema)] - payload = {'tables': tables, 'views': views} + table_names = [ + t for t in database.all_table_names(schema) if + self.datasource_access_by_name(database, t, schema=schema)] + view_names = [ + v for v in database.all_view_names(schema) if + self.datasource_access_by_name(database, v, schema=schema)] + if substr: + table_names = [tn for tn in table_names if substr in tn] + view_names = [vn for vn in view_names if substr in vn] + + max_items = config.get('MAX_TABLE_NAMES') or len(table_names) + total_items = len(table_names) + len(view_names) + max_tables = len(table_names) + max_views = len(view_names) + if total_items: + max_tables = max_items * len(table_names) // total_items + max_views = max_items * len(view_names) // total_items + + payload = { + 'tables': table_names[:max_tables], + 'tables_length': len(table_names), + 'views': view_names[:max_views], + 'views_length': len(view_names), + } return Response( json.dumps(payload), mimetype="application/json") @@ -2362,7 +2373,7 @@ def sqllab_viz(self): @expose("/table////") @log_this def table(self, database_id, table_name, schema): - schema = None if schema in ('null', 'undefined') else schema + schema = utils.js_string_to_python(schema) mydb = db.session.query(models.Database).filter_by(id=database_id).one() cols = [] indexes = [] @@ -2419,7 +2430,7 @@ def table(self, database_id, table_name, schema): @expose("/extra_table_metadata////") @log_this def extra_table_metadata(self, database_id, table_name, schema): - schema = None if schema in ('null', 'undefined') else schema + schema = utils.js_string_to_python(schema) mydb = db.session.query(models.Database).filter_by(id=database_id).one() payload = mydb.db_engine_spec.extra_table_metadata( mydb, table_name, schema) From 2a894b45d79bf99d5d419e42d46c0e52f727315a Mon Sep 17 00:00:00 2001 From: Bogdan Kyryliuk Date: Mon, 31 Oct 2016 11:55:46 -0700 Subject: [PATCH 2/8] Rename rv => o in the decorator. 
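
Both decorators being renamed here follow the usual memoization shape:
derive a cache key from the call, return the cached value on a hit,
otherwise compute, store, and return. A minimal, self-contained sketch of
that pattern (a plain dict stands in for the Flask-Cache instance; the
names are illustrative and not part of this patch):

    import functools

    _store = {}  # stand-in for cache.get() / cache.set()

    def memoized(key_fn):
        def wrap(f):
            @functools.wraps(f)  # preserve f's name and docstring
            def wrapped_f(*args, **kwargs):
                cache_key = key_fn(*args, **kwargs)
                o = _store.get(cache_key)
                if o is not None:
                    return o  # cache hit
                o = f(*args, **kwargs)  # miss: compute and store
                _store[cache_key] = o
                return o
            return wrapped_f
        return wrap

    @memoized(lambda x: 'square:{}'.format(x))
    def square(x):
        return x * x

    square(3)  # computed, then stored under 'square:3'
    square(3)  # served from _store

One caveat shared by both decorators: the "if o is not None" test means a
function that legitimately returns None is never cached and is
re-evaluated on every call.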
--- superset/__init__.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/superset/__init__.py b/superset/__init__.py index ef45c438f515a..577d474f8e604 100644 --- a/superset/__init__.py +++ b/superset/__init__.py @@ -48,12 +48,12 @@ def cached_cls_func(timeout=5 * 60, key=None): def wrap(f): def wrapped_f(cls, *args, **kwargs): cache_key = key(*args) - rv = cache.get(cache_key) - if rv is not None: - return rv - rv = f(cls, *args, **kwargs) - cache.set(cache_key, rv, timeout=timeout) - return rv + o = cache.get(cache_key) + if o is not None: + return o + o = f(cls, *args, **kwargs) + cache.set(cache_key, o, timeout=timeout) + return o return wrapped_f return wrap @@ -68,12 +68,12 @@ def wrap(f): def wrapped_f(self, *args, **kwargs): cache_key = key.format( request.path, hash(frozenset(request.args.items()))) - rv = cache.get(cache_key) - if rv is not None: - return rv - rv = f(self, *args, **kwargs) - cache.set(cache_key, rv, timeout=timeout) - return rv + o = cache.get(cache_key) + if o is not None: + return o + o = f(self, *args, **kwargs) + cache.set(cache_key, o, timeout=timeout) + return o return wrapped_f return wrap From 543d983cb5c7e18f27a4b2f98dd7845629b8d5dd Mon Sep 17 00:00:00 2001 From: Bogdan Kyryliuk Date: Tue, 1 Nov 2016 14:31:04 -0700 Subject: [PATCH 3/8] Address comments. --- caravel/cache_util.py | 26 ++++++++++++++ superset/__init__.py | 39 +-------------------- superset/config.py | 5 +-- superset/db_engine_specs.py | 70 +++++++++++++++++++++++++++---------- superset/models.py | 27 +++++++------- superset/views.py | 30 ++++++++++++---- 6 files changed, 118 insertions(+), 79 deletions(-) create mode 100644 caravel/cache_util.py diff --git a/caravel/cache_util.py b/caravel/cache_util.py new file mode 100644 index 0000000000000..51d612a690753 --- /dev/null +++ b/caravel/cache_util.py @@ -0,0 +1,26 @@ +from caravel import simple_cache + + +def view_cache_key(*args, **kwargs): + args_hash = hash(frozenset(request.args.items())) + return 'view/{}/{}'.format(request.path, args_hash) + + +def memoized_func(timeout=5 * 60, key=view_cache_key): + """Use this decorator to cache functions that have predefined first arg. + + memoized_func uses simple_cache and stored the data in memory. + Key is a callable function that takes function arguments and + returns the caching key. + """ + def wrap(f): + def wrapped_f(cls, *args, **kwargs): + cache_key = key(*args, **kwargs) + o = simple_cache.get(cache_key) + if o is not None: + return o + o = f(cls, *args, **kwargs) + simple_cache.set(cache_key, o, timeout=timeout) + return o + return wrapped_f + return wrap diff --git a/superset/__init__.py b/superset/__init__.py index 577d474f8e604..a08cc3033f78f 100644 --- a/superset/__init__.py +++ b/superset/__init__.py @@ -4,7 +4,6 @@ from __future__ import print_function from __future__ import unicode_literals -import functools import logging import os from logging.handlers import TimedRotatingFileHandler @@ -38,44 +37,8 @@ cache = Cache(app, config=app.config.get('CACHE_CONFIG')) +simple_cache = Cache(app, config=app.config.get('IN_MEMORY_CACHE_CONFIG')) -def cached_cls_func(timeout=5 * 60, key=None): - """Use this decorator to cache class functions. - - Key is a callable function that takes function arguments and - returns the caching key. 
- """ - def wrap(f): - def wrapped_f(cls, *args, **kwargs): - cache_key = key(*args) - o = cache.get(cache_key) - if o is not None: - return o - o = f(cls, *args, **kwargs) - cache.set(cache_key, o, timeout=timeout) - return o - return wrapped_f - return wrap - - -def cached_view(timeout=5 * 60, key='view/{}/{}'): - """Use this decorator to cache the view functions. - - Function uses the request context to generate key form the - uri and get attributes. - """ - def wrap(f): - def wrapped_f(self, *args, **kwargs): - cache_key = key.format( - request.path, hash(frozenset(request.args.items()))) - o = cache.get(cache_key) - if o is not None: - return o - o = f(self, *args, **kwargs) - cache.set(cache_key, o, timeout=timeout) - return o - return wrapped_f - return wrap migrate = Migrate(app, db, directory=APP_DIR + "/migrations") diff --git a/superset/config.py b/superset/config.py index 0341d016f3430..059e5798b0014 100644 --- a/superset/config.py +++ b/superset/config.py @@ -152,7 +152,8 @@ # IMG_SIZE = (300, 200, True) CACHE_DEFAULT_TIMEOUT = None -CACHE_CONFIG = {'CACHE_TYPE': 'simple'} +CACHE_CONFIG = {'CACHE_TYPE': 'null'} +IN_MEMORY_CACHE_CONFIG = {'CACHE_TYPE': 'simple'} # CORS Options ENABLE_CORS = False @@ -210,7 +211,7 @@ DISPLAY_SQL_MAX_ROW = 1000 # Maximum number of tables/views displayed in the dropdown window in SQL Lab. -MAX_TABLE_NAMES = 100 +MAX_TABLE_NAMES = 3000 # If defined, shows this text in an alert-warning box in the navbar # one example use case may be "STAGING" to make it clear that this is diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index ac457ec680a81..2508229c74961 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -16,14 +16,14 @@ from __future__ import print_function from __future__ import unicode_literals -from collections import namedtuple +from collections import namedtuple, defaultdict import inspect import textwrap import time from flask_babel import lazy_gettext as _ -from caravel import cached_cls_func +from caravel import cache_util Grain = namedtuple('Grain', 'name label function') @@ -57,16 +57,44 @@ def convert_dttm(cls, target_type, dttm): return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S')) @classmethod - @cached_cls_func(timeout=120, key=lambda x: 'db:{}:tables'.format(x.id)) - def all_table_full_names(cls, database): - schemas = database.all_schema_names() - if not schemas: - return database.all_table_names() - table_names = [] + @cache_util.memoized_func( + timeout=600, + key=lambda *args, **kwargs: 'db:{}:tables'.format(args[0].id)) + def fetch_tables(cls, database): + """ Returns the dictionary with schemas and table list. + + Empty schema corresponds to the list of all tables that are names + .. 
+ """ + schemas = database.inspector.get_schema_names() + tables = {} + all_tables = [] + for schema in schemas: + tables[schema] = sorted(database.inspector.get_table_names(schema)) + all_tables += ['{}.{}'.format(schema, t) for t in tables[schema]] + if all_tables: + tables[""] = all_tables + return tables + + @classmethod + @cache_util.memoized_func( + timeout=600, + key=lambda *args, **kwargs: 'db:{}:views'.format(args[0].id)) + def fetch_views(cls, database): + schemas = database.inspector.get_schema_names() + views = {} + all_views = [] for schema in schemas: - table_names += ['{}.{}'.format(schema, t) for t in - database.all_table_names(schema=schema)] - return table_names + try: + views[schema] = sorted( + database.inspector.get_view_names(schema)) + all_views += [ + '{}.{}'.format(schema, t) for t in tables[schema]] + except Exception as e: + pass + if all_views: + views[""] = all_views + return views @classmethod def handle_cursor(cls, cursor, query, session): @@ -240,13 +268,19 @@ def epoch_to_dttm(cls): return "from_unixtime({col})" @classmethod - @cached_cls_func(timeout=120, key=lambda x: 'db:{}:tables'.format(x.id)) - def all_table_full_names(cls, database): - return database.get_df(""" - SELECT concat(table_schema, '.', table_name) as fullname - FROM INFORMATION_SCHEMA.TABLES - ORDER BY concat(table_schema, '.', table_name) - """, None)['fullname'].tolist() + @cache_util.memoized_func( + timeout=600, + key=lambda *args, **kwargs: 'db:{}:tables'.format(args[0].id)) + def fetch_tables(cls, database): + tables_df = database.get_df( + """SELECT table_schema, table_name FROM INFORMATION_SCHEMA.TABLES + ORDER BY concat(table_schema, '.', table_name)""", None) + tables = defaultdict(list) + for _, row in tables_df.iterrows(): + tables[row['table_schema']].append(row['table_name']) + tables[""].append('{}.{}'.format( + row['table_schema'], row['table_name'])) + return tables @classmethod def extra_table_metadata(cls, database, table_name, schema_name): diff --git a/superset/models.py b/superset/models.py index be062e572f5df..ca9a1a1497f00 100644 --- a/superset/models.py +++ b/superset/models.py @@ -845,24 +845,23 @@ def inspector(self): return sqla.inspect(engine) def all_table_names(self, schema=None): - if schema: - return sorted(self.inspector.get_table_names(schema)) - try: - return self.db_engine_spec.all_table_full_names(self) - except NotImplementedError: - return sorted(self.inspector.get_table_names(schema)) + if not schema: + schema = "" + return self.db_engine_spec.fetch_tables(self).get(schema, []) def all_view_names(self, schema=None): - views = [] - try: - views = self.inspector.get_view_names(schema) - except Exception as e: - pass - return views + if not schema: + schema = "" + views_dict = self.db_engine_spec.fetch_views(self) + return views_dict.get(schema, []) - @utils.memoized def all_schema_names(self): - return sorted(self.inspector.get_schema_names()) + schema_names = sorted(self.db_engine_spec.fetch_tables(self).keys()) + # first element in schema names is empty that contains all table names + if schema_names: + return schema_names[1:] + else: + return [] @property def db_engine_spec(self): diff --git a/superset/views.py b/superset/views.py index c67be9e96048e..014a73eb81ec5 100755 --- a/superset/views.py +++ b/superset/views.py @@ -1782,8 +1782,28 @@ def activity_per_day(self): @api @has_access_api +<<<<<<< 309bede63194423bdeb2341fa959da0e19a6b75c @expose("/schemas/") @cached_view(timeout=600) +======= + @expose("/all_tables/") + def all_tables(self, 
db_id): + """Endpoint that returns all tables and views from the database""" + all_tables = [] + all_views = [] + schemas = database.all_schema_names() + for schema in schemas: + all_tables.extend(database.all_table_names(schema=schema)) + all_views.extend(database.all_view_names(schema=schema)) + if not schemas: + all_tables.extend(database.all_table_names()) + all_views.extend(database.all_view_names()) + + return Response( + json.dumps({"tables": all_tables, "views": all_views}), + + @expose("/schemas/") +>>>>>>> Address comments. def schemas(self, db_id): # db_id = request.args.get('db_id') database = ( @@ -1804,18 +1824,14 @@ def tables(self, db_id, schema): """endpoint to power the calendar heatmap on the welcome page""" schema = utils.js_string_to_python(schema) substr = utils.js_string_to_python(request.args.get('substr')) - database = ( - db.session - .query(models.Database) - .filter_by(id=db_id) - .one() - ) + database = db.session.query(models.Database).filter_by(id=db_id).one() table_names = [ t for t in database.all_table_names(schema) if self.datasource_access_by_name(database, t, schema=schema)] view_names = [ - v for v in database.all_view_names(schema) if + v for v in database.all_table_names(schema) if self.datasource_access_by_name(database, v, schema=schema)] + if substr: table_names = [tn for tn in table_names if substr in tn] view_names = [vn for vn in view_names if substr in vn] From 6243407818e1aa83a2b548ee04c17b891ff83ab4 Mon Sep 17 00:00:00 2001 From: Bogdan Kyryliuk Date: Thu, 5 Jan 2017 09:24:44 -0800 Subject: [PATCH 4/8] Updates --- superset/__init__.py | 3 +- superset/assets/javascripts/SqlLab/actions.js | 20 +- .../SqlLab/components/SqlEditorLeftBar.jsx | 155 +++++------ superset/assets/package.json | 2 + {caravel => superset}/cache_util.py | 4 +- superset/config.py | 2 +- superset/db_engine_specs.py | 82 +++--- superset/models.py | 23 +- superset/source_registry.py | 24 +- superset/utils.py | 20 +- superset/views.py | 262 +++++++----------- 11 files changed, 258 insertions(+), 339 deletions(-) rename {caravel => superset}/cache_util.py (87%) diff --git a/superset/__init__.py b/superset/__init__.py index a08cc3033f78f..5d54e543c21dc 100644 --- a/superset/__init__.py +++ b/superset/__init__.py @@ -8,7 +8,7 @@ import os from logging.handlers import TimedRotatingFileHandler -from flask import Flask, redirect, request +from flask import Flask, redirect from flask_appbuilder import SQLA, AppBuilder, IndexView from flask_appbuilder.baseviews import expose from flask_cache import Cache @@ -29,6 +29,7 @@ # In production mode, add log handler to sys.stderr. 
app.logger.addHandler(logging.StreamHandler()) app.logger.setLevel(logging.INFO) +logging.getLogger('pyhive.presto').setLevel(logging.INFO) db = SQLA(app) diff --git a/superset/assets/javascripts/SqlLab/actions.js b/superset/assets/javascripts/SqlLab/actions.js index 398e599987ff0..d7d20f49d3e1c 100644 --- a/superset/assets/javascripts/SqlLab/actions.js +++ b/superset/assets/javascripts/SqlLab/actions.js @@ -213,19 +213,9 @@ export function mergeTable(table, query) { return { type: MERGE_TABLE, table, query }; } -export function addTable(query, tableOpt) { +export function addTable(query, tableName, schemaName) { return function (dispatch) { - const namePieces = tableOpt.value.split('.'); - let tableName; - let schemaName; - if (namePieces.length === 1) { - schemaName = query.schema; - tableName = namePieces[0]; - } else { - schemaName = namePieces[0]; - tableName = namePieces[1]; - } - let url = `/superset/table/${query.dbId}/${tableName}/${query.schema}/`; + let url = `/superset/table/${query.dbId}/${tableName}/${schemaName}/`; $.get(url, (data) => { const dataPreviewQuery = { id: shortid.generate(), @@ -242,7 +232,7 @@ export function addTable(query, tableOpt) { Object.assign(data, { dbId: query.dbId, queryEditorId: query.id, - schema: query.schema, + schema: schemaName, expanded: true, }), dataPreviewQuery) ); @@ -258,12 +248,12 @@ export function addTable(query, tableOpt) { ); }); - url = `/superset/extra_table_metadata/${query.dbId}/${tableName}/${query.schema}/`; + url = `/superset/extra_table_metadata/${query.dbId}/${tableName}/${schemaName}/`; $.get(url, (data) => { const table = { dbId: query.dbId, queryEditorId: query.id, - schema: query.schema, + schema: schemaName, name: tableName, }; Object.assign(table, data); diff --git a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx index 544336fda16e7..5288005cd0043 100644 --- a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx +++ b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx @@ -1,9 +1,12 @@ +require('es6-promise').polyfill(); +require('isomorphic-fetch'); const $ = window.$ = require('jquery'); import React from 'react'; import Select from 'react-select'; import { Label, Button } from 'react-bootstrap'; import TableElement from './TableElement'; import AsyncSelect from '../../components/AsyncSelect'; +import fetch from 'isomorphic-fetch'; const propTypes = { queryEditor: React.PropTypes.object.isRequired, @@ -27,7 +30,6 @@ class SqlEditorLeftBar extends React.PureComponent { tableLoading: false, tableOptions: [], networkOn: true, - tableLength: 0, }; } componentWillMount() { @@ -41,8 +43,8 @@ class SqlEditorLeftBar extends React.PureComponent { if (!(db)) { this.setState({ tableOptions: [] }); } else { - this.fetchSchemas(val); this.fetchTables(val, this.props.queryEditor.schema); + this.fetchSchemas(val); } } dbMutator(data) { @@ -59,44 +61,75 @@ class SqlEditorLeftBar extends React.PureComponent { resetState() { this.props.actions.resetState(); } - getTableNamesBySubStr(input, callback) { - this.fetchTables( - this.props.queryEditor.dbId, - this.props.queryEditor.schema, - input); - callback(null, { options: this.state.tableOptions }); + getTableNamesBySubStr(input) { + if (!this.props.queryEditor.dbId || !input) { + return Promise.resolve({ options: [] }); + } + // issues with redirects + return fetch( + `/superset/tables/${this.props.queryEditor.dbId}/${this.props.queryEditor.schema}/${input}`, + { + 
method: 'GET', + mode: 'no-cors', + credentials: 'include', + headers: { + 'Access-Control-Allow-Origin':' | *', + 'Accept': 'application/json, application/xml, text/plain, text/html, *.*', + 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8' + }, + }) + .then((response) =>{ return response.json() }) + .then((json) => { + this.setState({ tableLength: json.tableLength }); + return {options: json.options}; + }); } fetchTables(dbId, schema, substr) { - if (!dbId) { - this.setState({ - tableLoading: true, - tableOptions: [], - }); - const url = `/caravel/tables/${dbId}/${schema}?substr=${substr}`; - const url = `/superset/tables/${actualDbId}/${actualSchema}`; + if (dbId) { + this.setState({ tableLoading: true, tableOptions: []}); + const url = `/superset/tables/${dbId}/${schema}/${substr}/`; $.get(url, (data) => { - let tableOptions = data.tables.map((s) => ({value: s, label: s})); - const views = data.views.map((s) => ({value: s, label: '[view] ' + s})); this.setState({ - tableOptions: [...tables, ...views], - tableLength: data.views_length + data.tables_length, - tableLoading: false + tableLoading: false, + tableOptions: data.options, + tableLength: data.tableLength, }); - return; }); } } + changeTable(tableOpt) { + if (!tableOpt) { + this.setState({ tableName: '' }); + return; + } + const namePieces = tableOpt.value.split('.'); + let tableName = namePieces[0]; + let schemaName = this.props.queryEditor.schema; + if (namePieces.length === 1) { + this.setState({ tableName: tableName }); + } else { + schemaName = namePieces[0]; + tableName = namePieces[1]; + this.setState({ tableName: tableName }); + this.props.actions.queryEditorSetSchema(this.props.queryEditor, schemaName); + this.fetchTables(this.props.queryEditor.dbId, schemaName); + } + this.setState({ tableLoading: true }); + this.props.actions.addTable(this.props.queryEditor, tableName, schemaName); + this.setState({ tableLoading: false }); + } changeSchema(schemaOpt) { const schema = (schemaOpt) ? schemaOpt.value : null; this.props.actions.queryEditorSetSchema(this.props.queryEditor, schema); this.fetchTables(this.props.queryEditor.dbId, schema); } fetchSchemas(dbId) { - if (dbId) { + const actualDbId = dbId || this.props.queryEditor.dbId; + if (actualDbId) { this.setState({ schemaLoading: true }); - const url = `/caravel/schemas/${dbId}`; + const url = `/databasetablesasync/api/read?_flt_0_id=${actualDbId}`; $.get(url, (data) => { - const schemas = data.schemas; + const schemas = data.result[0].all_schema_names; const schemaOptions = schemas.map((s) => ({ value: s, label: s })); this.setState({ schemaOptions }); this.setState({ schemaLoading: false }); @@ -106,16 +139,6 @@ class SqlEditorLeftBar extends React.PureComponent { closePopover(ref) { this.refs[ref].hide(); } - changeTable(tableOpt) { - // tableOpt.value is schema.tableName or tableName - const qe = this.props.queryEditor; - this.setState({ tableLoading: true }); - this.props.actions.addTable(qe, tableOpt); - this.setState({ tableLoading: false }); - - // reset the list of tables - this.fetchTables(qe.dbId, qe.schema); - } render() { let networkAlert = null; if (!this.props.networkOn) { @@ -132,54 +155,6 @@ class SqlEditorLeftBar extends React.PureComponent { onChange={this.onChange.bind(this)} value={this.props.queryEditor.dbId} databaseId={this.props.queryEditor.dbId} -
-        {networkAlert}
-        [deleted JSX, tags lost in extraction: the patch-1 database
-         option renderer labeled "Database: {o.label}"]
+        [added JSX, tags lost in extraction: the reworked database,
+         schema, and table pickers, including the branch guarded by
+         {!this.props.queryEditor.schema && ...}]
diff --git a/superset/assets/package.json b/superset/assets/package.json index c3484559b8759..bcf88908d8975 100644 --- a/superset/assets/package.json +++ b/superset/assets/package.json @@ -52,10 +52,12 @@ "datamaps": "^0.5.8", "datatables-bootstrap3-plugin": "^0.5.0", "datatables.net-bs": "^1.10.12", + "es6-promise": "^4.0.5", "font-awesome": "^4.6.3", "gridster": "^0.5.6", "immutability-helper": "^2.0.0", "immutable": "^3.8.1", + "isomorphic-fetch": "^2.2.1", "jquery": "^2.2.1", "jquery-ui": "1.10.5", "lodash.throttle": "^4.1.1", diff --git a/caravel/cache_util.py b/superset/cache_util.py similarity index 87% rename from caravel/cache_util.py rename to superset/cache_util.py index 51d612a690753..91a1ae20dea2a 100644 --- a/caravel/cache_util.py +++ b/superset/cache_util.py @@ -1,4 +1,5 @@ -from caravel import simple_cache +from superset import simple_cache +from flask import request def view_cache_key(*args, **kwargs): @@ -20,6 +21,7 @@ def wrapped_f(cls, *args, **kwargs): if o is not None: return o o = f(cls, *args, **kwargs) + print('cache_key: {}'.format(cache_key)) simple_cache.set(cache_key, o, timeout=timeout) return o return wrapped_f diff --git a/superset/config.py b/superset/config.py index 059e5798b0014..431e163a284ba 100644 --- a/superset/config.py +++ b/superset/config.py @@ -153,7 +153,7 @@ CACHE_DEFAULT_TIMEOUT = None CACHE_CONFIG = {'CACHE_TYPE': 'null'} -IN_MEMORY_CACHE_CONFIG = {'CACHE_TYPE': 'simple'} +IN_MEMORY_CACHE_CONFIG = {'CACHE_TYPE': 'null'} # CORS Options ENABLE_CORS = False diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 2508229c74961..2c7a449446abd 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -23,7 +23,7 @@ from flask_babel import lazy_gettext as _ -from caravel import cache_util +from superset import cache_util Grain = namedtuple('Grain', 'name label function') @@ -59,42 +59,29 @@ def convert_dttm(cls, target_type, dttm): @classmethod @cache_util.memoized_func( timeout=600, - key=lambda *args, **kwargs: 'db:{}:tables'.format(args[0].id)) - def fetch_tables(cls, database): - """ Returns the dictionary with schemas and table list. + key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1])) + def fetch_result_sets(cls, db, datasource_type): + """ Returns the dictionary {schema : [result_set_name]}. - Empty schema corresponds to the list of all tables that are names - .
. + Datasource_type can be 'table' or 'view'. + Empty schema corresponds to the list of full names of the all + tables or views: .. """ - schemas = database.inspector.get_schema_names() - tables = {} - all_tables = [] + schemas = db.inspector.get_schema_names() + result_sets = {} + all_result_sets = [] for schema in schemas: - tables[schema] = sorted(database.inspector.get_table_names(schema)) - all_tables += ['{}.{}'.format(schema, t) for t in tables[schema]] - if all_tables: - tables[""] = all_tables - return tables - - @classmethod - @cache_util.memoized_func( - timeout=600, - key=lambda *args, **kwargs: 'db:{}:views'.format(args[0].id)) - def fetch_views(cls, database): - schemas = database.inspector.get_schema_names() - views = {} - all_views = [] - for schema in schemas: - try: - views[schema] = sorted( - database.inspector.get_view_names(schema)) - all_views += [ - '{}.{}'.format(schema, t) for t in tables[schema]] - except Exception as e: - pass - if all_views: - views[""] = all_views - return views + if datasource_type == 'table': + result_sets[schema] = sorted( + db.inspector.get_table_names(schema)) + elif datasource_type == 'view': + result_sets[schema] = sorted( + db.inspector.get_view_names(schema)) + all_result_sets += [ + '{}.{}'.format(schema, t) for t in result_sets[schema]] + if all_result_sets: + result_sets[""] = all_result_sets + return result_sets @classmethod def handle_cursor(cls, cursor, query, session): @@ -270,17 +257,24 @@ def epoch_to_dttm(cls): @classmethod @cache_util.memoized_func( timeout=600, - key=lambda *args, **kwargs: 'db:{}:tables'.format(args[0].id)) - def fetch_tables(cls, database): - tables_df = database.get_df( - """SELECT table_schema, table_name FROM INFORMATION_SCHEMA.TABLES - ORDER BY concat(table_schema, '.', table_name)""", None) - tables = defaultdict(list) - for _, row in tables_df.iterrows(): - tables[row['table_schema']].append(row['table_name']) - tables[""].append('{}.{}'.format( + key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1])) + def fetch_result_sets(cls, db, datasource_type): + """ Returns the dictionary {schema : [result_set_name]}. + + Datasource_type can be 'table' or 'view'. + Empty schema corresponds to the list of full names of the all + tables or views: .. 
+ """ + result_set_df = db.get_df( + """SELECT table_schema, table_name FROM INFORMATION_SCHEMA.{}S + ORDER BY concat(table_schema, '.', table_name)""".format( + datasource_type.upper()), None) + result_sets = defaultdict(list) + for _, row in result_set_df.iterrows(): + result_sets[row['table_schema']].append(row['table_name']) + result_sets[""].append('{}.{}'.format( row['table_schema'], row['table_name'])) - return tables + return result_sets @classmethod def extra_table_metadata(cls, database, table_name, schema_name): diff --git a/superset/models.py b/superset/models.py index ca9a1a1497f00..99af95499dc8f 100644 --- a/superset/models.py +++ b/superset/models.py @@ -846,22 +846,23 @@ def inspector(self): def all_table_names(self, schema=None): if not schema: - schema = "" - return self.db_engine_spec.fetch_tables(self).get(schema, []) + tables_dict = self.db_engine_spec.fetch_result_sets(self, 'table') + return tables_dict.get("", []) + return sorted(self.inspector.get_table_names(schema)) def all_view_names(self, schema=None): if not schema: - schema = "" - views_dict = self.db_engine_spec.fetch_views(self) - return views_dict.get(schema, []) + views_dict = self.db_engine_spec.fetch_result_sets(self, 'view') + return views_dict.get("", []) + views = [] + try: + views = self.inspector.get_view_names(schema) + except Exception: + pass + return views def all_schema_names(self): - schema_names = sorted(self.db_engine_spec.fetch_tables(self).keys()) - # first element in schema names is empty that contains all table names - if schema_names: - return schema_names[1:] - else: - return [] + return sorted(self.inspector.get_schema_names()) @property def db_engine_spec(self): diff --git a/superset/source_registry.py b/superset/source_registry.py index ff64265d4788c..36c4ac0e30b77 100644 --- a/superset/source_registry.py +++ b/superset/source_registry.py @@ -44,14 +44,19 @@ def get_datasource_by_name(cls, session, datasource_type, datasource_name, return db_ds[0] @classmethod - def query_datasources_by_name( - cls, session, database, datasource_name, schema=None): + def query_datasources_by_names( + cls, session, database, datasource_names, schema=None): + """Datasource_names can be a list or a single name.""" datasource_class = SourceRegistry.sources[database.type] + if not hasattr(datasource_names, '__iter__'): + datasource_names = [datasource_names] + datasource_set = set(datasource_names) + if database.type == 'table': query = ( session.query(datasource_class) .filter_by(database_id=database.id) - .filter_by(table_name=datasource_name)) + .filter(datasource_class.table_name.in_(datasource_set))) if schema: query = query.filter_by(schema=schema) return query.all() @@ -59,11 +64,22 @@ def query_datasources_by_name( return ( session.query(datasource_class) .filter_by(cluster_name=database.id) - .filter_by(datasource_name=datasource_name) + .filter(datasource_class.datasource_name.in_(datasource_set)) .all() ) return None + @classmethod + def query_datasources_by_permissions(cls, session, database, permissions): + """Datasource_names can be a list or a single name.""" + datasource_class = SourceRegistry.sources[database.type] + return ( + session.query(datasource_class) + .filter_by(database_id=database.id) + .filter(datasource_class.perm.in_(permissions)) + .all() + ) + @classmethod def get_eager_datasource(cls, session, datasource_type, datasource_id): """Returns datasource with columns and metrics.""" diff --git a/superset/utils.py b/superset/utils.py index f0ace2ff0d9d6..7e6da2a1ca8c6 100644 
--- a/superset/utils.py +++ b/superset/utils.py @@ -8,7 +8,6 @@ import functools import json import logging -import markdown as md import numpy import os import parsedatetime @@ -33,7 +32,7 @@ ) from flask_appbuilder._compat import as_unicode from flask_babel import gettext as __ -from markdown import markdown as md +import markdown as md from past.builtins import basestring from pydruid.utils.having import Having from sqlalchemy import event, exc @@ -127,23 +126,6 @@ def js_string_to_python(item): return None if item in ('null', 'undefined') else item -def get_or_create_main_db(caravel): - db = caravel.db - config = caravel.app.config - DB = caravel.models.Database - logging.info("Creating database reference") - dbobj = db.session.query(DB).filter_by(database_name='main').first() - if not dbobj: - dbobj = DB(database_name="main") - logging.info(config.get("SQLALCHEMY_DATABASE_URI")) - dbobj.set_sqlalchemy_uri(config.get("SQLALCHEMY_DATABASE_URI")) - dbobj.expose_in_sqllab = True - dbobj.allow_run_sync = True - db.session.add(dbobj) - db.session.commit() - return dbobj - - class DimSelector(Having): def __init__(self, **args): # Just a hack to prevent any exceptions diff --git a/superset/views.py b/superset/views.py index 014a73eb81ec5..1e206acf1b947 100755 --- a/superset/views.py +++ b/superset/views.py @@ -31,12 +31,13 @@ from flask_babel import lazy_gettext as _ from sqlalchemy import create_engine +from werkzeug.routing import BaseConverter from wtforms.validators import ValidationError import superset from superset import ( appbuilder, cache, db, models, viz, utils, app, - sm, sql_lab, sql_parse, results_backend, security, cached_view + sm, sql_lab, sql_parse, results_backend, security, ) from superset.utils import has_access from superset.source_registry import SourceRegistry @@ -82,15 +83,14 @@ def datasource_access(self, datasource, user=None): def datasource_access_by_name( self, database, datasource_name, schema=None): - if (self.database_access(database) or - self.all_datasource_access()): + if self.database_access(database) or self.all_datasource_access(): return True schema_perm = utils.get_schema_perm(database, schema) if schema and utils.can_access(sm, 'schema_access', schema_perm, g.user): return True - datasources = SourceRegistry.query_datasources_by_name( + datasources = SourceRegistry.query_datasources_by_names( db.session, database, datasource_name, schema=schema) for datasource in datasources: if self.can_access("datasource_access", datasource.perm): @@ -114,6 +114,29 @@ def rejected_datasources(self, sql, database, schema): return [ t for t in superset_query.tables if not self.datasource_access_by_fullname(database, t, schema)] + def accessible_by_user(self, database, datasource_names, schema=None): + if self.database_access(database) or self.all_datasource_access(): + return datasource_names + + schema_perm = utils.get_schema_perm(database, schema) + if schema and utils.can_access(sm, 'schema_access', schema_perm): + return datasource_names + + role_ids = set([role.id for role in g.user.roles]) + # TODO: cache user_perms or user_datasources + user_pvms = ( + db.session.query(ab_models.PermissionView) + .join(ab_models.Permission) + .filter(ab_models.Permission.name == 'datasource_access') + .filter(ab_models.PermissionView.role.any( + ab_models.Role.id.in_(role_ids))) + .all() + ) + user_perms = set([pvm.view_menu.name for pvm in user_pvms]) + user_datasources = SourceRegistry.query_datasources_by_permissions( + db.session, database, user_perms) + full_names = 
set([d.full_name for d in user_datasources]) + return [d for d in datasource_names if d in full_names] class ListWidgetWithCheckboxes(ListWidget): @@ -164,6 +187,11 @@ def json_error_response(msg, status=None): json.dumps(data), status=status, mimetype="application/json") +def json_success(json_msg, status=None): + status = status if status else 200 + return Response(json_msg, status=status, mimetype="application/json") + + def api(f): """ A decorator to label an endpoint as an API. Catches uncaught exceptions and @@ -174,13 +202,7 @@ def wraps(self, *args, **kwargs): return f(self, *args, **kwargs) except Exception as e: logging.exception(e) - resp = Response( - json.dumps({ - 'message': get_error_msg() - }), - status=500, - mimetype="application/json") - return resp + return json_error_response(get_error_msg()) return functools.update_wrapper(wraps, f) @@ -1458,28 +1480,18 @@ def explore_json(self, datasource_type, datasource_id): return json_error_response(utils.error_msg_from_exception(e)) if not self.datasource_access(viz_obj.datasource): - return Response( - json.dumps( - {'error': DATASOURCE_ACCESS_ERR}), - status=404, - mimetype="application/json") + return json_error_response(DATASOURCE_ACCESS_ERR, status=404) payload = {} - status = 200 try: payload = viz_obj.get_payload() except Exception as e: logging.exception(e) - status = 500 return json_error_response(utils.error_msg_from_exception(e)) - if payload.get('status') == QueryStatus.FAILED: - status = 500 + return json_error_response(viz_obj.json_dumps(payload)) - return Response( - viz_obj.json_dumps(payload), - status=status, - mimetype="application/json") + return json_success(viz_obj.json_dumps(payload)) @expose("/import_dashboards", methods=['GET', 'POST']) @log_this @@ -1644,12 +1656,7 @@ def filter(self, datasource_type, datasource_id, column): except Exception as e: flash(str(e), "danger") return redirect(error_redirect) - status = 200 - payload = obj.get_values_for_column(column) - return Response( - payload, - status=status, - mimetype="application/json") + return json_success(obj.get_values_for_column(column)) def save_or_overwrite_slice( self, args, slc, slice_add_perm, slice_edit_perm): @@ -1760,7 +1767,7 @@ def checkbox(self, model_view, id_, attr, value): if obj: setattr(obj, attr, value == 'true') db.session.commit() - return Response("OK", mimetype="application/json") + return json_success("OK") @api @has_access_api @@ -1778,32 +1785,11 @@ def activity_per_day(self): ) payload = {str(time.mktime(dt.timetuple())): ccount for dt, ccount in qry if dt} - return Response(json.dumps(payload), mimetype="application/json") + return json_success(json.dumps(payload)) @api @has_access_api -<<<<<<< 309bede63194423bdeb2341fa959da0e19a6b75c @expose("/schemas/") - @cached_view(timeout=600) -======= - @expose("/all_tables/") - def all_tables(self, db_id): - """Endpoint that returns all tables and views from the database""" - all_tables = [] - all_views = [] - schemas = database.all_schema_names() - for schema in schemas: - all_tables.extend(database.all_table_names(schema=schema)) - all_views.extend(database.all_view_names(schema=schema)) - if not schemas: - all_tables.extend(database.all_table_names()) - all_views.extend(database.all_view_names()) - - return Response( - json.dumps({"tables": all_tables, "views": all_views}), - - @expose("/schemas/") ->>>>>>> Address comments. 
def schemas(self, db_id): # db_id = request.args.get('db_id') database = ( @@ -1818,19 +1804,16 @@ def schemas(self, db_id): @api @has_access_api - @expose("/tables///") - @cached_view(timeout=600) - def tables(self, db_id, schema): + @expose("/tables////") + def tables(self, db_id, schema, substr): """endpoint to power the calendar heatmap on the welcome page""" schema = utils.js_string_to_python(schema) - substr = utils.js_string_to_python(request.args.get('substr')) + substr = utils.js_string_to_python(substr) database = db.session.query(models.Database).filter_by(id=db_id).one() - table_names = [ - t for t in database.all_table_names(schema) if - self.datasource_access_by_name(database, t, schema=schema)] - view_names = [ - v for v in database.all_table_names(schema) if - self.datasource_access_by_name(database, v, schema=schema)] + table_names = self.accessible_by_user( + database, database.all_table_names(schema), schema) + view_names = self.accessible_by_user( + database, database.all_view_names(schema), schema) if substr: table_names = [tn for tn in table_names if substr in tn] @@ -1840,18 +1823,19 @@ def tables(self, db_id, schema): total_items = len(table_names) + len(view_names) max_tables = len(table_names) max_views = len(view_names) - if total_items: + if total_items and substr: max_tables = max_items * len(table_names) // total_items max_views = max_items * len(view_names) // total_items + table_options = [{'value': tn, 'label': tn} + for tn in table_names[:max_tables]] + table_options.extend([{'value': vn, 'label': '[view] {}'.format(vn)} + for vn in view_names[:max_views]]) payload = { - 'tables': table_names[:max_tables], - 'tables_length': len(table_names), - 'views': view_names[:max_views], - 'views_length': len(view_names), + 'tableLength': len(table_names) + len(view_names), + 'options': table_options, } - return Response( - json.dumps(payload), mimetype="application/json") + return json_success(json.dumps(payload)) @api @has_access_api @@ -1875,8 +1859,7 @@ def copy_dash(self, dashboard_id): session.commit() dash_json = dash.json_data session.close() - return Response( - dash_json, mimetype="application/json") + return json_success(dash_json) @api @has_access_api @@ -1959,11 +1942,9 @@ def testconn(self): engine.connect() return json.dumps(engine.table_names(), indent=4) except Exception as e: - return Response(( + return json_error_response(( "Connection failed!\n\n" - "The error message returned was:\n{}").format(e), - status=500, - mimetype="application/json") + "The error message returned was:\n{}").format(e)) @api @has_access_api @@ -2007,9 +1988,8 @@ def recent_activity(self, user_id): 'item_title': item_title, 'time': log.Log.dttm, }) - return Response( - json.dumps(payload, default=utils.json_int_dttm_ser), - mimetype="application/json") + return json_success( + json.dumps(payload, default=utils.json_int_dttm_ser)) @api @has_access_api @@ -2047,9 +2027,8 @@ def fave_dashboards(self, user_id): d['creator_url'] = '/superset/profile/{}/'.format( user.username) payload.append(d) - return Response( - json.dumps(payload, default=utils.json_int_dttm_ser), - mimetype="application/json") + return json_success( + json.dumps(payload, default=utils.json_int_dttm_ser)) @api @has_access_api @@ -2077,9 +2056,8 @@ def created_dashboards(self, user_id): 'url': o.url, 'dttm': o.changed_on, } for o in qry.all()] - return Response( - json.dumps(payload, default=utils.json_int_dttm_ser), - mimetype="application/json") + return json_success( + json.dumps(payload, 
default=utils.json_int_dttm_ser)) @api @has_access_api @@ -2103,9 +2081,8 @@ def created_slices(self, user_id): 'url': o.slice_url, 'dttm': o.changed_on, } for o in qry.all()] - return Response( - json.dumps(payload, default=utils.json_int_dttm_ser), - mimetype="application/json") + return json_success( + json.dumps(payload, default=utils.json_int_dttm_ser)) @api @has_access_api @@ -2143,9 +2120,8 @@ def fave_slices(self, user_id): d['creator_url'] = '/superset/profile/{}/'.format( user.username) payload.append(d) - return Response( - json.dumps(payload, default=utils.json_int_dttm_ser), - mimetype="application/json") + return json_success( + json.dumps(payload, default=utils.json_int_dttm_ser)) @api @has_access_api @@ -2189,12 +2165,9 @@ def warm_up_cache(self): obj.get_json(force=True) except Exception as e: return json_error_response(utils.error_msg_from_exception(e)) - return Response( - json.dumps( + return json_success(json.dumps( [{"slice_id": session.id, "slice_name": session.slice_name} - for session in slices]), - status=200, - mimetype="application/json") + for session in slices])) @expose("/favstar////") def favstar(self, class_name, obj_id, action): @@ -2222,9 +2195,7 @@ def favstar(self, class_name, obj_id, action): else: count = len(favs) session.commit() - return Response( - json.dumps({'count': count}), - mimetype="application/json") + return json_success(json.dumps({'count': count})) @has_access @expose("/dashboard//") @@ -2400,9 +2371,7 @@ def table(self, database_id, table_name, schema): primary_key = mydb.get_pk_constraint(table_name, schema) foreign_keys = mydb.get_foreign_keys(table_name, schema) except Exception as e: - return Response( - json.dumps({'error': utils.error_msg_from_exception(e)}), - mimetype="application/json") + return json_error_response(utils.error_msg_from_exception(e)) keys = [] if primary_key and primary_key.get('constrained_columns'): primary_key['column_names'] = primary_key.pop('constrained_columns') @@ -2440,7 +2409,7 @@ def table(self, database_id, table_name, schema): 'foreignKeys': foreign_keys, 'indexes': keys, } - return Response(json.dumps(tbl), mimetype="application/json") + return json_success(json.dumps(tbl)) @has_access @expose("/extra_table_metadata////") @@ -2450,7 +2419,7 @@ def extra_table_metadata(self, database_id, table_name, schema): mydb = db.session.query(models.Database).filter_by(id=database_id).one() payload = mydb.db_engine_spec.extra_table_metadata( mydb, table_name, schema) - return Response(json.dumps(payload), mimetype="application/json") + return json_success(json.dumps(payload)) @has_access @expose("/select_star///") @@ -2497,35 +2466,27 @@ def results(self, key): return json_error_response("Results backend isn't configured") blob = results_backend.get(key) - if blob: - query = ( - db.session.query(models.Query) - .filter_by(results_key=key) - .one() + if not blob: + return json_error_response( + "Data could not be retrieved. 
" + "You may want to re-run the query.", + status=410 ) - rejected_tables = self.rejected_datasources( - query.sql, query.database, query.schema) - if rejected_tables: - return json_error_response(get_datasource_access_error_msg( - '{}'.format(rejected_tables))) - payload = zlib.decompress(blob) - display_limit = app.config.get('DISPLAY_SQL_MAX_ROW', None) - if display_limit: - payload_json = json.loads(payload) - payload_json['data'] = payload_json['data'][:display_limit] - return Response( - json.dumps(payload_json, default=utils.json_iso_dttm_ser), - status=200, mimetype="application/json") - else: - return Response( - json.dumps({ - 'error': ( - "Data could not be retrieved. You may want to " - "re-run the query." - ) - }), - status=410, - mimetype="application/json") + + query = db.session.query(models.Query).filter_by(results_key=key).one() + rejected_tables = self.rejected_datasources( + query.sql, query.database, query.schema) + if rejected_tables: + return json_error_response(get_datasource_access_error_msg( + '{}'.format(rejected_tables))) + + payload = zlib.decompress(blob) + display_limit = app.config.get('DISPLAY_SQL_MAX_ROW', None) + if display_limit: + payload_json = json.loads(payload) + payload_json['data'] = payload_json['data'][:display_limit] + return json_success( + json.dumps(payload_json, default=utils.json_iso_dttm_ser)) @has_access_api @expose("/sql_json/", methods=['POST', 'GET']) @@ -2582,12 +2543,9 @@ def sql_json(self): sql_lab.get_sql_results.delay( query_id, return_results=False, store_results=not query.select_as_cta) - return Response( - json.dumps({'query': query.to_dict()}, - default=utils.json_int_dttm_ser, - allow_nan=False), - status=202, # Accepted - mimetype="application/json") + return json_success(json.dumps( + {'query': query.to_dict()}, default=utils.json_int_dttm_ser, + allow_nan=False), status=202) # Sync request. try: @@ -2602,14 +2560,8 @@ def sql_json(self): data = sql_lab.get_sql_results(query_id, return_results=True) except Exception as e: logging.exception(e) - return Response( - json.dumps({'error': "{}".format(e)}), - status=500, - mimetype="application/json") - return Response( - data, - status=200, - mimetype="application/json") + return json_error_response("{}".format(e)) + return json_success(data) @has_access @expose("/csv/") @@ -2664,21 +2616,15 @@ def fetch_datasource_metadata(self): # Check permission for datasource if not self.datasource_access(datasource): return json_error_response(DATASOURCE_ACCESS_ERR) - - return Response( - json.dumps(datasource.data), - mimetype="application/json" - ) + return json_success(json.dumps(datasource.data)) @has_access @expose("/queries/") def queries(self, last_updated_ms): """Get the updated queries.""" if not g.user.get_id(): - return Response( - json.dumps({'error': "Please login to access the queries."}), - status=403, - mimetype="application/json") + return json_error_response( + "Please login to access the queries.", status=403) # Unix time, milliseconds. 
last_updated_ms_int = int(float(last_updated_ms)) if last_updated_ms else 0 @@ -2695,10 +2641,8 @@ def queries(self, last_updated_ms): .all() ) dict_queries = {q.client_id: q.to_dict() for q in sql_queries} - return Response( - json.dumps(dict_queries, default=utils.json_int_dttm_ser), - status=200, - mimetype="application/json") + return json_success( + json.dumps(dict_queries, default=utils.json_int_dttm_ser)) @has_access @expose("/search_queries") From be79e36d4c329797d2dceb125a07a1f2038f108e Mon Sep 17 00:00:00 2001 From: Bogdan Kyryliuk Date: Mon, 23 Jan 2017 11:50:25 -0800 Subject: [PATCH 5/8] Rename var and dropdown text --- superset/__init__.py | 3 +-- .../javascripts/SqlLab/components/SqlEditorLeftBar.jsx | 2 +- superset/cache_util.py | 6 +++--- superset/config.py | 2 +- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/superset/__init__.py b/superset/__init__.py index 5d54e543c21dc..b9cc6b0041e69 100644 --- a/superset/__init__.py +++ b/superset/__init__.py @@ -37,8 +37,7 @@ utils.pessimistic_connection_handling(db.engine.pool) cache = Cache(app, config=app.config.get('CACHE_CONFIG')) - -simple_cache = Cache(app, config=app.config.get('IN_MEMORY_CACHE_CONFIG')) +tables_cache = Cache(app, config=app.config.get('TABLE_NAMES_CACHE_CONFIG')) migrate = Migrate(app, db, directory=APP_DIR + "/migrations") diff --git a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx index 5288005cd0043..c16e116aeb55a 100644 --- a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx +++ b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx @@ -195,7 +195,7 @@ class SqlEditorLeftBar extends React.PureComponent { ref="selectTable" // isLoading={this.state.tableLoading} value={this.state.tableName} - placeholder={`Add a table (${this.state.tableLength})`} + placeholder={`Type to search ...`} autosize={false} onChange={this.changeTable.bind(this)} // options={this.state.tableOptions} diff --git a/superset/cache_util.py b/superset/cache_util.py index 91a1ae20dea2a..18150f1aa763f 100644 --- a/superset/cache_util.py +++ b/superset/cache_util.py @@ -1,4 +1,4 @@ -from superset import simple_cache +from superset import tables_cache from flask import request @@ -17,12 +17,12 @@ def memoized_func(timeout=5 * 60, key=view_cache_key): def wrap(f): def wrapped_f(cls, *args, **kwargs): cache_key = key(*args, **kwargs) - o = simple_cache.get(cache_key) + o = tables_cache.get(cache_key) if o is not None: return o o = f(cls, *args, **kwargs) print('cache_key: {}'.format(cache_key)) - simple_cache.set(cache_key, o, timeout=timeout) + tables_cache.set(cache_key, o, timeout=timeout) return o return wrapped_f return wrap diff --git a/superset/config.py b/superset/config.py index 431e163a284ba..78064bfc1eb8c 100644 --- a/superset/config.py +++ b/superset/config.py @@ -153,7 +153,7 @@ CACHE_DEFAULT_TIMEOUT = None CACHE_CONFIG = {'CACHE_TYPE': 'null'} -IN_MEMORY_CACHE_CONFIG = {'CACHE_TYPE': 'null'} +TABLE_NAMES_CACHE_CONFIG = {'CACHE_TYPE': 'null'} # CORS Options ENABLE_CORS = False From 428a3d082ca976b8a96e7d8f53917bf31ffed548 Mon Sep 17 00:00:00 2001 From: Bogdan Kyryliuk Date: Mon, 23 Jan 2017 12:10:54 -0800 Subject: [PATCH 6/8] Cleanup --- .../SqlLab/components/SqlEditorLeftBar.jsx | 35 ++++++------------- superset/cache_util.py | 2 +- superset/db_engine_specs.py | 13 +++---- superset/source_registry.py | 27 -------------- superset/views.py | 6 ++-- tests/core_tests.py | 8 ----- 6 files changed, 19 
insertions(+), 72 deletions(-) diff --git a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx index c16e116aeb55a..4526946de8880 100644 --- a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx +++ b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx @@ -65,28 +65,17 @@ class SqlEditorLeftBar extends React.PureComponent { if (!this.props.queryEditor.dbId || !input) { return Promise.resolve({ options: [] }); } - // issues with redirects return fetch( - `/superset/tables/${this.props.queryEditor.dbId}/${this.props.queryEditor.schema}/${input}`, - { - method: 'GET', - mode: 'no-cors', - credentials: 'include', - headers: { - 'Access-Control-Allow-Origin':' | *', - 'Accept': 'application/json, application/xml, text/plain, text/html, *.*', - 'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8' - }, - }) - .then((response) =>{ return response.json() }) - .then((json) => { - this.setState({ tableLength: json.tableLength }); - return {options: json.options}; - }); + `/superset/tables/${this.props.queryEditor.dbId}/` + `${this.props.queryEditor.schema}/${input}`, + { method: 'GET', credentials: 'include' } + ) + .then(response => response.json()) + .then(json => ({ options: json.options })); } fetchTables(dbId, schema, substr) { if (dbId) { - this.setState({ tableLoading: true, tableOptions: []}); + this.setState({ tableLoading: true, tableOptions: [] }); const url = `/superset/tables/${dbId}/${schema}/${substr}/`; $.get(url, (data) => { this.setState({ @@ -106,11 +95,11 @@ class SqlEditorLeftBar extends React.PureComponent { let tableName = namePieces[0]; let schemaName = this.props.queryEditor.schema; if (namePieces.length === 1) { - this.setState({ tableName: tableName }); + this.setState({ tableName }); } else { schemaName = namePieces[0]; tableName = namePieces[1]; - this.setState({ tableName: tableName }); + this.setState({ tableName }); this.props.actions.queryEditorSetSchema(this.props.queryEditor, schemaName); this.fetchTables(this.props.queryEditor.dbId, schemaName); } @@ -190,15 +179,13 @@ class SqlEditorLeftBar extends React.PureComponent { onChange={this.changeTable.bind(this)} options={this.state.tableOptions} />} - {!this.props.queryEditor.schema && } diff --git a/superset/cache_util.py b/superset/cache_util.py index 18150f1aa763f..e7c5917961d07 100644 --- a/superset/cache_util.py +++ b/superset/cache_util.py @@ -2,7 +2,7 @@ from flask import request -def view_cache_key(*args, **kwargs): +def view_cache_key(*unused_args, **unused_kwargs): args_hash = hash(frozenset(request.args.items())) return 'view/{}/{}'.format(request.path, args_hash) diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 2c7a449446abd..60d43bc8a7820 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -17,12 +17,11 @@ from __future__ import unicode_literals from collections import namedtuple, defaultdict +from flask_babel import lazy_gettext as _ import inspect import textwrap import time -from flask_babel import lazy_gettext as _ - from superset import cache_util Grain = namedtuple('Grain', 'name label function') @@ -61,7 +60,7 @@ def convert_dttm(cls, target_type, dttm): timeout=600, key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1])) def fetch_result_sets(cls, db, datasource_type): - """ Returns the dictionary {schema : [result_set_name]}. + """Returns the dictionary {schema : [result_set_name]}. 
Datasource_type can be 'table' or 'view'. Empty schema corresponds to the list of full names of the all @@ -250,16 +249,12 @@ def show_partition_pql( {limit_clause} """).format(**locals()) - @classmethod - def epoch_to_dttm(cls): - return "from_unixtime({col})" - @classmethod @cache_util.memoized_func( timeout=600, key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1])) def fetch_result_sets(cls, db, datasource_type): - """ Returns the dictionary {schema : [result_set_name]}. + """Returns the dictionary {schema : [result_set_name]}. Datasource_type can be 'table' or 'view'. Empty schema corresponds to the list of full names of the all @@ -270,7 +265,7 @@ def fetch_result_sets(cls, db, datasource_type): ORDER BY concat(table_schema, '.', table_name)""".format( datasource_type.upper()), None) result_sets = defaultdict(list) - for _, row in result_set_df.iterrows(): + for unused, row in result_set_df.iterrows(): result_sets[row['table_schema']].append(row['table_name']) result_sets[""].append('{}.{}'.format( row['table_schema'], row['table_name'])) diff --git a/superset/source_registry.py b/superset/source_registry.py index 36c4ac0e30b77..df91762564bbd 100644 --- a/superset/source_registry.py +++ b/superset/source_registry.py @@ -43,35 +43,8 @@ def get_datasource_by_name(cls, session, datasource_type, datasource_name, d.name == datasource_name and schema == schema] return db_ds[0] - @classmethod - def query_datasources_by_names( - cls, session, database, datasource_names, schema=None): - """Datasource_names can be a list or a single name.""" - datasource_class = SourceRegistry.sources[database.type] - if not hasattr(datasource_names, '__iter__'): - datasource_names = [datasource_names] - datasource_set = set(datasource_names) - - if database.type == 'table': - query = ( - session.query(datasource_class) - .filter_by(database_id=database.id) - .filter(datasource_class.table_name.in_(datasource_set))) - if schema: - query = query.filter_by(schema=schema) - return query.all() - if database.type == 'druid': - return ( - session.query(datasource_class) - .filter_by(cluster_name=database.id) - .filter(datasource_class.datasource_name.in_(datasource_set)) - .all() - ) - return None - @classmethod def query_datasources_by_permissions(cls, session, database, permissions): - """Datasource_names can be a list or a single name.""" datasource_class = SourceRegistry.sources[database.type] return ( session.query(datasource_class) diff --git a/superset/views.py b/superset/views.py index 1e206acf1b947..7809b4dbc3374 100755 --- a/superset/views.py +++ b/superset/views.py @@ -90,7 +90,7 @@ def datasource_access_by_name( if schema and utils.can_access(sm, 'schema_access', schema_perm, g.user): return True - datasources = SourceRegistry.query_datasources_by_names( + datasources = SourceRegistry.query_datasources_by_name( db.session, database, datasource_name, schema=schema) for datasource in datasources: if self.can_access("datasource_access", datasource.perm): @@ -2166,8 +2166,8 @@ def warm_up_cache(self): except Exception as e: return json_error_response(utils.error_msg_from_exception(e)) return json_success(json.dumps( - [{"slice_id": session.id, "slice_name": session.slice_name} - for session in slices])) + [{"slice_id": session.id, "slice_name": session.slice_name} + for session in slices])) @expose("/favstar////") def favstar(self, class_name, obj_id, action): diff --git a/tests/core_tests.py b/tests/core_tests.py index 61b5e4c1f64be..7df53c567ecdc 100644 --- a/tests/core_tests.py +++ 
b/tests/core_tests.py @@ -556,14 +556,6 @@ def test_fetch_datasource_metadata(self): for k in keys: self.assertIn(k, resp.keys()) - def test_fetch_all_tables(self): - self.login(username='admin') - database = self.get_main_database(db.session) - url = '/superset/all_tables/{}'.format(database.id) - resp = json.loads(self.get_resp(url)) - self.assertIn('tables', resp) - self.assertIn('views', resp) - def test_user_profile(self): self.login(username='admin') slc = self.get_slice("Girls", db.session) From ef4d1ded8d17ef7b68279f0c94629baf68d2235c Mon Sep 17 00:00:00 2001 From: Bogdan Kyryliuk Date: Thu, 26 Jan 2017 11:49:23 -0800 Subject: [PATCH 7/8] Resolve comments. --- .../SqlLab/components/SqlEditorLeftBar.jsx | 60 +++++++++---------- superset/assets/package.json | 2 - superset/cache_util.py | 1 - superset/views.py | 2 +- 4 files changed, 31 insertions(+), 34 deletions(-) diff --git a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx index 4526946de8880..c36d659ca6744 100644 --- a/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx +++ b/superset/assets/javascripts/SqlLab/components/SqlEditorLeftBar.jsx @@ -1,12 +1,9 @@ -require('es6-promise').polyfill(); -require('isomorphic-fetch'); const $ = window.$ = require('jquery'); import React from 'react'; import Select from 'react-select'; import { Label, Button } from 'react-bootstrap'; import TableElement from './TableElement'; import AsyncSelect from '../../components/AsyncSelect'; -import fetch from 'isomorphic-fetch'; const propTypes = { queryEditor: React.PropTypes.object.isRequired, @@ -34,7 +31,7 @@ class SqlEditorLeftBar extends React.PureComponent { } componentWillMount() { this.fetchSchemas(this.props.queryEditor.dbId); - this.fetchTables(this.props.queryEditor.dbId); + this.fetchTables(this.props.queryEditor.dbId, this.props.queryEditor.schema); } onChange(db) { const val = (db) ? db.value : null; @@ -65,14 +62,11 @@ class SqlEditorLeftBar extends React.PureComponent { if (!this.props.queryEditor.dbId || !input) { return Promise.resolve({ options: [] }); } - return fetch( - `/superset/tables/${this.props.queryEditor.dbId}/` - `${this.props.queryEditor.schema}/${input}`, - { method: 'GET', credentials: 'include' } - ) - .then(response => response.json()) - .then(json => ({ options: json.options })); + const url = `/superset/tables/${this.props.queryEditor.dbId}/\ +${this.props.queryEditor.schema}/${input}`; + return $.get(url).then((data) => ({ options: data.options })); } + // TODO: move fetching methods to the actions. fetchTables(dbId, schema, substr) { if (dbId) { this.setState({ tableLoading: true, tableOptions: [] }); @@ -104,6 +98,8 @@ class SqlEditorLeftBar extends React.PureComponent { this.fetchTables(this.props.queryEditor.dbId, schemaName); } this.setState({ tableLoading: true }); + // TODO: handle setting the tableLoading state depending on success or + // failure of the addTable async call in the action. this.props.actions.addTable(this.props.queryEditor, tableName, schemaName); this.setState({ tableLoading: false }); } @@ -169,25 +165,29 @@ class SqlEditorLeftBar extends React.PureComponent { />
-          {this.props.queryEditor.schema &&
-            <Select
-              name="select-table"
-              ref="selectTable"
-              value={this.state.tableName}
-              placeholder={`Type to search ...`}
-              autosize={false}
-              onChange={this.changeTable.bind(this)}
-              options={this.state.tableOptions}
-            />}
+          {this.props.queryEditor.schema &&
+            <Select
+              name="select-table"
+              ref="selectTable"
+              isLoading={this.state.tableLoading}
+              value={this.state.tableName}
+              placeholder={`Add a table (${this.state.tableOptions.length})`}
+              autosize={false}
+              onChange={this.changeTable.bind(this)}
+              options={this.state.tableOptions}
+            />
+          }
+          {!this.props.queryEditor.schema &&
+            <AsyncSelect
+              name="async-select-table"
+              ref="selectTable"
+              value={this.state.tableName}
+              placeholder={'Type to search ...'}
+              onChange={this.changeTable.bind(this)}
+              loadOptions={this.getTableNamesBySubStr.bind(this)}
+            />
+          }

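The `$.get` above hits `/superset/tables/<dbId>/<schema>/<input>`; on the Python side, the expensive listing work that presumably feeds it (`fetch_result_sets`) is memoized through `cache_util.memoized_func`, as the earlier hunks show. Below is a minimal, self-contained sketch of that scheme; `FakeDB`, `FakeEngineSpec`, and the plain-dict cache are illustrative stand-ins, not Superset code (the real decorator writes to the Flask-Cache `tables_cache` with a timeout):

    _cache = {}

    def memoized_func(timeout=5 * 60, key=None):
        """Cache a classmethod's result under a caller-supplied key function."""
        def wrap(f):
            def wrapped_f(cls, *args, **kwargs):
                cache_key = key(*args, **kwargs)
                if cache_key in _cache:
                    return _cache[cache_key]
                rv = f(cls, *args, **kwargs)
                # The real code does: tables_cache.set(cache_key, rv, timeout=timeout)
                _cache[cache_key] = rv
                return rv
            return wrapped_f
        return wrap

    class FakeDB(object):
        id = 7

    class FakeEngineSpec(object):
        @classmethod
        @memoized_func(
            timeout=600,
            key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1]))
        def fetch_result_sets(cls, db, datasource_type):
            return {'': ['example.{}'.format(datasource_type)]}

    FakeEngineSpec.fetch_result_sets(FakeDB(), 'table')  # computes, cached as 'db:7:table'
    FakeEngineSpec.fetch_result_sets(FakeDB(), 'table')  # second call served from _cache

The key function is the whole contract here: results are shared per (database id, datasource type), so anything not captured in the key stays stale until the timeout expires.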
diff --git a/superset/assets/package.json b/superset/assets/package.json index bcf88908d8975..c3484559b8759 100644 --- a/superset/assets/package.json +++ b/superset/assets/package.json @@ -52,12 +52,10 @@ "datamaps": "^0.5.8", "datatables-bootstrap3-plugin": "^0.5.0", "datatables.net-bs": "^1.10.12", - "es6-promise": "^4.0.5", "font-awesome": "^4.6.3", "gridster": "^0.5.6", "immutability-helper": "^2.0.0", "immutable": "^3.8.1", - "isomorphic-fetch": "^2.2.1", "jquery": "^2.2.1", "jquery-ui": "1.10.5", "lodash.throttle": "^4.1.1", diff --git a/superset/cache_util.py b/superset/cache_util.py index e7c5917961d07..ef8835c55cce5 100644 --- a/superset/cache_util.py +++ b/superset/cache_util.py @@ -21,7 +21,6 @@ def wrapped_f(cls, *args, **kwargs): if o is not None: return o o = f(cls, *args, **kwargs) - print('cache_key: {}'.format(cache_key)) tables_cache.set(cache_key, o, timeout=timeout) return o return wrapped_f diff --git a/superset/views.py b/superset/views.py index 7809b4dbc3374..5eceb679c4a4c 100755 --- a/superset/views.py +++ b/superset/views.py @@ -114,6 +114,7 @@ def rejected_datasources(self, sql, database, schema): return [ t for t in superset_query.tables if not self.datasource_access_by_fullname(database, t, schema)] + def accessible_by_user(self, database, datasource_names, schema=None): if self.database_access(database) or self.all_datasource_access(): return datasource_names @@ -1791,7 +1792,6 @@ def activity_per_day(self): @has_access_api @expose("/schemas/") def schemas(self, db_id): - # db_id = request.args.get('db_id') database = ( db.session .query(models.Database) From d7345ee819dbeb3db8714887c029253f03cf4da0 Mon Sep 17 00:00:00 2001 From: Bogdan Kyryliuk Date: Mon, 13 Feb 2017 14:58:20 -0800 Subject: [PATCH 8/8] Add user to the perm check. --- superset/views.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/superset/views.py b/superset/views.py index 5eceb679c4a4c..ce98b967198b6 100755 --- a/superset/views.py +++ b/superset/views.py @@ -87,7 +87,8 @@ def datasource_access_by_name( return True schema_perm = utils.get_schema_perm(database, schema) - if schema and utils.can_access(sm, 'schema_access', schema_perm, g.user): + if schema and utils.can_access( + sm, 'schema_access', schema_perm, g.user): return True datasources = SourceRegistry.query_datasources_by_name( @@ -120,7 +121,8 @@ def accessible_by_user(self, database, datasource_names, schema=None): return datasource_names schema_perm = utils.get_schema_perm(database, schema) - if schema and utils.can_access(sm, 'schema_access', schema_perm): + if schema and utils.can_access( + sm, 'schema_access', schema_perm, g.user): return datasource_names role_ids = set([role.id for role in g.user.roles])
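One note for reviewers tracing this last fix: `datasource_access_by_name` already passed `g.user`, while the pre-patch call in `accessible_by_user` did not, so the two code paths could disagree about whose roles were being checked. The sketch below shows the role walk such a check boils down to; it is a simplification under assumed Flask-AppBuilder-style models, not the actual `superset.utils.can_access` implementation, which also consults the security manager (`sm`):

    def can_access(sm, permission_name, view_name, user):
        """Hypothetical simplification of superset.utils.can_access."""
        # True if any of the user's roles carries (permission, view),
        # e.g. ('schema_access', <schema_perm>).
        for role in user.roles:
            for permission_view in role.permissions:
                if (permission_view.permission.name == permission_name and
                        permission_view.view_menu.name == view_name):
                    return True
        return False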