[utils.py] gathering/refactoring into a "utils/" folder #6095

Merged 4 commits on Oct 17, 2018
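The whole diff applies one pattern: the flat superset/utils.py module (together with the dashboard_import_export_util, dict_import_export_util, and import_util helpers) moves into a superset/utils/ package, and call sites either import the names they need from superset.utils.core or alias the module. A minimal sketch of the import styles used below — it assumes a Superset checkout containing this branch and uses only names that appear in the diff itself:

# Old layout: superset/utils.py was a single flat module.
#   from superset import utils
#   cache = utils.setup_cache(app, conf.get('CACHE_CONFIG'))

# New layout: superset/utils/ is a package and the old helpers live in utils/core.py.
# Call sites either import the names they need directly from the core module ...
from superset.utils.core import pessimistic_connection_handling, setup_cache

# ... or alias core so existing `utils.<name>` references keep working unchanged:
from superset.utils import core as utils

# The sibling *_util modules move into the same package:
#   dashboard_import_export_util -> superset.utils.dashboard_import_export
#   dict_import_export_util      -> superset.utils.dict_import_export
#   import_util                  -> superset.utils.import_datasource
from superset.utils import dashboard_import_export, dict_import_export, import_datasource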
12 changes: 7 additions & 5 deletions superset/__init__.py
@@ -13,9 +13,11 @@
from flask_wtf.csrf import CSRFProtect
from werkzeug.contrib.fixers import ProxyFix

from superset import config, utils
from superset import config
from superset.connectors.connector_registry import ConnectorRegistry
from superset.security import SupersetSecurityManager
from superset.utils.core import (
get_update_perms_flag, pessimistic_connection_handling, setup_cache)

APP_DIR = os.path.dirname(__file__)
CONFIG_MODULE = os.environ.get('SUPERSET_CONFIG', 'superset.config')
@@ -112,10 +114,10 @@ def get_manifest():
for ex in csrf_exempt_list:
csrf.exempt(ex)

utils.pessimistic_connection_handling(db.engine)
pessimistic_connection_handling(db.engine)

cache = utils.setup_cache(app, conf.get('CACHE_CONFIG'))
tables_cache = utils.setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))
cache = setup_cache(app, conf.get('CACHE_CONFIG'))
tables_cache = setup_cache(app, conf.get('TABLE_NAMES_CACHE_CONFIG'))

migrate = Migrate(app, db, directory=APP_DIR + '/migrations')

@@ -183,7 +185,7 @@ def index(self):
base_template='superset/base.html',
indexview=MyIndexView,
security_manager_class=custom_sm,
update_perms=utils.get_update_perms_flag(),
update_perms=get_update_perms_flag(),
)

security_manager = appbuilder.sm
16 changes: 9 additions & 7 deletions superset/cli.py
@@ -12,9 +12,10 @@
import yaml

from superset import (
app, dashboard_import_export_util, data, db,
dict_import_export_util, security_manager, utils,
app, data, db, security_manager,
)
from superset.utils import (
core as utils, dashboard_import_export, dict_import_export)

config = app.config
celery_app = utils.get_celery_app(config)
@@ -241,7 +242,7 @@ def import_dashboards(path, recursive=False):
logging.info('Importing dashboard from file %s', f)
try:
with f.open() as data_stream:
dashboard_import_export_util.import_dashboards(
dashboard_import_export.import_dashboards(
db.session, data_stream)
except Exception as e:
logging.error('Error when importing dashboard from file %s', f)
@@ -257,7 +258,7 @@ def import_dashboards(path, recursive=False):
help='Print JSON to stdout')
def export_dashboards(print_stdout, dashboard_file):
"""Export dashboards to JSON"""
data = dashboard_import_export_util.export_dashboards(db.session)
data = dashboard_import_export.export_dashboards(db.session)
if print_stdout or not dashboard_file:
print(data)
if dashboard_file:
@@ -296,7 +297,7 @@ def import_datasources(path, sync, recursive=False):
logging.info('Importing datasources from file %s', f)
try:
with f.open() as data_stream:
dict_import_export_util.import_from_dict(
dict_import_export.import_from_dict(
db.session,
yaml.safe_load(data_stream),
sync=sync_array)
@@ -321,7 +322,7 @@ def import_datasources(path, sync, recursive=False):
def export_datasources(print_stdout, datasource_file,
back_references, include_defaults):
"""Export datasources to YAML"""
data = dict_import_export_util.export_to_dict(
data = dict_import_export.export_to_dict(
session=db.session,
recursive=True,
back_references=back_references,
@@ -340,7 +341,7 @@ def export_datasources(print_stdout, datasource_file,
help='Include parent back references')
def export_datasource_schema(back_references):
"""Export datasource YAML schema to stdout"""
data = dict_import_export_util.export_schema_to_dict(
data = dict_import_export.export_schema_to_dict(
back_references=back_references)
yaml.safe_dump(data, stdout, default_flow_style=False)

@@ -416,6 +417,7 @@ def load_test_users():

Syncs permissions for those users/roles
"""
print(Fore.GREEN + 'Loading a set of users for unit tests')
load_test_users_run()


2 changes: 1 addition & 1 deletion superset/connectors/base/models.py
@@ -8,9 +8,9 @@
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import foreign, relationship

from superset import utils
from superset.models.core import Slice
from superset.models.helpers import AuditMixinNullable, ImportMixin
from superset.utils import core as utils


class BaseDatasource(AuditMixinNullable, ImportMixin):
11 changes: 6 additions & 5 deletions superset/connectors/druid/models.py
@@ -30,13 +30,14 @@
)
from sqlalchemy.orm import backref, relationship

from superset import conf, db, import_util, security_manager, utils
from superset import conf, db, security_manager
from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
from superset.exceptions import MetricPermException, SupersetException
from superset.models.helpers import (
AuditMixinNullable, ImportMixin, QueryResult,
)
from superset.utils import (
from superset.utils import core as utils, import_datasource
from superset.utils.core import (
DimSelector, DTTM_ALIAS, flasher,
)

@@ -392,7 +393,7 @@ def lookup_obj(lookup_column):
DruidColumn.datasource_id == lookup_column.datasource_id,
DruidColumn.column_name == lookup_column.column_name).first()

return import_util.import_simple_obj(db.session, i_column, lookup_obj)
return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)


class DruidMetric(Model, BaseMetric):
@@ -444,7 +445,7 @@ def lookup_obj(lookup_metric):
return db.session.query(DruidMetric).filter(
DruidMetric.datasource_id == lookup_metric.datasource_id,
DruidMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)


class DruidDatasource(Model, BaseDatasource):
Expand Down Expand Up @@ -580,7 +581,7 @@ def lookup_datasource(d):
def lookup_cluster(d):
return db.session.query(DruidCluster).filter_by(
cluster_name=d.cluster_name).one()
return import_util.import_datasource(
return import_datasource.import_datasource(
db.session, i_datasource, lookup_cluster, lookup_datasource,
import_time)

3 changes: 2 additions & 1 deletion superset/connectors/druid/views.py
@@ -10,9 +10,10 @@
from flask_babel import gettext as __
from flask_babel import lazy_gettext as _

from superset import appbuilder, db, security_manager, utils
from superset import appbuilder, db, security_manager
from superset.connectors.base.views import DatasourceModelView
from superset.connectors.connector_registry import ConnectorRegistry
from superset.utils import core as utils
from superset.views.base import (
BaseSupersetView, DatasourceFilter, DeleteMixin,
get_datasource_exist_error_msg, ListWidgetWithCheckboxes, SupersetModelView,
22 changes: 11 additions & 11 deletions superset/connectors/sqla/models.py
@@ -17,13 +17,13 @@
from sqlalchemy.sql.expression import TextAsFrom
import sqlparse

from superset import app, db, import_util, security_manager, utils
from superset import app, db, security_manager
from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric
from superset.jinja_context import get_template_processor
from superset.models.annotations import Annotation
from superset.models.core import Database
from superset.models.helpers import QueryResult
from superset.utils import DTTM_ALIAS, QueryStatus
from superset.utils import core as utils, import_datasource

config = app.config

@@ -44,11 +44,11 @@ def query(self, query_obj):
qry = qry.filter(Annotation.start_dttm >= query_obj['from_dttm'])
if query_obj['to_dttm']:
qry = qry.filter(Annotation.end_dttm <= query_obj['to_dttm'])
status = QueryStatus.SUCCESS
status = utils.QueryStatus.SUCCESS
try:
df = pd.read_sql_query(qry.statement, db.engine)
except Exception as e:
status = QueryStatus.FAILED
status = utils.QueryStatus.FAILED
logging.exception(e)
error_message = (
utils.error_msg_from_exception(e))
@@ -120,7 +120,7 @@ def get_timestamp_expression(self, time_grain):
pdf = self.python_date_format
is_epoch = pdf in ('epoch_s', 'epoch_ms')
if not self.expression and not time_grain and not is_epoch:
return column(self.column_name, type_=DateTime).label(DTTM_ALIAS)
return column(self.column_name, type_=DateTime).label(utils.DTTM_ALIAS)

expr = self.expression or self.column_name
if is_epoch:
@@ -134,15 +134,15 @@ def get_timestamp_expression(self, time_grain):
grain = self.table.database.grains_dict().get(time_grain)
if grain:
expr = grain.function.format(col=expr)
return literal_column(expr, type_=DateTime).label(DTTM_ALIAS)
return literal_column(expr, type_=DateTime).label(utils.DTTM_ALIAS)

@classmethod
def import_obj(cls, i_column):
def lookup_obj(lookup_column):
return db.session.query(TableColumn).filter(
TableColumn.table_id == lookup_column.table_id,
TableColumn.column_name == lookup_column.column_name).first()
return import_util.import_simple_obj(db.session, i_column, lookup_obj)
return import_datasource.import_simple_obj(db.session, i_column, lookup_obj)

def dttm_sql_literal(self, dttm):
"""Convert datetime object to a SQL expression string
@@ -243,7 +243,7 @@ def lookup_obj(lookup_metric):
return db.session.query(SqlMetric).filter(
SqlMetric.table_id == lookup_metric.table_id,
SqlMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
return import_datasource.import_simple_obj(db.session, i_metric, lookup_obj)


class SqlaTable(Model, BaseDatasource):
@@ -776,13 +776,13 @@ def _get_top_groups(self, df, dimensions):
def query(self, query_obj):
qry_start_dttm = datetime.now()
sql = self.get_query_str(query_obj)
status = QueryStatus.SUCCESS
status = utils.QueryStatus.SUCCESS
error_message = None
df = None
try:
df = self.database.get_df(sql, self.schema)
except Exception as e:
status = QueryStatus.FAILED
status = utils.QueryStatus.FAILED
logging.exception(e)
error_message = (
self.database.db_engine_spec.extract_error_message(e))
@@ -881,7 +881,7 @@ def lookup_sqlatable(table):
def lookup_database(table):
return db.session.query(Database).filter_by(
database_name=table.params_dict['database_name']).one()
return import_util.import_datasource(
return import_datasource.import_datasource(
db.session, i_datasource, lookup_database, lookup_sqlatable,
import_time)

3 changes: 2 additions & 1 deletion superset/connectors/sqla/views.py
@@ -9,8 +9,9 @@
from flask_babel import lazy_gettext as _
from past.builtins import basestring

from superset import appbuilder, db, security_manager, utils
from superset import appbuilder, db, security_manager
from superset.connectors.base.views import DatasourceModelView
from superset.utils import core as utils
from superset.views.base import (
DatasourceFilter, DeleteMixin, get_datasource_exist_error_msg,
ListWidgetWithCheckboxes, SupersetModelView, YamlExportMixin,