Some code refactoring (#2139)
asdf2014 authored and mistercrunch committed Feb 8, 2017
1 parent f2bf316 commit 1f58e18
Showing 14 changed files with 35 additions and 36 deletions.
2 changes: 1 addition & 1 deletion superset/data/__init__.py
@@ -1110,7 +1110,7 @@ def load_multiformat_time_series_data():


def load_misc_dashboard():
- """Loading a dasbhoard featuring misc charts"""
+ """Loading a dashboard featuring misc charts"""

print("Creating the dashboard")
db.session.expunge_all()
4 changes: 2 additions & 2 deletions superset/dataframe.py
@@ -79,10 +79,10 @@ def datetime_conversion_rate(data_series):
success = 0
total = 0
for value in data_series:
- total = total + 1
+ total += 1
try:
pd.to_datetime(value)
- success = success + 1
+ success += 1
except Exception:
continue
return 100 * success / total
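For reference, this is how the touched helper reads once the hunk is applied; the sketch below is reconstructed only from the lines shown above plus the pandas import they imply, and the docstring is added here for clarity rather than taken from the commit:

import pandas as pd


def datetime_conversion_rate(data_series):
    """Share of values (0-100) that pandas can parse as datetimes."""
    success = 0
    total = 0
    for value in data_series:
        total += 1
        try:
            pd.to_datetime(value)
            success += 1
        except Exception:
            continue
    return 100 * success / total


# e.g. datetime_conversion_rate(['2019-01-14', 'not a date']) returns 50.0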
4 changes: 2 additions & 2 deletions superset/forms.py
@@ -20,7 +20,7 @@
config = app.config

TIMESTAMP_CHOICES = [
- ('smart_date', 'Adaptative formating'),
+ ('smart_date', 'Adaptive formatting'),
("%m/%d/%Y", '"%m/%d/%Y" | 01/14/2019'),
("%Y-%m-%d", '"%Y-%m-%d" | 2019-01-14'),
("%Y-%m-%d %H:%M:%S",
@@ -354,7 +354,7 @@ def __init__(self, viz):
"choices": self.choicify(['auto', 50, 75, 100, 125, 150, 200]),
"default": 'auto',
"description": _(
- "Bottom marging, in pixels, allowing for more room for "
+ "Bottom margin, in pixels, allowing for more room for "
"axis labels"),
}),
'page_length': (FreeFormSelectField, {
4 changes: 2 additions & 2 deletions superset/jinja_context.py
@@ -92,7 +92,7 @@ def _partition_query(table_name, limit=0, order_by=None, filters=None):
order
:type order_by: list of (str, bool) tuples
:param filters: a list of filters to apply
- :param filters: dict of field anme and filter value combinations
+ :param filters: dict of field name and filter value combinations
"""
limit_clause = "LIMIT {}".format(limit) if limit else ''
order_by_clause = ''
@@ -173,7 +173,7 @@ def latest_sub_partition(self, table_name, **kwargs):
part_fields = indexes[0]['column_names']
for k in kwargs.keys():
if k not in k in part_fields:
- msg = "Field [{k}] is not part of the partionning key"
+ msg = "Field [{k}] is not part of the partitioning key"
raise SupersetTemplateException(msg)
if len(kwargs.keys()) != len(part_fields) - 1:
msg = (
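The second hunk sits inside partition handling: latest_sub_partition rejects keyword arguments that are not partition columns and expects all but one partition field to be pinned down. As a standalone illustration of that validation pattern, here is a minimal sketch; the function name, the exception type, and the second error message are stand-ins, and the membership test is written as a plain "k not in part_fields" rather than the chained comparison shown in the context line above:

def validate_partition_kwargs(kwargs, part_fields):
    # Every caller-supplied field must be one of the table's partition columns.
    for k in kwargs:
        if k not in part_fields:
            raise ValueError(
                "Field [{k}] is not part of the partitioning key".format(k=k))
    # All partition columns except the innermost one must be pinned down.
    if len(kwargs) != len(part_fields) - 1:
        raise ValueError(
            "Expected values for {} of the {} partition fields".format(
                len(part_fields) - 1, len(part_fields)))


validate_partition_kwargs({'ds': '2017-02-08'}, ['ds', 'hour'])    # passes
# validate_partition_kwargs({'day': '2017-02-08'}, ['ds', 'hour'])  # raises ValueError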
2 changes: 1 addition & 1 deletion superset/migrations/env.py
@@ -84,7 +84,7 @@ def process_revision_directives(context, revision, directives):

context.configure(connection=connection,
target_metadata=target_metadata,
- #compare_type=True,
+ # compare_type=True,
process_revision_directives=process_revision_directives,
**kwargs)

13 changes: 6 additions & 7 deletions superset/models.py
@@ -12,7 +12,6 @@
import numpy
import pickle
import re
- import six
import textwrap
from copy import deepcopy, copy
from datetime import timedelta, datetime, date
@@ -233,10 +232,10 @@ class CssTemplate(Model, AuditMixinNullable):


slice_user = Table('slice_user', Model.metadata,
- Column('id', Integer, primary_key=True),
- Column('user_id', Integer, ForeignKey('ab_user.id')),
- Column('slice_id', Integer, ForeignKey('slices.id'))
- )
+ Column('id', Integer, primary_key=True),
+ Column('user_id', Integer, ForeignKey('ab_user.id')),
+ Column('slice_id', Integer, ForeignKey('slices.id'))
+ )


class Slice(Model, AuditMixinNullable, ImportMixin):
@@ -1987,7 +1986,7 @@ def get_metric_obj(self, metric_name):
def import_obj(cls, i_datasource, import_time=None):
"""Imports the datasource from the object to the database.
- Metrics and columns and datasource will be overrided if exists.
+ Metrics and columns and datasource will be overridden if exists.
This function can be used to import/export dashboards between multiple
superset instances. Audit metadata isn't copies over.
"""
@@ -2258,7 +2257,7 @@ def values_for_column(self,
to_dttm,
limit=500):
"""Retrieve some values for the given column"""
- # TODO: Use Lexicographic TopNMeticSpec onces supported by PyDruid
+ # TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
from_dttm = from_dttm.replace(tzinfo=config.get("DRUID_TZ"))
to_dttm = to_dttm.replace(tzinfo=config.get("DRUID_TZ"))
14 changes: 7 additions & 7 deletions superset/security.py
@@ -9,22 +9,22 @@
from superset import conf, db, models, sm, source_registry


- READ_ONLY_MODELVIEWS = {
+ READ_ONLY_MODEL_VIEWS = {
'DatabaseAsync',
'DatabaseView',
'DruidClusterModelView',
}

- GAMMA_READ_ONLY_MODELVIEWS = {
+ GAMMA_READ_ONLY_MODEL_VIEWS = {
'SqlMetricInlineView',
'TableColumnInlineView',
'TableModelView',
'DruidColumnInlineView',
'DruidDatasourceModelView',
'DruidMetricInlineView',
- } | READ_ONLY_MODELVIEWS
+ } | READ_ONLY_MODEL_VIEWS

- ADMIN_ONLY_VIEW_MENUES = {
+ ADMIN_ONLY_VIEW_MENUS = {
'AccessRequestsModelView',
'Manage',
'SQL Lab',
@@ -103,15 +103,15 @@ def get_or_create_main_db():

def is_admin_only(pvm):
# not readonly operations on read only model views allowed only for admins
- if (pvm.view_menu.name in READ_ONLY_MODELVIEWS and
+ if (pvm.view_menu.name in READ_ONLY_MODEL_VIEWS and
pvm.permission.name not in READ_ONLY_PERMISSION):
return True
- return (pvm.view_menu.name in ADMIN_ONLY_VIEW_MENUES or
+ return (pvm.view_menu.name in ADMIN_ONLY_VIEW_MENUS or
pvm.permission.name in ADMIN_ONLY_PERMISSIONS)


def is_alpha_only(pvm):
- if (pvm.view_menu.name in GAMMA_READ_ONLY_MODELVIEWS and
+ if (pvm.view_menu.name in GAMMA_READ_ONLY_MODEL_VIEWS and
pvm.permission.name not in READ_ONLY_PERMISSION):
return True
return pvm.permission.name in ALPHA_ONLY_PERMISSIONS
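The rename here is mechanical (MODELVIEWS to MODEL_VIEWS, MENUES to MENUS), but since the diff only shows fragments, the following self-contained sketch shows how is_admin_only combines the renamed sets; the permission-view objects and the READ_ONLY_PERMISSION / ADMIN_ONLY_PERMISSIONS values are illustrative stand-ins rather than Superset's real registry:

from collections import namedtuple

# Stand-ins for Flask-AppBuilder's permission/view objects.
Named = namedtuple('Named', ['name'])
PermissionView = namedtuple('PermissionView', ['permission', 'view_menu'])

READ_ONLY_MODEL_VIEWS = {'DatabaseAsync', 'DatabaseView', 'DruidClusterModelView'}
ADMIN_ONLY_VIEW_MENUS = {'AccessRequestsModelView', 'Manage', 'SQL Lab'}
READ_ONLY_PERMISSION = {'can_show', 'can_list'}    # illustrative
ADMIN_ONLY_PERMISSIONS = {'all_database_access'}   # illustrative


def is_admin_only(pvm):
    # Anything beyond read-only access to a read-only model view is admin territory.
    if (pvm.view_menu.name in READ_ONLY_MODEL_VIEWS and
            pvm.permission.name not in READ_ONLY_PERMISSION):
        return True
    return (pvm.view_menu.name in ADMIN_ONLY_VIEW_MENUS or
            pvm.permission.name in ADMIN_ONLY_PERMISSIONS)


print(is_admin_only(PermissionView(Named('can_delete'), Named('DatabaseView'))))  # True
print(is_admin_only(PermissionView(Named('can_list'), Named('DatabaseView'))))    # False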
4 changes: 2 additions & 2 deletions superset/utils.py
@@ -329,8 +329,8 @@ def markdown(s, markup_wrap=False):
return s


- def readfile(filepath):
- with open(filepath) as f:
+ def readfile(file_path):
+ with open(file_path) as f:
content = f.read()
return content

4 changes: 2 additions & 2 deletions superset/views.py
@@ -1348,7 +1348,7 @@ def clean_fulfilled_requests(session):
user = sm.get_user_by_id(r.created_by_fk)
if not datasource or \
self.datasource_access(datasource, user):
- # datasource doesnot exist anymore
+ # datasource does not exist anymore
session.delete(r)
session.commit()
datasource_type = request.args.get('datasource_type')
@@ -2493,7 +2493,7 @@ def results(self, key):
return Response(
json.dumps({
'error': (
- "Data could not be retrived. You may want to "
+ "Data could not be retrieved. You may want to "
"re-run the query."
)
}),
1 change: 1 addition & 0 deletions tests/access_tests.py
@@ -56,6 +56,7 @@
DB_ACCESS_ROLE = 'db_access_role'
SCHEMA_ACCESS_ROLE = 'schema_access_role'

+
def create_access_request(session, ds_type, ds_name, role_name, user_name):
ds_class = SourceRegistry.sources[ds_type]
# TODO: generalize datasource names
3 changes: 1 addition & 2 deletions tests/base_tests.py
@@ -27,7 +27,7 @@ def __init__(self, *args, **kwargs):
if (
self.requires_examples and
not os.environ.get('SOLO_TEST') and
- not os.environ.get('examples_loaded')
+ not os.environ.get('examples_loaded')
):
logging.info("Loading examples")
cli.load_examples(load_test_data=True)
@@ -275,4 +275,3 @@ def assert_can_all(view_menu):
self.assertIn(('can_fave_slices', 'Superset'), gamma_perm_set)
self.assertIn(('can_save_dash', 'Superset'), gamma_perm_set)
self.assertIn(('can_slice', 'Superset'), gamma_perm_set)
-
1 change: 0 additions & 1 deletion tests/email_tests.py
@@ -59,7 +59,6 @@ def test_send_bcc_smtp(self, mock_send_mime):
mimeapp = MIMEApplication('attachment')
assert msg.get_payload()[-1].get_payload() == mimeapp.get_payload()

-
@mock.patch('smtplib.SMTP_SSL')
@mock.patch('smtplib.SMTP')
def test_send_mime(self, mock_smtp, mock_smtp_ssl):
5 changes: 3 additions & 2 deletions tests/superset_test_config.py
@@ -10,8 +10,8 @@
if 'SUPERSET__SQLALCHEMY_DATABASE_URI' in os.environ:
SQLALCHEMY_DATABASE_URI = os.environ.get('SUPERSET__SQLALCHEMY_DATABASE_URI')

- SQL_CELERY_DB_FILE_PATH = os.path.join(DATA_DIR, 'celerydb.sqlite')
- SQL_CELERY_RESULTS_DB_FILE_PATH = os.path.join(DATA_DIR, 'celery_results.sqlite')
+ SQL_CELERY_DB_FILE_PATH = os.path.join(DATA_DIR, 'celerydb.sqlite')
+ SQL_CELERY_RESULTS_DB_FILE_PATH = os.path.join(DATA_DIR, 'celery_results.sqlite')
SQL_SELECT_AS_CTA = True
SQL_MAX_ROW = 666

@@ -23,6 +23,7 @@
AUTH_ROLE_PUBLIC = 'Public'
EMAIL_NOTIFICATIONS = False

+
class CeleryConfig(object):
BROKER_URL = 'sqla+sqlite:///' + SQL_CELERY_DB_FILE_PATH
CELERY_IMPORTS = ('superset.sql_lab', )
10 changes: 5 additions & 5 deletions tests/utils_tests.py
@@ -35,11 +35,11 @@ def test_json_iso_dttm_ser(self):
json_iso_dttm_ser("this is not a date")

def test_base_json_conv(self):
- assert isinstance(base_json_conv(numpy.bool_(1)), bool) == True
- assert isinstance(base_json_conv(numpy.int64(1)), int) == True
- assert isinstance(base_json_conv(set([1])), list) == True
- assert isinstance(base_json_conv(Decimal('1.0')), float) == True
- assert isinstance(base_json_conv(uuid.uuid4()), str) == True
+ assert isinstance(base_json_conv(numpy.bool_(1)), bool) is True
+ assert isinstance(base_json_conv(numpy.int64(1)), int) is True
+ assert isinstance(base_json_conv(set([1])), list) is True
+ assert isinstance(base_json_conv(Decimal('1.0')), float) is True
+ assert isinstance(base_json_conv(uuid.uuid4()), str) is True

@patch('superset.utils.datetime')
def test_parse_human_timedelta(self, mock_now):
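Since isinstance() always returns a plain bool, comparing its result with == True is flagged by flake8 (rule E712); the hunk above switches to an identity check, and the plainest form would drop the comparison entirely. A quick illustration outside the test suite, where int() stands in for whatever base_json_conv would return:

import numpy

converted = int(numpy.int64(1))  # stand-in for a base_json_conv result

# The three forms below are equivalent for a bool result; flake8's E712
# rejects the first, which is what this hunk cleans up.
assert isinstance(converted, int) == True  # noqa: E712 -- old style
assert isinstance(converted, int) is True  # style adopted by this commit
assert isinstance(converted, int)          # plainest form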
