Fix pylint issues with recent pylint
Summary:
More recent pylint runs additional checks; in particular, it doesn't seem to understand `six.with_metaclass(ABCMeta)`, so we need to disable those checks for now.
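For context, the pattern that trips the new check looks roughly like this. This is a minimal sketch, not code from this change; the base-class body is illustrative, with names borrowed from the SystemCronScheduler change below:

import abc

import six


class Scheduler(six.with_metaclass(abc.ABCMeta)):
    '''Illustrative stand-in for the six.with_metaclass(ABCMeta) bases in the repo.'''

    @abc.abstractmethod
    def debug_info(self):
        pass


class SystemCronScheduler(Scheduler):
    def __init__(self, inst_data=None):
        # No super().__init__() call here. Per the summary above, recent pylint
        # cannot see through the intermediate class that six.with_metaclass creates,
        # and reports W0231 (super-init-not-called) on __init__ methods like this
        # one; the .pylintrc change below disables W0231 globally instead.
        self._inst_data = inst_data

    def debug_info(self):
        return repr(self._inst_data)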

I don't move pylint to 2.5.2 until D3109, because of pylint-dev/pylint#3648: the long list of args we pass to pylint seems to trigger a newly introduced bug in 2.5.x; the refactor in D3109 means we no longer trigger that bug.

Test Plan: buildkite

Reviewers: alangenfeld, sandyryza, max

Reviewed By: sandyryza

Subscribers: schrockn

Differential Revision: https://dagster.phacility.com/D3114
Nate Kupp committed May 27, 2020
1 parent 53bf468 commit fe744b8
Showing 18 changed files with 46 additions and 54 deletions.
3 changes: 2 additions & 1 deletion .pylintrc
@@ -14,8 +14,9 @@
# C - convention related checks
# W0511 disable TODO warning
# W1201, W1202 disable log format warning. False positives (I think)
+# W0231 disable super-init-not-called - pylint doesn't understand six.with_metaclass(ABCMeta)

-disable=C,R,duplicate-code,W0511,W1201,W1202,no-init
+disable=C,R,duplicate-code,W0231,W0511,W1201,W1202,no-init

# See: https://github.com/getsentry/responses/issues/74
[TYPECHECK]
3 changes: 1 addition & 2 deletions bin/dagster_module_publisher.py
@@ -93,7 +93,7 @@ def check_for_cruft(self, autoclean):

        found_pyc_files = []

-        for root, dir_, files in os.walk(git_repo_root()):
+        for root, _, files in os.walk(git_repo_root()):
            for file_ in files:
                if file_.endswith('.pyc'):
                    found_pyc_files.append(os.path.join(root, file_))
@@ -136,7 +136,6 @@ def check_directory_structure(self):
        expected_modules_not_found = []

        module_directories = get_core_module_directories()
-        library_directories = get_library_module_directories()

        for module_dir in module_directories:
            if module_dir.name not in expected_python_modules_subdirectories:
@@ -81,7 +81,9 @@ def update_normalized_cereals(self, records):
        Base.metadata.bind = self._engine
        Base.metadata.drop_all(self._engine)
        Base.metadata.create_all(self._engine)
-        NormalizedCereal.__table__.insert().execute(records)
+        # fmt: off
+        NormalizedCereal.__table__.insert().execute(records) # pylint: disable=no-member
+        # fmt: on


@resource(config={'conn_str': Field(String)})
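The `no-member` disable here (repeated in the three tutorial files that follow) works around a common false positive: SQLAlchemy's declarative machinery attaches `__table__` to model classes at class-creation time, so pylint cannot see the attribute statically. The `# fmt: off` / `# fmt: on` guard presumably keeps black from re-wrapping the line and stranding the pylint pragma. A minimal sketch of the pattern, assuming SQLAlchemy 1.x (the implicit-execution API the tutorial uses) and illustrative column names:

import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class NormalizedCereal(Base):
    __tablename__ = 'normalized_cereals'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String)


engine = sa.create_engine('sqlite://')
Base.metadata.bind = engine
Base.metadata.create_all(engine)

records = [{'name': 'corn flakes'}, {'name': 'bran flakes'}]
# __table__ exists at runtime but is invisible to pylint's static analysis,
# so this line is flagged as no-member even though it runs fine.
NormalizedCereal.__table__.insert().execute(records)  # pylint: disable=no-member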
@@ -82,7 +82,9 @@ def update_normalized_cereals(self, records):
        Base.metadata.bind = self._engine
        Base.metadata.drop_all(self._engine)
        Base.metadata.create_all(self._engine)
-        NormalizedCereal.__table__.insert().execute(records)
+        # fmt: off
+        NormalizedCereal.__table__.insert().execute(records) # pylint: disable=no-member
+        # fmt: on


@resource(config={'conn_str': Field(String)})
4 changes: 3 additions & 1 deletion examples/dagster_examples/intro_tutorial/modes.py
@@ -80,7 +80,9 @@ def update_normalized_cereals(self, records):
        Base.metadata.bind = self._engine
        Base.metadata.drop_all(self._engine)
        Base.metadata.create_all(self._engine)
-        NormalizedCereal.__table__.insert().execute(records)
+        # fmt: off
+        NormalizedCereal.__table__.insert().execute(records) # pylint: disable=no-member
+        # fmt: on


@resource(config={'conn_str': Field(String)})
4 changes: 3 additions & 1 deletion examples/dagster_examples/intro_tutorial/presets.py
@@ -81,7 +81,9 @@ def update_normalized_cereals(self, records):
        Base.metadata.bind = self._engine
        Base.metadata.drop_all(self._engine)
        Base.metadata.create_all(self._engine)
-        NormalizedCereal.__table__.insert().execute(records)
+        # fmt: off
+        NormalizedCereal.__table__.insert().execute(records) # pylint: disable=no-member
+        # fmt: on


@resource(config={'conn_str': Field(String)})
@@ -228,7 +228,9 @@ def types():
def create_registering_metaclass(registry):
    class RegisteringMetaclass(SubclassWithMeta_Meta):
        def __init__(cls, name, bases, namespaces):
-            super(RegisteringMetaclass, cls).__init__(name, bases, namespaces)
+            super(RegisteringMetaclass, cls).__init__( # pylint: disable=no-value-for-parameter
+                name, bases, namespaces
+            )
            if any(base for base in bases if getattr(base, '__dauphinCoreType', False)):
                registry.addType(cls)

10 changes: 5 additions & 5 deletions python_modules/dagster-graphql/dagster_graphql/schema/runs.py
@@ -556,7 +556,7 @@ class Meta(object):
        op = dauphin.NonNull('ObjectStoreOperationType')

    def resolve_metadataEntries(self, _graphene_info):
-        return _to_dauphin_metadata_entries(self.metadata_entries)
+        return _to_dauphin_metadata_entries(self.metadata_entries) # pylint: disable=no-member


class DauphinMaterialization(dauphin.ObjectType):
@@ -565,7 +565,7 @@ class Meta(object):
        interfaces = (DauphinDisplayableEvent,)

    def resolve_metadataEntries(self, _graphene_info):
-        return _to_dauphin_metadata_entries(self.metadata_entries)
+        return _to_dauphin_metadata_entries(self.metadata_entries) # pylint: disable=no-member


class DauphinExpectationResult(dauphin.ObjectType):
@@ -576,7 +576,7 @@ class Meta(object):
        success = dauphin.NonNull(dauphin.Boolean)

    def resolve_metadataEntries(self, _graphene_info):
-        return _to_dauphin_metadata_entries(self.metadata_entries)
+        return _to_dauphin_metadata_entries(self.metadata_entries) # pylint: disable=no-member


class DauphinTypeCheck(dauphin.ObjectType):
@@ -587,7 +587,7 @@ class Meta(object):
        success = dauphin.NonNull(dauphin.Boolean)

    def resolve_metadataEntries(self, _graphene_info):
-        return _to_dauphin_metadata_entries(self.metadata_entries)
+        return _to_dauphin_metadata_entries(self.metadata_entries) # pylint: disable=no-member


class DauphinFailureMetadata(dauphin.ObjectType):
@@ -596,7 +596,7 @@ class Meta(object):
        interfaces = (DauphinDisplayableEvent,)

    def resolve_metadataEntries(self, _graphene_info):
-        return _to_dauphin_metadata_entries(self.metadata_entries)
+        return _to_dauphin_metadata_entries(self.metadata_entries) # pylint: disable=no-member


class DauphinExecutionStepInputEvent(dauphin.ObjectType):
@@ -1,3 +1,6 @@
+# NOTE: pylint isn't smart enough to figure out what's going on with dauphin
+# pylint: disable=unexpected-keyword-arg, no-value-for-parameter
+
from dagster_graphql import dauphin

from dagster import check
@@ -257,7 +260,9 @@ def resolve_type(self, _graphene_info):
        )

    def resolve_solid_definition(self, _):
-        return build_dauphin_solid_definition(self._represented_pipeline, self._solid_def_snap.name)
+        return build_dauphin_solid_definition(
+            self._represented_pipeline, self._solid_def_snap.name # pylint: disable=no-member
+        )


class DauphinOutputDefinition(dauphin.ObjectType):
@@ -672,7 +672,7 @@ def start(context):


@solid(required_resource_keys={'b'})
-def will_fail(context, num):
+def will_fail(context, num): # pylint: disable=unused-argument
    assert context.resources.b == 'B'
    raise Exception('fail')

@@ -690,7 +690,7 @@ def retry_resource_pipeline():
        OutputDefinition(str, 'start_skip', is_required=False),
    ],
)
-def can_fail(context, inp):
+def can_fail(context, inp): # pylint: disable=unused-argument
    if context.solid_config['fail']:
        raise Exception('blah')

@@ -112,8 +112,6 @@ def execute(pipeline_context, execution_plan):
        check.inst_param(pipeline_context, 'pipeline_context', SystemPipelineExecutionContext)
        check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)

-        intermediates_manager = pipeline_context.intermediates_manager
-
        limit = pipeline_context.executor_config.max_concurrent

        yield DagsterEvent.engine_event(
@@ -185,7 +185,6 @@ def _dagster_event_sequence_for_step(step_context, retries):
                step_failure_data=StepFailureData(error=fail_err, user_failure_data=None),
            )
        else:
-            attempt_num = prev_attempts + 1
            yield DagsterEvent.step_retry_event(
                step_context,
                StepRetryData(
@@ -99,7 +99,7 @@ def conn_string_for_run_id(self, run_id):
        check.str_param(run_id, 'run_id')
        return create_db_conn_string(self._base_dir, run_id)

-    def _initdb(self, engine, run_id):
+    def _initdb(self, engine):

        alembic_config = get_alembic_config(__file__)

@@ -134,7 +134,7 @@ def connect(self, run_id=None):
        engine = create_engine(conn_string, poolclass=NullPool)

        if not os.path.exists(self.path_for_run_id(run_id)):
-            self._initdb(engine, run_id)
+            self._initdb(engine)

        conn = engine.connect()
        try:
@@ -616,7 +616,7 @@ def resource_solid(_):
        mode_defs=[ModeDefinition(resource_defs={'a': resource_a, 'b': resource_b})],
    )

-    with pytest.raises(Exception) as exc_info:
+    with pytest.raises(Exception):
        execute_pipeline(pipeline)

    assert called == ['A', 'B']
@@ -56,8 +56,6 @@ def _core_celery_execution_loop(pipeline_context, execution_plan, step_execution

    celery_config = pipeline_context.executor_config

-    storage = pipeline_context.environment_dict.get('storage')
-
    # https://github.com/dagster-io/dagster/issues/2440
    check.invariant(
        pipeline_context.system_storage_def.is_persistent,
@@ -91,7 +89,7 @@ def _core_celery_execution_loop(pipeline_context, execution_plan, step_execution
            if result.ready():
                try:
                    step_events = result.get()
-                except Exception as e: # pylint: disable=broad-except
+                except Exception: # pylint: disable=broad-except
                    # We will want to do more to handle the exception here.. maybe subclass Task
                    # Certainly yield an engine or pipeline event
                    step_events = []
@@ -18,7 +18,7 @@ class SystemCronScheduler(Scheduler, ConfigurableClass):
    Enable this scheduler by adding it to your ``dagster.yaml`` in ``$DAGSTER_HOME``.
    '''

-    def __init__( # pylint: disable=super-init-not-called
+    def __init__(
        self, inst_data=None,
    ):
        self._inst_data = inst_data
@@ -1,8 +1,6 @@
import google.api_core.exceptions
import six
from google.cloud import bigquery
from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES
from google.cloud.bigquery.retry import DEFAULT_RETRY

from dagster import check, resource

@@ -15,27 +13,6 @@ def __init__(self, project=None):
        check.opt_str_param(project, 'project')
        super(BigQueryClient, self).__init__(project=project)

-    def create_dataset(self, dataset, exists_ok=False, retry=DEFAULT_RETRY):
-        try:
-            super(BigQueryClient, self).create_dataset(dataset, exists_ok, retry)
-        except google.api_core.exceptions.Conflict:
-            six.raise_from(
-                BigQueryError('Dataset "%s" already exists and exists_ok is false' % dataset), None
-            )
-
-    def delete_dataset(
-        self, dataset, delete_contents=False, retry=DEFAULT_RETRY, not_found_ok=False
-    ):
-        try:
-            super(BigQueryClient, self).delete_dataset(
-                dataset, delete_contents=delete_contents, retry=retry, not_found_ok=not_found_ok
-            )
-        except google.api_core.exceptions.NotFound:
-            six.raise_from(
-                BigQueryError('Dataset "%s" does not exist and not_found_ok is false' % dataset),
-                None,
-            )
-
    def load_table_from_dataframe(
        self,
        dataframe,
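The deleted create_dataset/delete_dataset overrides wrapped behavior that google-cloud-bigquery exposes directly, so callers now get the library's own exceptions (as the test changes below assert). A hedged usage sketch of the upstream API, assuming a reasonably recent google-cloud-bigquery, available GCP credentials, and an illustrative dataset name:

import google.api_core.exceptions
from google.cloud import bigquery

client = bigquery.Client()

# exists_ok / not_found_ok are native flags on the upstream client, so the
# deleted wrapper methods are not needed for "create if absent" behavior.
client.create_dataset('my_scratch_dataset', exists_ok=True)

# Without the flag, the library raises its own exception type, which the
# updated tests below now catch directly instead of BigQueryError.
try:
    client.create_dataset('my_scratch_dataset', exists_ok=False)
except google.api_core.exceptions.Conflict:
    pass  # dataset already exists

client.delete_dataset('my_scratch_dataset', delete_contents=True, not_found_ok=True)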
@@ -2,10 +2,10 @@
import sys
import uuid

+import google.api_core.exceptions
import pandas as pd
import pytest
from dagster_gcp import (
-    BigQueryError,
    bigquery_resource,
    bq_create_dataset,
    bq_delete_dataset,
@@ -151,6 +151,7 @@ def test_config_pipeline():


def test_create_delete_dataset():
+    client = bigquery.Client()
    dataset = get_dataset()

    @pipeline(mode_defs=bq_modes())
@@ -162,9 +163,11 @@ def create_pipeline():
    assert execute_pipeline(create_pipeline, config).result_for_solid('create_solid').success

    config = {'solids': {'create_solid': {'config': {'dataset': dataset, 'exists_ok': False}}}}
-    with pytest.raises(BigQueryError) as exc_info:
+    with pytest.raises(
+        google.api_core.exceptions.Conflict,
+        match='Already Exists: Dataset %s:%s' % (client.project, dataset),
+    ):
        execute_pipeline(create_pipeline, config)
-    assert 'Dataset "%s" already exists and exists_ok is false' % dataset in str(exc_info.value)

    @pipeline(mode_defs=bq_modes())
    def delete_pipeline():
@@ -180,9 +183,11 @@ def delete_pipeline():

    # Delete non-existent with "not_found_ok" False should fail
    config = {'solids': {'delete_solid': {'config': {'dataset': dataset, 'not_found_ok': False}}}}
-    with pytest.raises(BigQueryError) as exc_info:
+    with pytest.raises(
+        google.api_core.exceptions.NotFound,
+        match='Not found: Dataset %s:%s' % (client.project, dataset),
+    ):
        execute_pipeline(delete_pipeline, config)
-    assert 'Dataset "%s" does not exist and not_found_ok is false' % dataset in str(exc_info.value)

    assert not dataset_exists(dataset)
