
Commit

Merge pull request #243 from janosh/master
refactor setting loggers
ardunn authored Oct 11, 2019
2 parents 210e95a + f65621a commit 63dfaff
Showing 7 changed files with 33 additions and 25 deletions.
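
Every file below makes the same change: instead of each transformer resolving its logger inside __init__ via the mixin helper, it now assigns to the logger property and lets the new LoggableMixin setter (see automatminer/base.py below) do the normalization. A minimal before/after sketch of the pattern, drawn from the hunks that follow:

    # Before: each transformer resolved its own logger in __init__.
    self._logger = self.get_logger(logger)

    # After: plain attribute assignment; the LoggableMixin "logger" property
    # setter calls get_logger() and stores the result in _logger.
    self.logger = logger
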
5 changes: 2 additions & 3 deletions automatminer/automl/adaptors.py
@@ -78,7 +78,7 @@ def __init__(self, logger=True, **tpot_kwargs):
         self._fitted_target = None
         self._backend = None
         self._features = None
-        self._logger = self.get_logger(logger)
+        self.logger = logger
 
     @log_progress(AMM_LOG_FIT_STR)
     @set_fitted
@@ -245,7 +245,7 @@ class SinglePipelineAdaptor(DFMLAdaptor, LoggableMixin):
 
     def __init__(self, regressor, classifier, logger=True):
         self.mode = None
-        self._logger = self.get_logger(logger)
+        self.logger = logger
         self._regressor = regressor
         self._classifier = classifier
         self._features = None
@@ -295,4 +295,3 @@ def features(self):
     def fitted_target(self):
         return self._fitted_target
 
-
31 changes: 22 additions & 9 deletions automatminer/base.py
@@ -19,34 +19,47 @@ def logger(self):
         If the logger is None, the logging calls will be redirected to a dummy
         logger that has no output.
         """
-        if hasattr(self, "_logger"):
-            return self._logger
-        else:
-            raise AttributeError("Loggable object has no _logger attribute!")
+        return self._logger
 
+    @logger.setter
+    def logger(self, new_logger):
+        """Set a new logger.
+        Args:
+            new_logger (Logger, bool): A boolean or custom logger object to use
+                for logging. Alternatively, if set to True, the default automatminer
+                logger will be used. If set to False, then no logging will occur.
+        """
+        new_logger = self.get_logger(new_logger)
+        assert isinstance(
+            new_logger, logging.Logger
+        ), "The new logger must be an instance of the logger class."
+        self._logger = new_logger
+        if hasattr(self, "autofeaturizer"):
+            for x in ["autofeaturizer", "cleaner", "reducer", "learner"]:
+                getattr(self, x)._logger = new_logger
+
     @staticmethod
-    def get_logger(logger, level=None):
-        """Set the class logger.
+    def get_logger(logger):
+        """Handle boolean logger.
         Args:
             logger (Logger, bool): A custom logger object to use for logging.
                 Alternatively, if set to True, the default automatminer logger
                 will be used. If set to False, then no logging will occur.
-            level (int): The log level. For example logging.DEBUG.
         """
         # need comparison to True and False to avoid overwriting Logger objects
         if logger is True:
             logger = logging.getLogger(AMM_LOGGER_BASENAME)
 
             if not logger.handlers:
-                initialize_logger(AMM_LOGGER_BASENAME, level=level)
+                initialize_logger(AMM_LOGGER_BASENAME)
 
         elif logger is False:
             logger = logging.getLogger(AMM_LOGGER_BASENAME + "_null")
 
             if not logger.handlers:
                 initialize_null_logger(AMM_LOGGER_BASENAME)
 
+        logger.setLevel(logging.INFO)
         return logger
 
     @property
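
Pieced together, the refactored mixin behaves roughly as sketched below. This is a simplified, self-contained rendering for illustration only: AMM_LOGGER_BASENAME, initialize_logger, and initialize_null_logger live in automatminer's logging utilities, so they are stubbed here with plain stdlib logging calls and the exact handler setup differs from the real helpers; the isinstance assert from the real setter is omitted.

    import logging

    AMM_LOGGER_BASENAME = "automatminer"  # assumed value of the shared logger name

    def initialize_logger(name):
        # Stand-in for automatminer's helper: attach a basic stream handler.
        logging.getLogger(name).addHandler(logging.StreamHandler())

    def initialize_null_logger(name):
        # Stand-in for automatminer's helper: swallow all records silently.
        null_logger = logging.getLogger(name + "_null")
        null_logger.addHandler(logging.NullHandler())
        null_logger.propagate = False

    class LoggableMixin:
        @property
        def logger(self):
            return self._logger

        @logger.setter
        def logger(self, new_logger):
            # Normalize bool/Logger input, then push the result down to any
            # child transformers (as MatPipe does for its four pipeline stages).
            new_logger = self.get_logger(new_logger)
            self._logger = new_logger
            if hasattr(self, "autofeaturizer"):
                for x in ["autofeaturizer", "cleaner", "reducer", "learner"]:
                    getattr(self, x)._logger = new_logger

        @staticmethod
        def get_logger(logger):
            # True -> the shared automatminer logger, False -> a silent null
            # logger; an existing Logger instance is passed through untouched.
            if logger is True:
                logger = logging.getLogger(AMM_LOGGER_BASENAME)
                if not logger.handlers:
                    initialize_logger(AMM_LOGGER_BASENAME)
            elif logger is False:
                logger = logging.getLogger(AMM_LOGGER_BASENAME + "_null")
                if not logger.handlers:
                    initialize_null_logger(AMM_LOGGER_BASENAME)
            logger.setLevel(logging.INFO)
            return logger

With this in place, assigning a single attribute (for example pipe.logger = new_logger) updates the parent object and, via the setter, its autofeaturizer, cleaner, reducer, and learner in one step.
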
2 changes: 1 addition & 1 deletion automatminer/featurization/core.py
@@ -128,7 +128,7 @@ def __init__(self, cache_src=None, preset=None, featurizers=None,
 
         self.cache_src = cache_src
         self.preset = "express" if preset is None else preset
-        self._logger = self.get_logger(logger)
+        self.logger = logger
         self.featurizers = featurizers
         self.exclude = exclude if exclude else []
         self.functionalize = functionalize
12 changes: 4 additions & 8 deletions automatminer/pipeline.py
@@ -92,15 +92,13 @@ def __init__(self, autofeaturizer=None, cleaner=None, reducer=None,
                 reducer = config["reducer"]
                 learner = config["learner"]
 
-        self._logger = self.get_logger(logger, level=log_level)
         self.autofeaturizer = autofeaturizer
         self.cleaner = cleaner
         self.reducer = reducer
         self.learner = learner
-        self.autofeaturizer._logger = self.get_logger(logger)
-        self.cleaner._logger = self.get_logger(logger)
-        self.reducer._logger = self.get_logger(logger)
-        self.learner._logger = self.get_logger(logger)
+        self.logger = logger
+        if log_level:
+            self.logger.setLevel(log_level)
         self.pre_fit_df = None
         self.post_fit_df = None
         self.is_fit = False
@@ -340,9 +338,7 @@ def load(cls, filename, logger=True):
         with open(filename, 'rb') as f:
             pipe = pickle.load(f)
 
-        for obj in [pipe, pipe.learner, pipe.reducer, pipe.cleaner,
-                    pipe.autofeaturizer]:
-            obj._logger = cls.get_logger(logger)
+        pipe.logger = logger
 
         pipe.logger.info("Loaded MatPipe from file {}.".format(filename))
         pipe.logger.warning("Only use this model to make predictions (do not "
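
A rough usage sketch of the resulting behavior (assuming the constructor falls back to its default preset when no transformers are passed, as the hunk above suggests, and that MatPipe is importable from the top-level package; the pickle filename is hypothetical):

    import logging

    from automatminer import MatPipe

    # Shared automatminer logger, bumped to DEBUG via the log_level kwarg.
    pipe = MatPipe(logger=True, log_level=logging.DEBUG)

    # The property setter propagated the very same logger to every stage.
    assert pipe.autofeaturizer.logger is pipe.logger

    # Assigning False re-routes the pipe and all its stages to the null logger.
    pipe.logger = False

    # load() now re-attaches the requested logger through the same setter:
    # pipe = MatPipe.load("my_pipe.p", logger=True)
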
4 changes: 2 additions & 2 deletions automatminer/preprocessing/core.py
@@ -78,7 +78,7 @@ def __init__(self, max_na_frac=0.01, feature_na_method="drop",
                  encode_categories=True, encoder='one-hot',
                  drop_na_targets=True, na_method_fit="drop",
                  na_method_transform="fill", logger=True):
-        self._logger = self.get_logger(logger)
+        self.logger = logger
         self.max_na_frac = max_na_frac
         self.feature_na_method = feature_na_method
         self.encoder = encoder
@@ -482,7 +482,7 @@ def __init__(self, reducers=('pca',), corr_threshold=0.95,
         self.n_pca_features = n_pca_features
         self.tree_importance_percentile = tree_importance_percentile
         self.n_rebate_features = n_rebate_features
-        self._logger = self.get_logger(logger)
+        self.logger = logger
         self._keep_features = keep_features or []
         self._remove_features = remove_features or []
         self.removed_features = {}
2 changes: 1 addition & 1 deletion automatminer/preprocessing/feature_selection.py
@@ -39,7 +39,7 @@ class TreeFeatureReducer(DFTransformer, LoggableMixin):
 
     def __init__(self, mode, importance_percentile=0.95,
                  logger=True, random_state=0):
-        self._logger = self.get_logger(logger)
+        self.logger = logger
         self.mode = mode
         self.importance_percentile = importance_percentile
         self.selected_features = None
2 changes: 1 addition & 1 deletion automatminer/tests/test_base.py
@@ -77,7 +77,7 @@ class TestLoggableMixin(LoggableMixin):
     """
 
     def __init__(self, logger=True):
-        self._logger = self.get_logger(logger)
+        self.logger = logger
 
 
 class TestBaseTransformers(unittest.TestCase):
