diff --git a/composer/trainer/trainer.py b/composer/trainer/trainer.py
index cb42094f37..eb5080eaee 100644
--- a/composer/trainer/trainer.py
+++ b/composer/trainer/trainer.py
@@ -1723,12 +1723,9 @@ def __init__(
         # Load Checkpoint
         self._rng_state = None
         # If autoresume is enabled, first check for existing checkpoints to load
-        self.autoresume = autoresume
-        if self.autoresume:
+        if autoresume:
             log.info('Searching for a previous checkpoint to autoresume')
             error_message = ''
-            if max_duration is None:
-                error_message += 'The `max_duration` must be specified on trainer.__init__ when autoresume is enabled. '
             if save_folder is None:
                 error_message += 'The `save_folder` must be specified when autoresume is enabled. '
             if save_overwrite:
@@ -2191,21 +2188,10 @@ def fit(
         # Reset Time
         if reset_time:
-            if self.autoresume:
-                raise ValueError(
-                    'Cannot specify `reset_time=True` when autoresume is enabled. Please instead '
-                    'specify `load_ignore_keys` when constructing the Trainer, which will only '
-                    'run on the initial load and not any subsequent autoresumptions.',
-                )
             self.state.timestamp = Timestamp()

         # Max Duration
         if duration is not None:
-            if self.autoresume:
-                raise ValueError(
-                    '`duration` cannot be specified when autoresume is enabled. Please instead '
-                    'specify `max_duration` when constructing the Trainer.',
-                )
             duration = ensure_time(duration, TimeUnit.EPOCH)
             if duration.unit == TimeUnit.SECOND:
                 raise ValueError('Wall clock time not an allowed time unit.')
diff --git a/tests/trainer/test_checkpoint.py b/tests/trainer/test_checkpoint.py
index dc887fa5e2..d23b55875f 100644
--- a/tests/trainer/test_checkpoint.py
+++ b/tests/trainer/test_checkpoint.py
@@ -667,7 +667,6 @@ def get_trainer(
         max_duration: str = '2ep',
         latest_filename: str = 'latest-rank{rank}.pt',
         file_extension: str = '.pt',
-        use_scheduler: bool = True,
         **kwargs,
     ):
         if model is None:
@@ -705,7 +704,7 @@ def get_trainer(
             save_filename='ep{epoch}' + file_extension,
             max_duration=max_duration,
             optimizers=optimizer,
-            schedulers=ExponentialScheduler(gamma=0.9) if use_scheduler else None,
+            schedulers=ExponentialScheduler(gamma=0.9),
             callbacks=callbacks,
             **kwargs,
         )
@@ -1213,43 +1212,24 @@ def test_load_weights_object_store(self, tmp_path):
         )

     @pytest.mark.parametrize(
-        'run_name,save_folder,save_overwrite,latest_filename,max_duration',
+        'run_name,save_folder,save_overwrite,latest_filename',
         [
-            [None, 'first', False, 'latest-rank{rank}.pt', '2ep'],
-            ['big-chungus', None, False, 'latest-rank{rank}.pt', '2ep'],
-            ['big-chungus', 'first', True, 'latest-rank{rank}.pt', '2ep'],
-            ['big-chungus', 'first', False, None, '2ep'],
-            ['big-chungus', 'first', False, 'latest-rank{rank}.pt', None],
+            [None, 'first', False, 'latest-rank{rank}.pt'],
+            ['big-chungus', None, False, 'latest-rank{rank}.pt'],
+            ['big-chungus', 'first', True, 'latest-rank{rank}.pt'],
+            ['big-chungus', 'first', False, None],
         ],
     )
-    def test_autoresume_fail_init(self, run_name, save_folder, save_overwrite, latest_filename, max_duration):
+    def test_autoresume_fail(self, run_name, save_folder, save_overwrite, latest_filename):
         with pytest.raises(ValueError):
             self.get_trainer(
                 latest_filename=latest_filename,
                 save_overwrite=save_overwrite,
                 save_folder=save_folder,
                 run_name=run_name,
-                max_duration=max_duration,
                 autoresume=True,
-                use_scheduler=False,
             )

-    @pytest.mark.parametrize(
-        'duration,reset_time',
-        [
-            ['1ep', False],
-            [None, True],
-        ],
-    )
-    def test_autoresume_fail_fit(self, duration: Optional[str], reset_time: bool):
-        trainer = self.get_trainer(
-            run_name='bigtrainer',
-            save_folder='first',
-            autoresume=True,
-        )
-        with pytest.raises(ValueError):
-            trainer.fit(duration=duration, reset_time=reset_time)
-
     def test_different_run_names(self):
         trainer_1 = self.get_trainer(