
Commit

Add docstring
araffin committed Oct 28, 2019
1 parent d678227 commit df1e7aa
Showing 2 changed files with 8 additions and 1 deletion.
8 changes: 8 additions & 0 deletions torchy_baselines/common/base_class.py
@@ -129,6 +129,14 @@ def _update_current_progress(self, num_timesteps, total_timesteps):
         self._current_progress = 1.0 - float(num_timesteps) / float(total_timesteps)
 
     def _update_learning_rate(self, optimizers):
+        """
+        Update the optimizers' learning rates using the current learning rate schedule
+        and the current progress (from 1 to 0).
+
+        :param optimizers: ([th.optim.Optimizer] or Optimizer) An optimizer
+            or a list of optimizers.
+        """
+        # Log the current learning rate
         logger.logkv("learning_rate", self.learning_rate(self._current_progress))
 
         if not isinstance(optimizers, list):
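The hunk above documents the core mechanism: `self.learning_rate` is a schedule that maps the training progress (1 at the start, 0 at the end, as computed by `_update_current_progress`) to a learning rate, and `_update_learning_rate` writes that value into each optimizer. Below is a minimal standalone sketch of that pattern; `linear_schedule` and `update_learning_rate` are illustrative names for this example, not the library's exact implementation:

    import torch as th

    def linear_schedule(initial_lr):
        # Map progress (1.0 at the start of training, 0.0 at the end) to a learning rate.
        def schedule(progress):
            return progress * initial_lr
        return schedule

    def update_learning_rate(optimizers, learning_rate, current_progress):
        # Illustrative stand-in for _update_learning_rate: apply the schedule
        # to a single optimizer or a list of them.
        if not isinstance(optimizers, list):
            optimizers = [optimizers]
        new_lr = learning_rate(current_progress)
        for optimizer in optimizers:
            # PyTorch stores the learning rate per parameter group
            for param_group in optimizer.param_groups:
                param_group["lr"] = new_lr

    # Example: 25k of 100k timesteps done, so progress = 1 - 25000/100000 = 0.75
    model = th.nn.Linear(4, 2)
    optimizer = th.optim.Adam(model.parameters(), lr=3e-4)
    update_learning_rate(optimizer, linear_schedule(3e-4), 0.75)
    print(optimizer.param_groups[0]["lr"])  # 0.000225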
1 change: 0 additions & 1 deletion torchy_baselines/ppo/policies.py
@@ -148,7 +148,6 @@ def _build(self, learning_rate):
                 self.value_net: 1
             }[module]
             module.apply(partial(self.init_weights, gain=gain))
-        # TODO: support linear decay of the learning rate
         self.optimizer = th.optim.Adam(self.parameters(), lr=learning_rate(1), eps=self.adam_epsilon)
 
     def forward(self, obs, deterministic=False):
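The deleted TODO is resolved by the new `_update_learning_rate` above: since `learning_rate` is treated as a callable schedule, `learning_rate(1)` evaluates it at progress 1, i.e. the initial learning rate used to construct the Adam optimizer, and any decay is applied later during training. A hedged sketch of how a fixed rate can be wrapped to fit this callable interface (`constant_schedule` is an assumed helper name, not from the repository):

    import torch as th

    def constant_schedule(value):
        # Wrap a fixed learning rate so it has the same call signature as a
        # schedule; learning_rate(1) then returns the initial (and only) value.
        def schedule(progress):
            return value
        return schedule

    learning_rate = constant_schedule(3e-4)
    params = th.nn.Linear(8, 8).parameters()
    # Mirrors the optimizer construction above; the epsilon value here is assumed
    optimizer = th.optim.Adam(params, lr=learning_rate(1), eps=1e-5)

Wrapping constants this way keeps `_build` agnostic to the decay scheme: constant and decaying rates share one code path.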
