Remove unused variables // adam & adamw (#444)
* Adam remove unused vars

* Adamw remove unused vars
Frightera authored Jul 11, 2023
1 parent 431accc commit 8d8056f
Showing 2 changed files with 0 additions and 4 deletions.
2 changes: 0 additions & 2 deletions keras_core/optimizers/adam.py
@@ -108,8 +108,6 @@ def build(self, var_list):
 
     def update_step(self, gradient, variable, learning_rate):
         """Update step given gradient and the associated model variable."""
-        beta_1_power = None
-        beta_2_power = None
         lr = ops.cast(learning_rate, variable.dtype)
         gradient = ops.cast(gradient, variable.dtype)
         local_step = ops.cast(self.iterations + 1, variable.dtype)
2 changes: 0 additions & 2 deletions keras_core/optimizers/adamw.py
@@ -124,8 +124,6 @@ def build(self, var_list):
 
     def update_step(self, gradient, variable, learning_rate):
         """Update step given gradient and the associated model variable."""
-        beta_1_power = None
-        beta_2_power = None
         lr = ops.cast(learning_rate, variable.dtype)
         gradient = ops.cast(gradient, variable.dtype)
         local_step = ops.cast(self.iterations + 1, variable.dtype)
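
For context on why the deletions are safe: the beta_1_power and beta_2_power values used by Adam's bias correction are presumably computed from local_step further down in update_step (the diff hunks are truncated at that point), so the upfront None assignments were dead code. Below is a minimal, self-contained sketch of an Adam-style update step showing where those powers naturally appear; NumPy stands in for keras_core.ops, and the function name, signature, and defaults are illustrative assumptions, not the library's actual code.

import numpy as np

def adam_update_step(variable, gradient, m, v, iterations,
                     learning_rate=1e-3, beta_1=0.9, beta_2=0.999,
                     epsilon=1e-7):
    """One Adam-style step (illustrative sketch, not keras_core's code)."""
    # Step counter is 1-based, mirroring `local_step = self.iterations + 1`.
    local_step = iterations + 1
    # The bias-correction powers are derived here from the step counter,
    # so assigning them to None earlier in the function would have no effect.
    beta_1_power = beta_1 ** local_step
    beta_2_power = beta_2 ** local_step
    alpha = learning_rate * np.sqrt(1.0 - beta_2_power) / (1.0 - beta_1_power)
    # Exponential moving averages of the gradient and its square.
    m = m + (gradient - m) * (1.0 - beta_1)
    v = v + (gradient ** 2 - v) * (1.0 - beta_2)
    # Parameter update with the bias-corrected step size.
    variable = variable - alpha * m / (np.sqrt(v) + epsilon)
    return variable, m, v

A single step from zero-initialized moment estimates might look like:

w, m, v = adam_update_step(np.array([1.0, -2.0]), np.array([0.1, 0.3]),
                           np.zeros(2), np.zeros(2), iterations=0)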
