From 7fd7edfae4e5cee15ae28687dbefa6d53614937f Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Tue, 24 Mar 2020 14:07:23 +0000
Subject: [PATCH 1/3] Fix the serialization bug of rectified adam.

---
 tensorflow_addons/optimizers/rectified_adam.py      | 7 +++++--
 tensorflow_addons/optimizers/rectified_adam_test.py | 9 +++++++++
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/tensorflow_addons/optimizers/rectified_adam.py b/tensorflow_addons/optimizers/rectified_adam.py
index ee2c29efb7..5b6cbe7772 100644
--- a/tensorflow_addons/optimizers/rectified_adam.py
+++ b/tensorflow_addons/optimizers/rectified_adam.py
@@ -79,7 +79,10 @@ def __init__(
         weight_decay: FloatTensorLike = 0.0,
         amsgrad: bool = False,
         sma_threshold: FloatTensorLike = 5.0,
-        total_steps: int = 0,
+        # float for total_steps is here to be able to load models created before
+        # https://github.com/tensorflow/addons/pull/1375 was merged. It should be
+        # removed at some point.
+        total_steps: Union[int, float] = 0,
         warmup_proportion: FloatTensorLike = 0.1,
         min_lr: FloatTensorLike = 0.0,
         name: str = "RectifiedAdam",
@@ -123,7 +126,7 @@ def __init__(
         self._set_hyper("decay", self._initial_decay)
         self._set_hyper("weight_decay", weight_decay)
         self._set_hyper("sma_threshold", sma_threshold)
-        self._set_hyper("total_steps", float(total_steps))
+        self._set_hyper("total_steps", total_steps)
         self._set_hyper("warmup_proportion", warmup_proportion)
         self._set_hyper("min_lr", min_lr)
         self.epsilon = epsilon or tf.keras.backend.epsilon()
diff --git a/tensorflow_addons/optimizers/rectified_adam_test.py b/tensorflow_addons/optimizers/rectified_adam_test.py
index 5950fcdd31..73e0a56b6b 100644
--- a/tensorflow_addons/optimizers/rectified_adam_test.py
+++ b/tensorflow_addons/optimizers/rectified_adam_test.py
@@ -172,5 +172,14 @@ def test_get_config(self):
         self.assertEqual(config["total_steps"], 0)
 
 
+def test_serialization():
+    optimizer = RectifiedAdam(
+        lr=1e-3, total_steps=10000, warmup_proportion=0.1, min_lr=1e-5,
+    )
+    config = tf.keras.optimizers.serialize(optimizer)
+    new_optimizer = tf.keras.optimizers.deserialize(config)
+    assert new_optimizer.get_config() == optimizer.get_config()
+
+
 if __name__ == "__main__":
     sys.exit(pytest.main([__file__]))

From 140429a9e047879491d5e6641b4f787f9ca45b13 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Tue, 24 Mar 2020 14:16:14 +0000
Subject: [PATCH 2/3] Better error message.

---
 tensorflow_addons/optimizers/rectified_adam.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/tensorflow_addons/optimizers/rectified_adam.py b/tensorflow_addons/optimizers/rectified_adam.py
index 5b6cbe7772..40b49ec826 100644
--- a/tensorflow_addons/optimizers/rectified_adam.py
+++ b/tensorflow_addons/optimizers/rectified_adam.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 # ==============================================================================
 """Rectified Adam (RAdam) optimizer."""
+import warnings
 import tensorflow as tf
 
 from tensorflow_addons.utils.types import FloatTensorLike
@@ -81,7 +82,7 @@ def __init__(
         sma_threshold: FloatTensorLike = 5.0,
         # float for total_steps is here to be able to load models created before
         # https://github.com/tensorflow/addons/pull/1375 was merged. It should be
-        # removed at some point.
+        # removed for Addons 0.11.
         total_steps: Union[int, float] = 0,
         warmup_proportion: FloatTensorLike = 0.1,
         min_lr: FloatTensorLike = 0.0,
         name: str = "RectifiedAdam",
@@ -126,7 +127,16 @@ def __init__(
         self._set_hyper("decay", self._initial_decay)
         self._set_hyper("weight_decay", weight_decay)
         self._set_hyper("sma_threshold", sma_threshold)
-        self._set_hyper("total_steps", total_steps)
+        if isinstance(total_steps, float):
+            warnings.warn(
+                "The parameter `total_steps` passed to the __init__ of RectifiedAdam "
+                "is a float. This behavior is deprecated and in Addons 0.11, this "
+                "will raise an error. Use a int instead. If you get this message "
+                "when loading a model, save it again and the `total_steps` parameter "
+                "will automatically be converted to an int.",
+                DeprecationWarning,
+            )
+        self._set_hyper("total_steps", int(total_steps))
         self._set_hyper("warmup_proportion", warmup_proportion)
         self._set_hyper("min_lr", min_lr)
         self.epsilon = epsilon or tf.keras.backend.epsilon()

From 3271420fcb284d98b561487163005f9168af1ef4 Mon Sep 17 00:00:00 2001
From: Gabriel de Marmiesse
Date: Tue, 24 Mar 2020 18:42:36 +0100
Subject: [PATCH 3/3] Update tensorflow_addons/optimizers/rectified_adam.py

---
 tensorflow_addons/optimizers/rectified_adam.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tensorflow_addons/optimizers/rectified_adam.py b/tensorflow_addons/optimizers/rectified_adam.py
index 40b49ec826..97a318fb36 100644
--- a/tensorflow_addons/optimizers/rectified_adam.py
+++ b/tensorflow_addons/optimizers/rectified_adam.py
@@ -131,7 +131,7 @@ def __init__(
             warnings.warn(
                 "The parameter `total_steps` passed to the __init__ of RectifiedAdam "
                 "is a float. This behavior is deprecated and in Addons 0.11, this "
-                "will raise an error. Use a int instead. If you get this message "
+                "will raise an error. Use an int instead. If you get this message "
                 "when loading a model, save it again and the `total_steps` parameter "
                 "will automatically be converted to an int.",
                 DeprecationWarning,
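
Note on PATCH 1 (a sketch, not part of the patches): the round trip that the
new test_serialization test exercises looks like the following, assuming a
tensorflow-addons build containing these patches is installed; importing the
package registers RectifiedAdam as a Keras-serializable object, so
tf.keras.optimizers.deserialize can rebuild it from its config.

    import tensorflow as tf
    import tensorflow_addons as tfa

    optimizer = tfa.optimizers.RectifiedAdam(
        learning_rate=1e-3, total_steps=10000, warmup_proportion=0.1, min_lr=1e-5
    )

    # Serialize the optimizer to a plain config dict and rebuild it from
    # that dict. Before PATCH 1, __init__ stored the hyperparameter with
    # float(total_steps), so the saved config carried a float that no
    # longer matched the `int` annotation checked at construction time,
    # and deserialization failed.
    config = tf.keras.optimizers.serialize(optimizer)
    restored = tf.keras.optimizers.deserialize(config)
    assert restored.get_config() == optimizer.get_config()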
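
And a sketch of the backward-compatibility path added in PATCH 2/3, under the
same assumptions: passing a float total_steps (as configs saved before these
patches do) should emit a DeprecationWarning and store the value as an int.

    import warnings

    import tensorflow_addons as tfa

    # Simulate loading a pre-fix config in which total_steps was saved as
    # a float: __init__ now warns and converts the value to an int.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        optimizer = tfa.optimizers.RectifiedAdam(total_steps=10000.0)

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
    assert optimizer.get_config()["total_steps"] == 10000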