Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] Use public Keras object registration #669

Merged
merged 9 commits into from
Nov 5, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,7 @@
In order to conform with the current API standard, all activations
must:
* Be a `tf.function`.
* [Register as a keras global object](https://github.com/tensorflow/addons/blob/master/tensorflow_addons/utils/keras_utils.py)
so it can be serialized properly.
* Register as a keras global object so it can be serialized properly: `@tf.keras.utils.register_keras_serializable(package='Addons')`
* Add the addon to the `py_library` in this sub-package's BUILD file.

#### Testing Requirements
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/gelu.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_activation_ops_so = tf.load_op_library(
get_path_to_datafile("custom_ops/activations/_activation_ops.so"))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def gelu(x, approximate=True):
"""Gaussian Error Linear Unit.
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/hardshrink.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_activation_ops_so = tf.load_op_library(
get_path_to_datafile("custom_ops/activations/_activation_ops.so"))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def hardshrink(x, lower=-0.5, upper=0.5):
"""Hard shrink function.
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/lisht.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_activation_ops_so = tf.load_op_library(
get_path_to_datafile("custom_ops/activations/_activation_ops.so"))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def lisht(x):
"""LiSHT: Non-Parameteric Linearly Scaled Hyperbolic Tangent Activation Function.
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/mish.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_activation_ops_so = tf.load_op_library(
get_path_to_datafile("custom_ops/activations/_activation_ops.so"))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def mish(x):
"""Mish: A Self Regularized Non-Monotonic Neural Activation Function.
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/rrelu.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_activation_ops_so = tf.load_op_library(
get_path_to_datafile("custom_ops/activations/_activation_ops.so"))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def rrelu(x, lower=0.125, upper=0.3333333333333333, training=None, seed=None):
"""rrelu function.
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/softshrink.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_activation_ops_so = tf.load_op_library(
get_path_to_datafile("custom_ops/activations/_activation_ops.so"))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def softshrink(x, lower=-0.5, upper=0.5):
"""Soft shrink function.
Expand Down
4 changes: 1 addition & 3 deletions tensorflow_addons/activations/sparsemax.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,8 @@

import tensorflow as tf

from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def sparsemax(logits, axis=-1):
"""Sparsemax activation function [1].
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/activations/tanhshrink.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_activation_ops_so = tf.load_op_library(
get_path_to_datafile("custom_ops/activations/_activation_ops.so"))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def tanhshrink(x):
"""Applies the element-wise function: x - tanh(x)
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/callbacks/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,7 @@
In order to conform with the current API standard, all callbacks
must:
* Inherit from `tf.keras.callbacks.Callback`.
* [Register as a keras global object](https://github.com/tensorflow/addons/blob/master/tensorflow_addons/utils/keras_utils.py)
so it can be serialized properly.
* Register as a keras global object so it can be serialized properly: `@tf.keras.utils.register_keras_serializable(package='Addons')`
* Add the addon to the `py_library` in this sub-package's BUILD file.

#### Testing Requirements
Expand Down
4 changes: 2 additions & 2 deletions tensorflow_addons/callbacks/tqdm_progress_bar.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,13 @@
from __future__ import absolute_import, division, print_function

import time
import tensorflow as tf
from collections import defaultdict

from tensorflow.keras.callbacks import Callback
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class TQDMProgressBar(Callback):
"""TQDM Progress Bar for Tensorflow Keras.

Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/layers/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,7 @@
In order to conform with the current API standard, all layers
must:
* Inherit from either `keras.layers.Layer` or its subclasses.
* [Register as a keras global object](https://github.com/tensorflow/addons/blob/master/tensorflow_addons/utils/keras_utils.py)
so it can be serialized properly.
* Register as a keras global object so it can be serialized properly: `@tf.keras.utils.register_keras_serializable(package='Addons')`
* Add the addon to the `py_library` in this sub-package's BUILD file.

#### Testing Requirements
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/layers/gelu.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,10 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.activations import gelu


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class GeLU(tf.keras.layers.Layer):
"""Gaussian Error Linear Unit.

Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/layers/maxout.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,9 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class Maxout(tf.keras.layers.Layer):
"""Applies Maxout to the input.

Expand Down
5 changes: 2 additions & 3 deletions tensorflow_addons/layers/normalizations.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,9 @@

import logging
import tensorflow as tf
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class GroupNormalization(tf.keras.layers.Layer):
"""Group normalization layer.

Expand Down Expand Up @@ -273,7 +272,7 @@ def _create_broadcast_shape(self, input_shape):
return broadcast_shape


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class InstanceNormalization(GroupNormalization):
"""Instance normalization layer.

Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/layers/optical_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils
from tensorflow_addons.utils.resource_loader import get_path_to_datafile

_correlation_cost_op_so = tf.load_op_library(
Expand Down Expand Up @@ -139,7 +138,7 @@ def _correlation_cost_grad(op, grad_output):
return [grad_input_a, grad_input_b]


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class CorrelationCost(tf.keras.layers.Layer):
"""Correlation Cost Layer.

Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/layers/poincare.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,9 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class PoincareNormalize(tf.keras.layers.Layer):
"""Project into the Poincare ball with norm <= 1.0 - epsilon.

Expand Down
4 changes: 1 addition & 3 deletions tensorflow_addons/layers/sparsemax.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,10 @@
from __future__ import print_function

import tensorflow as tf

from tensorflow_addons.activations.sparsemax import sparsemax
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class Sparsemax(tf.keras.layers.Layer):
"""Sparsemax activation function [1].

Expand Down
1 change: 0 additions & 1 deletion tensorflow_addons/layers/sparsemax_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@ def _np_sparsemax(z):
class SparsemaxTest(tf.test.TestCase):
def test_sparsemax_layer_against_numpy(self, dtype):
"""check sparsemax kernel against numpy."""
self.skipTest('Wait #33614 to be fixed')
random = np.random.RandomState(1)

z = random.uniform(low=-3, high=3, size=(test_obs, 10)).astype(dtype)
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/layers/wrappers.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,9 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class WeightNormalization(tf.keras.layers.Wrapper):
"""This wrapper reparameterizes a layer by decoupling the weight's
magnitude and direction.
Expand Down
3 changes: 1 addition & 2 deletions tensorflow_addons/losses/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,7 @@
In order to conform with the current API standard, all losses
must:
* Inherit from `keras.losses.Loss`.
* [Register as a keras global object](https://github.com/tensorflow/addons/blob/master/tensorflow_addons/utils/keras_utils.py)
so it can be serialized properly.
* Register as a keras global object so it can be serialized properly: `@tf.keras.utils.register_keras_serializable(package='Addons')`
* Add the addon to the `py_library` in this sub-package's BUILD file.

#### Testing Requirements
Expand Down
5 changes: 2 additions & 3 deletions tensorflow_addons/losses/contrastive.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,9 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def contrastive_loss(y_true, y_pred, margin=1.0):
"""Computes the contrastive loss between `y_true` and `y_pred`.
Expand Down Expand Up @@ -58,7 +57,7 @@ def contrastive_loss(y_true, y_pred, margin=1.0):
(1. - y_true) * tf.math.square(tf.math.maximum(margin - y_pred, 0.)))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class ContrastiveLoss(tf.keras.losses.Loss):
"""Computes the contrastive loss between `y_true` and `y_pred`.

Expand Down
5 changes: 2 additions & 3 deletions tensorflow_addons/losses/focal_loss.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,9 @@

import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class SigmoidFocalCrossEntropy(tf.keras.losses.Loss):
"""Implements the focal loss function.

Expand Down Expand Up @@ -98,7 +97,7 @@ def get_config(self):
return dict(list(base_config.items()) + list(config.items()))


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def sigmoid_focal_crossentropy(y_true,
y_pred,
Expand Down
5 changes: 2 additions & 3 deletions tensorflow_addons/losses/lifted.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,9 @@

import tensorflow as tf
from tensorflow_addons.losses import metric_learning
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def lifted_struct_loss(labels, embeddings, margin=1.0):
"""Computes the lifted structured loss.
Expand Down Expand Up @@ -103,7 +102,7 @@ def lifted_struct_loss(labels, embeddings, margin=1.0):
return lifted_loss


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class LiftedStructLoss(tf.keras.losses.Loss):
"""Computes the lifted structured loss.

Expand Down
9 changes: 4 additions & 5 deletions tensorflow_addons/losses/npairs.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,9 @@
from __future__ import print_function

import tensorflow as tf
from tensorflow_addons.utils import keras_utils


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def npairs_loss(y_true, y_pred):
"""Computes the npairs loss between `y_true` and `y_pred`.
Expand Down Expand Up @@ -64,7 +63,7 @@ def npairs_loss(y_true, y_pred):
return tf.math.reduce_mean(loss)


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
@tf.function
def npairs_multilabel_loss(y_true, y_pred):
"""Computes the npairs loss between multilabel data `y_true` and `y_pred`.
Expand Down Expand Up @@ -129,7 +128,7 @@ def npairs_multilabel_loss(y_true, y_pred):
return tf.math.reduce_mean(loss)


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class NpairsLoss(tf.keras.losses.Loss):
"""Computes the npairs loss between `y_true` and `y_pred`.

Expand Down Expand Up @@ -160,7 +159,7 @@ def call(self, y_true, y_pred):
return npairs_loss(y_true, y_pred)


@keras_utils.register_keras_custom_object
@tf.keras.utils.register_keras_serializable(package='Addons')
class NpairsMultilabelLoss(tf.keras.losses.Loss):
"""Computes the npairs loss between multilabel data `y_true` and `y_pred`.

Expand Down
Loading