From 15030c170c74ae4cee2e25dc87d31c3729eb2e39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kaan=20B=C4=B1=C3=A7akc=C4=B1?= <46622558+Frightera@users.noreply.github.com> Date: Tue, 8 Aug 2023 13:40:50 +0100 Subject: [PATCH 1/4] Small typo --- keras_core/activations/activations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/keras_core/activations/activations.py b/keras_core/activations/activations.py index 82dda0671..31d6e79e9 100644 --- a/keras_core/activations/activations.py +++ b/keras_core/activations/activations.py @@ -429,7 +429,7 @@ def log_softmax(x, axis=-1): is applied along. Args: - x : Input tensor. + x: Input tensor. axis: Integer, axis along which the softmax is applied. """ return ops.log_softmax(x, axis=axis) From a7e0f533090d0b65cc8f8071201fbc453751898c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kaan=20B=C4=B1=C3=A7akc=C4=B1?= <46622558+Frightera@users.noreply.github.com> Date: Tue, 8 Aug 2023 14:26:59 +0100 Subject: [PATCH 2/4] Added some docstrings --- keras_core/ops/nn.py | 198 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 198 insertions(+) diff --git a/keras_core/ops/nn.py b/keras_core/ops/nn.py index b2fbd3a44..25206b017 100644 --- a/keras_core/ops/nn.py +++ b/keras_core/ops/nn.py @@ -22,6 +22,24 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.relu", "keras_core.ops.nn.relu"]) def relu(x): + """Rectified linear unit activation function. + + It is defined as `f(x) = max(0, x)`. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. + + Examples: + + >>> x = np.array([-1, 0, 1]) + >>> x_relu = keras_core.ops.relu(x) + >>> print(x_relu) + array([0, 0, 1], shape=(3,), dtype=int64) + + """ if any_symbolic_tensors((x,)): return Relu().symbolic_call(x) return backend.nn.relu(x) @@ -37,6 +55,24 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.relu6", "keras_core.ops.nn.relu6"]) def relu6(x): + """Rectified linear unit activation function with upper bound of 6. + + It is defined as `f(x) = np.clip(x, 0, 6)`. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. + + Example: + + >>> x = np.array([-1, 0, 1, 6, 7]) + >>> x_relu6 = keras_core.ops.relu6(x) + >>> print(x_relu6) + array([0, 0, 1, 6, 6], shape=(5,), dtype=int64) + + """ if any_symbolic_tensors((x,)): return Relu6().symbolic_call(x) return backend.nn.relu6(x) @@ -52,6 +88,26 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.sigmoid", "keras_core.ops.nn.sigmoid"]) def sigmoid(x): + """Sigmoid activation function. + + + + It is defined as `f(x) = 1 / (1 + exp(-x))`. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. + + Example: + + >>> x = np.array([-1, 0, 1]) + >>> x_sigmoid = keras_core.ops.sigmoid(x) + >>> print(x_sigmoid) + array([0.26894143, 0.5, 0.7310586 ], dtype=float32) + + """ if any_symbolic_tensors((x,)): return Sigmoid().symbolic_call(x) return backend.nn.sigmoid(x) @@ -67,6 +123,25 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.softplus", "keras_core.ops.nn.softplus"]) def softplus(x): + """Softplus activation function. + + It is defined as `f(x) = log(exp(x) + 1)`, where `log` is the natural + logarithm and `exp` is the exponential function. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. 
+ + Example: + + >>> x = np.array([-1, 0, 1]) + >>> x_softplus = keras_core.ops.softplus(x) + >>> print(x_softplus) + array([0.31326166, 0.6931472 , 1.3132616 ], shape=(3,), dtype=float32) + + """ if any_symbolic_tensors((x,)): return Softplus().symbolic_call(x) return backend.nn.softplus(x) @@ -82,6 +157,24 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.softsign", "keras_core.ops.nn.softsign"]) def softsign(x): + """Softsign activation function. + + It is defined as `f(x) = x / (abs(x) + 1)`. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. + + Example: + + >>> x = np.array([-1, 0, 1]) + >>> x_softsign = keras_core.ops.softsign(x) + >>> print(x_softsign) + array([-0.5, 0. , 0.5], shape=(3,), dtype=float64) + + """ if any_symbolic_tensors((x,)): return Softsign().symbolic_call(x) return backend.nn.softsign(x) @@ -97,6 +190,24 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.silu", "keras_core.ops.nn.silu"]) def silu(x): + """Sigmoid-weighted linear unit activation function. + + It is defined as `f(x) = x * sigmoid(x)`. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. + + Example: + + >>> x = np.array([-1, 0, 1]) + >>> x_silu = keras_core.ops.silu(x) + >>> print(x_silu) + array([-0.26894143, 0., 0.7310586], shape=(3,), dtype=float32) + + """ if any_symbolic_tensors((x,)): return Silu().symbolic_call(x) return backend.nn.silu(x) @@ -132,6 +243,24 @@ def compute_output_spec(self, x): ] ) def log_sigmoid(x): + """Logarithm of the sigmoid activation function. + + It is defined as `f(x) = log(1 / (1 + exp(-x)))`. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. + + Example: + + >>> x = np.array([-1, 0, 1]) + >>> x_log_sigmoid = keras_core.ops.log_sigmoid(x) + >>> print(x_log_sigmoid) + array([-1.3132616, -0.6931472, -0.3132616], shape=(3,), dtype=float32) + + """ if any_symbolic_tensors((x,)): return LogSigmoid().symbolic_call(x) return backend.nn.log_sigmoid(x) @@ -153,6 +282,28 @@ def compute_output_spec(self, x): ["keras_core.ops.leaky_relu", "keras_core.ops.nn.leaky_relu"] ) def leaky_relu(x, negative_slope=0.2): + """Leaky version of a Rectified Linear Unit. + + It allows a small gradient when the unit is not active, it is defined as: + + `f(x) = alpha * x for x < 0` or `f(x) = x for x >= 0`. + + Args: + x: Input tensor. + negative_slope: Slope of the activation function at x < 0. + Defaults to `0.2`. + + Returns: + A tensor with the same shape as `x`. + + Example: + + >>> x = np.array([-1, 0, 1]) + >>> x_leaky_relu = keras_core.ops.leaky_relu(x) + >>> print(x_leaky_relu) + array([-0.2, 0. , 1. ], shape=(3,), dtype=float32) + + """ if any_symbolic_tensors((x,)): return LeakyRelu(negative_slope).symbolic_call(x) return backend.nn.leaky_relu(x, negative_slope=negative_slope) @@ -173,6 +324,25 @@ def compute_output_spec(self, x): ] ) def hard_sigmoid(x): + """Hard sigmoid activation function. + + It is defined as: + + `0 if x < -2.5`, `1 if x > 2.5`, `(0.2 * x) + 0.5 if -2.5 <= x <= 2.5`. + + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. 
+ + Example: + >>> x = np.array([-1, 0, 1]) + >>> x_hard_sigmoid = keras_core.ops.hard_sigmoid(x) + >>> print(x_hard_sigmoid) + array([0.3, 0.5, 0.7], shape=(3,), dtype=float32) + + """ if any_symbolic_tensors((x,)): return HardSigmoid().symbolic_call(x) return backend.nn.hard_sigmoid(x) @@ -192,6 +362,26 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.elu", "keras_core.ops.nn.elu"]) def elu(x, alpha=1.0): + """Exponential Linear Unit. + + It is defined as: + + `f(x) = alpha * (exp(x) - 1.) for x < 0`, `f(x) = x for x >= 0`. + + Args: + x: Input tensor. + alpha: A scalar, slope of positive section. Defaults to `1.0`. + + Returns: + A tensor with the same shape as `x`. + + Example: + >>> x = np.array([-1, 0, 1]) + >>> x_elu = keras_core.ops.elu(x) + >>> print(x_elu) + array([-0.63212055, 0., 1.], shape=(3,), dtype=float32) + + """ if any_symbolic_tensors((x,)): return Elu(alpha).symbolic_call(x) return backend.nn.elu(x, alpha=alpha) @@ -207,6 +397,14 @@ def compute_output_spec(self, x): @keras_core_export(["keras_core.ops.selu", "keras_core.ops.nn.selu"]) def selu(x): + """Scaled Exponential Linear Unit (SELU). + + It is defined as: + + `f(x) = scale * alpha * (exp(x) - 1.) for x < 0`, + `f(x) = scale * x for x >= 0`. + + """ if any_symbolic_tensors((x,)): return Selu().symbolic_call(x) return backend.nn.selu(x) From 16203b2d04bd9468d9013bf73c78264181be4a85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kaan=20B=C4=B1=C3=A7akc=C4=B1?= <46622558+Frightera@users.noreply.github.com> Date: Tue, 8 Aug 2023 14:40:41 +0100 Subject: [PATCH 3/4] Remove extra lines --- keras_core/ops/nn.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/keras_core/ops/nn.py b/keras_core/ops/nn.py index 25206b017..db12c7d2f 100644 --- a/keras_core/ops/nn.py +++ b/keras_core/ops/nn.py @@ -90,8 +90,6 @@ def compute_output_spec(self, x): def sigmoid(x): """Sigmoid activation function. - - It is defined as `f(x) = 1 / (1 + exp(-x))`. Args: From ca9de76ce38bbb2368956aabf2dd9822645f85f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kaan=20B=C4=B1=C3=A7akc=C4=B1?= <46622558+Frightera@users.noreply.github.com> Date: Tue, 8 Aug 2023 17:11:18 +0100 Subject: [PATCH 4/4] Unindent examples // use floats in example section --- keras_core/ops/nn.py | 97 +++++++++++++++++++++++++------------------- 1 file changed, 56 insertions(+), 41 deletions(-) diff --git a/keras_core/ops/nn.py b/keras_core/ops/nn.py index db12c7d2f..dbd92024b 100644 --- a/keras_core/ops/nn.py +++ b/keras_core/ops/nn.py @@ -32,12 +32,12 @@ def relu(x): Returns: A tensor with the same shape as `x`. 
- Examples: + Example: - >>> x = np.array([-1, 0, 1]) - >>> x_relu = keras_core.ops.relu(x) - >>> print(x_relu) - array([0, 0, 1], shape=(3,), dtype=int64) + >>> x = np.array([-1., 0., 1.]) + >>> x_relu = keras_core.ops.relu(x) + >>> print(x_relu) + array([0., 0., 1.], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -67,10 +67,10 @@ def relu6(x): Example: - >>> x = np.array([-1, 0, 1, 6, 7]) - >>> x_relu6 = keras_core.ops.relu6(x) - >>> print(x_relu6) - array([0, 0, 1, 6, 6], shape=(5,), dtype=int64) + >>> x = np.array([-1., 0., 1., 6., 7.]) + >>> x_relu6 = keras_core.ops.relu6(x) + >>> print(x_relu6) + array([0., 0., 1., 6., 6.], shape=(5,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -100,10 +100,10 @@ def sigmoid(x): Example: - >>> x = np.array([-1, 0, 1]) - >>> x_sigmoid = keras_core.ops.sigmoid(x) - >>> print(x_sigmoid) - array([0.26894143, 0.5, 0.7310586 ], dtype=float32) + >>> x = np.array([-1., 0., 1.]) + >>> x_sigmoid = keras_core.ops.sigmoid(x) + >>> print(x_sigmoid) + array([0.26894143, 0.5, 0.7310586 ], dtype=float64) """ if any_symbolic_tensors((x,)): @@ -134,10 +134,10 @@ def softplus(x): Example: - >>> x = np.array([-1, 0, 1]) - >>> x_softplus = keras_core.ops.softplus(x) - >>> print(x_softplus) - array([0.31326166, 0.6931472 , 1.3132616 ], shape=(3,), dtype=float32) + >>> x = np.array([-1., 0., 1.]) + >>> x_softplus = keras_core.ops.softplus(x) + >>> print(x_softplus) + array([0.31326166, 0.6931472 , 1.3132616 ], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -167,10 +167,10 @@ def softsign(x): Example: - >>> x = np.array([-1, 0, 1]) - >>> x_softsign = keras_core.ops.softsign(x) - >>> print(x_softsign) - array([-0.5, 0. , 0.5], shape=(3,), dtype=float64) + >>> x = np.array([-1., 0., 1.]) + >>> x_softsign = keras_core.ops.softsign(x) + >>> print(x_softsign) + array([-0.5, 0., 0.5], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -200,10 +200,10 @@ def silu(x): Example: - >>> x = np.array([-1, 0, 1]) - >>> x_silu = keras_core.ops.silu(x) - >>> print(x_silu) - array([-0.26894143, 0., 0.7310586], shape=(3,), dtype=float32) + >>> x = np.array([-1., 0., 1.]) + >>> x_silu = keras_core.ops.silu(x) + >>> print(x_silu) + array([-0.26894143, 0., 0.7310586], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -253,10 +253,10 @@ def log_sigmoid(x): Example: - >>> x = np.array([-1, 0, 1]) - >>> x_log_sigmoid = keras_core.ops.log_sigmoid(x) - >>> print(x_log_sigmoid) - array([-1.3132616, -0.6931472, -0.3132616], shape=(3,), dtype=float32) + >>> x = np.array([-1., 0., 1.]) + >>> x_log_sigmoid = keras_core.ops.log_sigmoid(x) + >>> print(x_log_sigmoid) + array([-1.3132616, -0.6931472, -0.3132616], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -296,10 +296,10 @@ def leaky_relu(x, negative_slope=0.2): Example: - >>> x = np.array([-1, 0, 1]) - >>> x_leaky_relu = keras_core.ops.leaky_relu(x) - >>> print(x_leaky_relu) - array([-0.2, 0. , 1. ], shape=(3,), dtype=float32) + >>> x = np.array([-1., 0., 1.]) + >>> x_leaky_relu = keras_core.ops.leaky_relu(x) + >>> print(x_leaky_relu) + array([-0.2, 0. , 1. ], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -335,10 +335,11 @@ def hard_sigmoid(x): A tensor with the same shape as `x`. 
Example: - >>> x = np.array([-1, 0, 1]) - >>> x_hard_sigmoid = keras_core.ops.hard_sigmoid(x) - >>> print(x_hard_sigmoid) - array([0.3, 0.5, 0.7], shape=(3,), dtype=float32) + + >>> x = np.array([-1., 0., 1.]) + >>> x_hard_sigmoid = keras_core.ops.hard_sigmoid(x) + >>> print(x_hard_sigmoid) + array([0.3, 0.5, 0.7], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -374,10 +375,11 @@ def elu(x, alpha=1.0): A tensor with the same shape as `x`. Example: - >>> x = np.array([-1, 0, 1]) - >>> x_elu = keras_core.ops.elu(x) - >>> print(x_elu) - array([-0.63212055, 0., 1.], shape=(3,), dtype=float32) + + >>> x = np.array([-1., 0., 1.]) + >>> x_elu = keras_core.ops.elu(x) + >>> print(x_elu) + array([-0.63212055, 0., 1.], shape=(3,), dtype=float64) """ if any_symbolic_tensors((x,)): @@ -402,6 +404,19 @@ def selu(x): `f(x) = scale * alpha * (exp(x) - 1.) for x < 0`, `f(x) = scale * x for x >= 0`. + Args: + x: Input tensor. + + Returns: + A tensor with the same shape as `x`. + + Example: + + >>> x = np.array([-1., 0., 1.]) + >>> x_selu = keras_core.ops.selu(x) + >>> print(x_selu) + array([-1.11133055, 0., 1.05070098], shape=(3,), dtype=float64) + """ if any_symbolic_tensors((x,)): return Selu().symbolic_call(x)
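Note (not part of the patches above): the documented example outputs can be spot-checked against plain NumPy references. The sketch below is only an illustration of that check, not part of the change set; it assumes keras_core is importable, that the default argument values match the docstrings (negative_slope=0.2, alpha=1.0), and that the backend tensors returned by keras_core.ops can be converted with np.asarray (true for the NumPy, TensorFlow, and JAX backends). The exact printed reprs (shape/dtype annotations) will vary by backend.

    import numpy as np
    import keras_core

    x = np.array([-1.0, 0.0, 1.0])

    # NumPy reference implementations of the formulas quoted in the docstrings.
    references = {
        "relu": (keras_core.ops.relu(x), np.maximum(x, 0.0)),
        "sigmoid": (keras_core.ops.sigmoid(x), 1.0 / (1.0 + np.exp(-x))),
        "softplus": (keras_core.ops.softplus(x), np.log(np.exp(x) + 1.0)),
        "softsign": (keras_core.ops.softsign(x), x / (np.abs(x) + 1.0)),
        "silu": (keras_core.ops.silu(x), x / (1.0 + np.exp(-x))),
        "leaky_relu": (keras_core.ops.leaky_relu(x), np.where(x < 0, 0.2 * x, x)),
        "elu": (keras_core.ops.elu(x), np.where(x < 0, np.exp(x) - 1.0, x)),
    }

    for name, (got, expected) in references.items():
        # Convert the backend tensor to NumPy before comparing.
        np.testing.assert_allclose(np.asarray(got), expected, rtol=1e-5)
        print(f"{name}: {np.asarray(got)}")

Run this way, the printed arrays agree with the values quoted in the docstring examples above, up to floating-point precision.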