From 0f40d6f775f17ae4c903af8d22333729fc93d8c7 Mon Sep 17 00:00:00 2001
From: Faisal Alsrheed <47912291+Faisal-Alsrheed@users.noreply.github.com>
Date: Mon, 21 Aug 2023 08:37:04 +0000
Subject: [PATCH 1/2] Enhance docstrings + easy to understand examples

---
 keras_core/ops/nn.py | 67 ++++++++++++++++++++++----------------------
 1 file changed, 33 insertions(+), 34 deletions(-)

diff --git a/keras_core/ops/nn.py b/keras_core/ops/nn.py
index fd5a78754..f6e68f5fb 100644
--- a/keras_core/ops/nn.py
+++ b/keras_core/ops/nn.py
@@ -32,12 +32,14 @@ def relu(x):
     Returns:
         A tensor with the same shape as `x`.

-    Example:
+    Examples:

-    >>> x = np.array([-1., 0., 1.])
-    >>> x_relu = keras_core.ops.relu(x)
-    >>> print(x_relu)
-    array([0., 0., 1.], shape=(3,), dtype=float64)
+    >>> x1 = keras_core.ops.convert_to_tensor([-1.0, 0.0, 1.0])
+    >>> keras_core.ops.relu(x1)
+    array([0.0, 0.0, 1.0], dtype=float32)
+    >>> x2 = keras_core.ops.convert_to_tensor([-100.0, 0.0, 100.0])
+    >>> keras_core.ops.relu(x2)
+    array([0.0, 0.0, 100.0], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
@@ -67,10 +69,9 @@ def relu6(x):

     Example:

-    >>> x = np.array([-1., 0., 1., 6., 7.])
-    >>> x_relu6 = keras_core.ops.relu6(x)
-    >>> print(x_relu6)
-    array([0., 0., 1., 6., 6.], shape=(5,), dtype=float64)
+    >>> x = keras_core.ops.convert_to_tensor([-66.0, -6.0, 0.0, 6.0, 66.0])
+    >>> keras_core.ops.relu6(x)
+    array([0.0, 0.0, 0.0, 6.0, 6.0], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
@@ -100,10 +101,9 @@ def sigmoid(x):

     Example:

-    >>> x = np.array([-1., 0., 1.])
-    >>> x_sigmoid = keras_core.ops.sigmoid(x)
-    >>> print(x_sigmoid)
-    array([0.26894143, 0.5, 0.7310586 ], dtype=float64)
+    >>> x = keras_core.ops.convert_to_tensor([-6.0, 1.0, 0.0, 1.0, 6.0])
+    >>> keras_core.ops.sigmoid(x)
+    array([0.00247262, 0.7310586, 0.5, 0.7310586, 0.9975274], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
@@ -134,10 +134,9 @@ def softplus(x):

     Example:

-    >>> x = np.array([-1., 0., 1.])
-    >>> x_softplus = keras_core.ops.softplus(x)
-    >>> print(x_softplus)
-    array([0.31326166, 0.6931472 , 1.3132616 ], shape=(3,), dtype=float64)
+    >>> x = keras_core.ops.convert_to_tensor([-0.555, 0.0, 0.555])
+    >>> keras_core.ops.softplus(x)
+    array([0.45366603, 0.6931472, 1.008666], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
@@ -167,10 +166,9 @@ def softsign(x):

     Example:

-    >>> x = np.array([-1., 0., 1.])
-    >>> x_softsign = keras_core.ops.softsign(x)
-    >>> print(x_softsign)
-    array([-0.5, 0., 0.5], shape=(3,), dtype=float64)
+    >>> x = keras_core.ops.convert_to_tensor([-0.100, -10.0, 1.0, 0.0, 10.0, 100.0])
+    >>> keras_core.ops.softsign(x)
+    Array([-0.09090909, -0.90909094, 0.5, 0.0, 0.90909094, 0.990099], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
@@ -195,9 +193,10 @@ def compute_output_spec(self, x):
     ]
 )
 def silu(x):
-    """Sigmoid-weighted linear unit activation function.
+    """Sigmoid Linear Unit (SiLU) activation function, also known as Swish.

-    It is defined as `f(x) = x * sigmoid(x)`.
+    The SiLU activation function is computed by the sigmoid function multiplied
+    by its input. It is defined as `f(x) = x * sigmoid(x)`.

     Args:
         x: Input tensor.
@@ -207,10 +206,11 @@ def silu(x):

     Example:

-    >>> x = np.array([-1., 0., 1.])
-    >>> x_silu = keras_core.ops.silu(x)
-    >>> print(x_silu)
-    array([-0.26894143, 0., 0.7310586], shape=(3,), dtype=float64)
+    >>> x = keras_core.ops.convert_to_tensor([-6.0, 1.0, 0.0, 1.0, 6.0])
+    >>> keras_core.ops.sigmoid(x)
+    array([0.00247262, 0.7310586, 0.5, 0.7310586, 0.9975274], dtype=float32)
+    >>> keras_core.ops.silu(x)
+    array([-0.0148357, 0.7310586, 0.0, 0.7310586, 5.9851646], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
@@ -245,10 +245,9 @@ def log_sigmoid(x):

     Example:

-    >>> x = np.array([-1., 0., 1.])
-    >>> x_log_sigmoid = keras_core.ops.log_sigmoid(x)
-    >>> print(x_log_sigmoid)
-    array([-1.3132616, -0.6931472, -0.3132616], shape=(3,), dtype=float64)
+    >>> x = keras_core.ops.convert_to_tensor([-0.541391, 0.0, 0.50, 5.0])
+    >>> keras_core.ops.log_sigmoid(x)
+    array([-1.0000418, -0.6931472, -0.474077, -0.00671535], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
@@ -272,7 +271,7 @@ def compute_output_spec(self, x):
     ["keras_core.ops.leaky_relu", "keras_core.ops.nn.leaky_relu"]
 )
 def leaky_relu(x, negative_slope=0.2):
-    """Leaky version of a Rectified Linear Unit.
+    """Leaky version of a Rectified Linear Unit activation function.

     It allows a small gradient when the unit is not active, it is defined as:
@@ -353,7 +352,7 @@ def compute_output_spec(self, x):

 @keras_core_export(["keras_core.ops.elu", "keras_core.ops.nn.elu"])
 def elu(x, alpha=1.0):
-    """Exponential Linear Unit.
+    """Exponential Linear Unit activation function.

     It is defined as:
@@ -389,7 +388,7 @@ def compute_output_spec(self, x):

 @keras_core_export(["keras_core.ops.selu", "keras_core.ops.nn.selu"])
 def selu(x):
-    """Scaled Exponential Linear Unit (SELU).
+    """Scaled Exponential Linear Unit (SELU) activation function.

     It is defined as:

From cfe5d48ecf20810092a776eb528ad07d66da954c Mon Sep 17 00:00:00 2001
From: Faisal Alsrheed <47912291+Faisal-Alsrheed@users.noreply.github.com>
Date: Mon, 21 Aug 2023 08:55:11 +0000
Subject: [PATCH 2/2] Enhance docstrings + easy to understand examplesv2

---
 keras_core/ops/nn.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/keras_core/ops/nn.py b/keras_core/ops/nn.py
index f6e68f5fb..0068123f6 100644
--- a/keras_core/ops/nn.py
+++ b/keras_core/ops/nn.py
@@ -166,9 +166,9 @@ def softsign(x):

     Example:

-    >>> x = keras_core.ops.convert_to_tensor([-0.100, -10.0, 1.0, 0.0, 10.0, 100.0])
-    >>> keras_core.ops.softsign(x)
-    Array([-0.09090909, -0.90909094, 0.5, 0.0, 0.90909094, 0.990099], dtype=float32)
+    >>> x = keras_core.ops.convert_to_tensor([-0.100, -10.0, 1.0, 0.0, 100.0])
+    >>> keras_core.ops.softsign(x)
+    Array([-0.09090909, -0.90909094, 0.5, 0.0, 0.990099], dtype=float32)
     """
     if any_symbolic_tensors((x,)):
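
As a quick sanity check on the doctest outputs introduced by these two patches, the following standalone NumPy sketch (not part of the patches, and not the keras_core implementation) evaluates the formulas the docstrings cite: sigmoid(x) = 1 / (1 + exp(-x)), silu(x) = x * sigmoid(x), softplus(x) = log(1 + exp(x)), and softsign(x) = x / (1 + |x|). The helper names are arbitrary; real code should call the keras_core.ops versions shown in the docstrings.

    import numpy as np

    def sigmoid(x):
        # Logistic sigmoid: 1 / (1 + exp(-x))
        return 1.0 / (1.0 + np.exp(-x))

    def silu(x):
        # SiLU / Swish: x * sigmoid(x)
        return x * sigmoid(x)

    def softplus(x):
        # Softplus: log(1 + exp(x))
        return np.log1p(np.exp(x))

    def softsign(x):
        # Softsign: x / (1 + |x|)
        return x / (1.0 + np.abs(x))

    x = np.array([-6.0, 1.0, 0.0, 1.0, 6.0], dtype=np.float32)
    print(sigmoid(x))   # approx. [0.00247262 0.7310586 0.5 0.7310586 0.9975274]
    print(silu(x))      # approx. [-0.0148357 0.7310586 0.  0.7310586 5.9851646]
    print(softplus(np.array([-0.555, 0.0, 0.555], dtype=np.float32)))
    # approx. [0.45366603 0.6931472 1.008666]
    print(softsign(np.array([-0.100, -10.0, 1.0, 0.0, 100.0], dtype=np.float32)))
    # approx. [-0.09090909 -0.90909094 0.5 0. 0.990099]

The printed values should agree with the float32 outputs in the updated docstrings up to print formatting and backend-specific repr (e.g. `array(...)` vs `Array(...)`).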