Added some docstrings ops/nn.py #677

Merged: 4 commits, Aug 8, 2023
2 changes: 1 addition & 1 deletion keras_core/activations/activations.py
@@ -429,7 +429,7 @@ def log_softmax(x, axis=-1):
is applied along.

Args:
x : Input tensor.
x: Input tensor.
axis: Integer, axis along which the softmax is applied.
"""
return ops.log_softmax(x, axis=axis)
211 changes: 211 additions & 0 deletions keras_core/ops/nn.py
@@ -22,6 +22,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.relu", "keras_core.ops.nn.relu"])
def relu(x):
"""Rectified linear unit activation function.

It is defined as `f(x) = max(0, x)`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_relu = keras_core.ops.relu(x)
>>> print(x_relu)
array([0., 0., 1.], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Relu().symbolic_call(x)
return backend.nn.relu(x)
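
For reference (not part of the diff), the example values above follow directly from the stated definition; a minimal NumPy sketch, using the same input as the doctest:

import numpy as np

x = np.array([-1.0, 0.0, 1.0])
reference = np.maximum(0.0, x)  # f(x) = max(0, x), as in the docstring
print(reference)  # [0. 0. 1.] -- same values as the example output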
@@ -37,6 +55,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.relu6", "keras_core.ops.nn.relu6"])
def relu6(x):
"""Rectified linear unit activation function with upper bound of 6.

It is defined as `f(x) = np.clip(x, 0, 6)`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1., 6., 7.])
>>> x_relu6 = keras_core.ops.relu6(x)
>>> print(x_relu6)
array([0., 0., 1., 6., 6.], shape=(5,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Relu6().symbolic_call(x)
return backend.nn.relu6(x)
@@ -52,6 +88,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.sigmoid", "keras_core.ops.nn.sigmoid"])
def sigmoid(x):
"""Sigmoid activation function.

It is defined as `f(x) = 1 / (1 + exp(-x))`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_sigmoid = keras_core.ops.sigmoid(x)
>>> print(x_sigmoid)
array([0.26894143, 0.5, 0.7310586 ], dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Sigmoid().symbolic_call(x)
return backend.nn.sigmoid(x)
@@ -67,6 +121,25 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.softplus", "keras_core.ops.nn.softplus"])
def softplus(x):
"""Softplus activation function.

It is defined as `f(x) = log(exp(x) + 1)`, where `log` is the natural
logarithm and `exp` is the exponential function.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_softplus = keras_core.ops.softplus(x)
>>> print(x_softplus)
array([0.31326166, 0.6931472 , 1.3132616 ], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Softplus().symbolic_call(x)
return backend.nn.softplus(x)
@@ -82,6 +155,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.softsign", "keras_core.ops.nn.softsign"])
def softsign(x):
"""Softsign activation function.

It is defined as `f(x) = x / (abs(x) + 1)`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_softsign = keras_core.ops.softsign(x)
>>> print(x_softsign)
array([-0.5, 0., 0.5], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Softsign().symbolic_call(x)
return backend.nn.softsign(x)
@@ -97,6 +188,24 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.silu", "keras_core.ops.nn.silu"])
def silu(x):
"""Sigmoid-weighted linear unit activation function.

It is defined as `f(x) = x * sigmoid(x)`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_silu = keras_core.ops.silu(x)
>>> print(x_silu)
array([-0.26894143, 0., 0.7310586], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Silu().symbolic_call(x)
return backend.nn.silu(x)
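
For reference, a minimal NumPy sketch of `f(x) = x * sigmoid(x)` that reproduces the example values above (the helper name is illustrative, not from the diff):

import numpy as np

def sigmoid_ref(x):
    # 1 / (1 + exp(-x)), as in the sigmoid docstring earlier in this file
    return 1.0 / (1.0 + np.exp(-x))

x = np.array([-1.0, 0.0, 1.0])
print(x * sigmoid_ref(x))  # approx [-0.26894142  0.  0.73105858]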
@@ -132,6 +241,24 @@ def compute_output_spec(self, x):
]
)
def log_sigmoid(x):
"""Logarithm of the sigmoid activation function.

It is defined as `f(x) = log(1 / (1 + exp(-x)))`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_log_sigmoid = keras_core.ops.log_sigmoid(x)
>>> print(x_log_sigmoid)
array([-1.3132616, -0.6931472, -0.3132616], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return LogSigmoid().symbolic_call(x)
return backend.nn.log_sigmoid(x)
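
For reference, a minimal NumPy sketch of `log(1 / (1 + exp(-x)))`; the second line is the algebraically equivalent `-softplus(-x)` form, shown only as a cross-check:

import numpy as np

x = np.array([-1.0, 0.0, 1.0])
direct = np.log(1.0 / (1.0 + np.exp(-x)))  # definition from the docstring
stable = -np.log1p(np.exp(-x))             # equivalent: -softplus(-x)
print(direct)  # approx [-1.31326169 -0.69314718 -0.31326169]
print(stable)  # same values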
@@ -153,6 +280,28 @@ def compute_output_spec(self, x):
["keras_core.ops.leaky_relu", "keras_core.ops.nn.leaky_relu"]
)
def leaky_relu(x, negative_slope=0.2):
"""Leaky version of a Rectified Linear Unit.

It allows a small gradient when the unit is not active. It is defined as:

`f(x) = negative_slope * x for x < 0` or `f(x) = x for x >= 0`.

Args:
x: Input tensor.
negative_slope: Slope of the activation function at x < 0.
Defaults to `0.2`.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_leaky_relu = keras_core.ops.leaky_relu(x)
>>> print(x_leaky_relu)
array([-0.2, 0. , 1. ], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return LeakyRelu(negative_slope).symbolic_call(x)
return backend.nn.leaky_relu(x, negative_slope=negative_slope)
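
For reference, a minimal NumPy sketch showing the role of `negative_slope` (the function name is illustrative); the first call uses the default `0.2` from the example:

import numpy as np

def leaky_relu_ref(x, negative_slope=0.2):
    # f(x) = negative_slope * x for x < 0, f(x) = x for x >= 0
    return np.where(x < 0, negative_slope * x, x)

x = np.array([-1.0, 0.0, 1.0])
print(leaky_relu_ref(x))                       # [-0.2  0.   1. ] -- matches the example
print(leaky_relu_ref(x, negative_slope=0.01))  # [-0.01  0.    1.  ]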
@@ -173,6 +322,26 @@ def compute_output_spec(self, x):
]
)
def hard_sigmoid(x):
"""Hard sigmoid activation function.

It is defined as:

`0 if x < -2.5`, `1 if x > 2.5`, `(0.2 * x) + 0.5 if -2.5 <= x <= 2.5`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_hard_sigmoid = keras_core.ops.hard_sigmoid(x)
>>> print(x_hard_sigmoid)
array([0.3, 0.5, 0.7], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return HardSigmoid().symbolic_call(x)
return backend.nn.hard_sigmoid(x)
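
For reference, the piecewise definition above collapses to a single clip; a minimal NumPy sketch with inputs that hit all three branches:

import numpy as np

x = np.array([-3.0, -1.0, 0.0, 1.0, 3.0])
hard_sig = np.clip(0.2 * x + 0.5, 0.0, 1.0)  # 0 below x=-2.5, 1 above x=2.5, linear in between
print(hard_sig)  # [0.  0.3 0.5 0.7 1. ]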
@@ -192,6 +361,27 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.elu", "keras_core.ops.nn.elu"])
def elu(x, alpha=1.0):
"""Exponential Linear Unit.

It is defined as:

`f(x) = alpha * (exp(x) - 1.) for x < 0`, `f(x) = x for x >= 0`.

Args:
x: Input tensor.
alpha: A scalar, slope of the negative section (the factor applied to
            `exp(x) - 1` for `x < 0`). Defaults to `1.0`.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_elu = keras_core.ops.elu(x)
>>> print(x_elu)
array([-0.63212055, 0., 1.], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Elu(alpha).symbolic_call(x)
return backend.nn.elu(x, alpha=alpha)
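
For reference, a minimal NumPy sketch of the definition (the helper name is illustrative); note that `alpha` only scales the negative branch, so the non-negative example values are unchanged:

import numpy as np

def elu_ref(x, alpha=1.0):
    # f(x) = alpha * (exp(x) - 1) for x < 0, f(x) = x for x >= 0
    return np.where(x < 0, alpha * (np.exp(x) - 1.0), x)

x = np.array([-1.0, 0.0, 1.0])
print(elu_ref(x))             # approx [-0.63212056  0.  1.]
print(elu_ref(x, alpha=0.5))  # approx [-0.31606028  0.  1.]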
@@ -207,6 +397,27 @@ def compute_output_spec(self, x):

@keras_core_export(["keras_core.ops.selu", "keras_core.ops.nn.selu"])
def selu(x):
"""Scaled Exponential Linear Unit (SELU).

It is defined as:

`f(x) = scale * alpha * (exp(x) - 1.) for x < 0`,
`f(x) = scale * x for x >= 0`.

Args:
x: Input tensor.

Returns:
A tensor with the same shape as `x`.

Example:

>>> x = np.array([-1., 0., 1.])
>>> x_selu = keras_core.ops.selu(x)
>>> print(x_selu)
array([-1.11133055, 0., 1.05070098], shape=(3,), dtype=float64)

"""
if any_symbolic_tensors((x,)):
return Selu().symbolic_call(x)
return backend.nn.selu(x)
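
For reference, the example values above imply the standard SELU constants from Klambauer et al. (2017), roughly `scale = 1.05070098` and `alpha = 1.67326324`, which the docstring does not spell out; a minimal NumPy sketch assuming those constants:

import numpy as np

SCALE = 1.05070098  # standard SELU constants (approximate)
ALPHA = 1.67326324

def selu_ref(x):
    # f(x) = scale * alpha * (exp(x) - 1) for x < 0, f(x) = scale * x for x >= 0
    return np.where(x < 0, SCALE * ALPHA * (np.exp(x) - 1.0), SCALE * x)

x = np.array([-1.0, 0.0, 1.0])
print(selu_ref(x))  # approx [-1.111331  0.  1.050701] -- consistent with the example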