Skip to content

Commit

Permalink
Remove cross entropy sigmoid loss as deprecated (#496)
Browse files Browse the repository at this point in the history
* remove loss

* fix copyright

* add reno
  • Loading branch information
adekusar-drl authored Nov 14, 2022
1 parent 4b6191d commit c45a510
Show file tree
Hide file tree
Showing 5 changed files with 6 additions and 39 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def __init__(
function is applied to the index and weighted with the corresponding probability.
loss: A target loss function to be used in training. Default is `squared_error`,
i.e. L2 loss. Can be given either as a string for 'absolute_error' (i.e. L1 Loss),
'squared_error', 'cross_entropy', 'cross_entropy_sigmoid', or as a loss function
'squared_error', 'cross_entropy', or as a loss function
implementing the Loss interface.
one_hot: Determines in the case of a multi-dimensional result of the
neural_network how to interpret the result. If True it is interpreted as a single
Expand Down
7 changes: 1 addition & 6 deletions qiskit_machine_learning/algorithms/trainable_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,18 +26,15 @@
L1Loss,
L2Loss,
CrossEntropyLoss,
CrossEntropySigmoidLoss,
)

from .objective_functions import ObjectiveFunction
from .serializable_model import SerializableModelMixin
from ..deprecation import deprecate_values


class TrainableModel(SerializableModelMixin):
"""Base class for ML model that defines a scikit-learn like interface for Estimators."""

@deprecate_values("0.4.0", {"loss": {"cross_entropy_sigmoid": "<unsupported>"}})
def __init__(
self,
neural_network: NeuralNetwork,
Expand All @@ -61,7 +58,7 @@ def __init__(
function is applied to the index and weighted with the corresponding probability.
loss: A target loss function to be used in training. Default is `squared_error`,
i.e. L2 loss. Can be given either as a string for 'absolute_error' (i.e. L1 Loss),
'squared_error', 'cross_entropy', 'cross_entropy_sigmoid', or as a loss function
'squared_error', 'cross_entropy', or as a loss function
implementing the Loss interface.
optimizer: An instance of an optimizer to be used in training. When `None` defaults to SLSQP.
warm_start: Use weights from previous fit to start next fit.
Expand All @@ -87,8 +84,6 @@ def __init__(
self._loss = L2Loss()
elif loss == "cross_entropy":
self._loss = CrossEntropyLoss()
elif loss == "cross_entropy_sigmoid":
self._loss = CrossEntropySigmoidLoss()
else:
raise QiskitMachineLearningError(f"Unknown loss {loss}!")

Expand Down
3 changes: 0 additions & 3 deletions qiskit_machine_learning/utils/loss_functions/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,6 @@
L1Loss
L2Loss
CrossEntropyLoss
CrossEntropySigmoidLoss
SVCLoss
"""

Expand All @@ -48,7 +47,6 @@
L1Loss,
L2Loss,
CrossEntropyLoss,
CrossEntropySigmoidLoss,
)

from .kernel_loss_functions import KernelLoss, SVCLoss
Expand All @@ -59,6 +57,5 @@
"L1Loss",
"L2Loss",
"CrossEntropyLoss",
"CrossEntropySigmoidLoss",
"SVCLoss",
]
29 changes: 0 additions & 29 deletions qiskit_machine_learning/utils/loss_functions/loss_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@

import numpy as np

from ...deprecation import warn_deprecated, DeprecatedType
from ...exceptions import QiskitMachineLearningError


Expand Down Expand Up @@ -176,31 +175,3 @@ def gradient(self, predict: np.ndarray, target: np.ndarray) -> np.ndarray:
grad = np.einsum("ij,i->ij", predict, np.sum(target, axis=1)) - target

return grad


class CrossEntropySigmoidLoss(Loss):
    """
    This class computes the cross entropy sigmoid loss and should be used for binary classification.

    The loss evaluated is the binary cross entropy applied to ``sigmoid(predict)``, i.e.
    ``predict`` is interpreted as raw logits. ``evaluate`` and ``gradient`` are consistent:
    the returned gradient is exactly the derivative of the returned loss with respect to
    ``predict``.

    Deprecated since version 0.4.0.
    """

    def __init__(self) -> None:
        # Emit the deprecation warning on construction; the class is scheduled for removal.
        warn_deprecated("0.4.0", DeprecatedType.CLASS, "CrossEntropySigmoidLoss")
        super().__init__()

    def evaluate(self, predict: np.ndarray, target: np.ndarray) -> np.ndarray:
        """Return the element-wise binary cross entropy of ``sigmoid(predict)`` vs ``target``.

        Args:
            predict: Raw model outputs (logits).
            target: Binary labels (0/1); must contain exactly two distinct values.

        Raises:
            QiskitMachineLearningError: If ``target`` does not contain exactly two classes.
        """
        self._validate_shapes(predict, target)

        if len(set(target)) != 2:
            raise QiskitMachineLearningError(
                "Sigmoid Cross Entropy is used for binary classification!"
            )

        # Numerically stable binary cross entropy with logits:
        #   max(x, 0) - x*t + log(1 + exp(-|x|))
        #     == -t*log(sigmoid(x)) - (1 - t)*log(1 - sigmoid(x))
        # This avoids overflow in exp() for large |x| and matches gradient() below,
        # whereas the previous implementation returned sigmoid(cross_entropy(...)),
        # which is not a cross-entropy value and is inconsistent with gradient().
        return np.maximum(predict, 0) - predict * target + np.log1p(np.exp(-np.abs(predict)))

    def gradient(self, predict: np.ndarray, target: np.ndarray) -> np.ndarray:
        """Return the gradient of the sigmoid cross entropy w.r.t. ``predict``.

        Algebraically this is ``sigmoid(predict) - target``.
        """
        self._validate_shapes(predict, target)

        return target * (1.0 / (1.0 + np.exp(-predict)) - 1) + (1 - target) * (
            1.0 / (1.0 + np.exp(-predict))
        )
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
---
deprecations:
- |
The previously deprecated ``CrossEntropySigmoidLoss`` loss function has been removed.

0 comments on commit c45a510

Please sign in to comment.