Align optimisation result (#334)
* Align optimisation result

* Fix scipy with jac test

* Update CHANGELOG.md

* Remove import

* Rename nit to n_iterations and add Result to init

* Combine imports

Co-authored-by: Brady Planden <55357039+BradyPlanden@users.noreply.github.com>

---------

Co-authored-by: Brady Planden <55357039+BradyPlanden@users.noreply.github.com>
NicolaCourtier and BradyPlanden authored Jun 7, 2024
1 parent fd171d0 commit bd733a8
Showing 7 changed files with 81 additions and 78 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -2,6 +2,7 @@

 ## Features

+- [#271](https://github.com/pybop-team/PyBOP/issues/271) - Aligns the output of the optimisers via a generalisation of the `Result` class.
 - [#315](https://github.com/pybop-team/PyBOP/pull/315) - Updates the `__init__` structure to remove circular import issues and minimises dependency imports across the codebase for faster PyBOP module import. Adds type hints to `BaseModel` and refactors rebuild parameter variables.
 - [#236](https://github.com/pybop-team/PyBOP/issues/236) - Restructures the optimiser classes, adds a new optimisation API through direct construction and keyword arguments, and fixes the setting of `max_iterations` and `_minimising`. Introduces the `pybop.BaseOptimiser`, `pybop.BasePintsOptimiser`, and `pybop.BaseSciPyOptimiser` classes.
 - [#321](https://github.com/pybop-team/PyBOP/pull/321) - Updates the Prior classes with a base class and adds a `problem.sample_initial_conditions` method to improve the stability of the SciPy.Minimize optimiser.
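In practice, the alignment means each optimiser's internal `_run` method now returns the same `Result` object, while `run()` continues to return the familiar `(x, final_cost)` pair. A minimal usage sketch, assuming a `cost` object has already been constructed (the optimiser choice and options here are illustrative only):

import pybop

# Any optimiser, PINTS- or SciPy-based, now populates the same Result object
optim = pybop.SciPyMinimize(cost=cost, maxiter=10)
x, final_cost = optim.run()

# After run(), the unified result is stored on the optimiser
print(optim.result.x)             # optimised parameter values
print(optim.result.final_cost)    # cost at those values
print(optim.result.n_iterations)  # renamed from nit
print(optim.result.scipy_result)  # raw OptimizeResult, or None for PINTS optimisers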
14 changes: 8 additions & 6 deletions examples/standalone/optimiser.py
@@ -1,7 +1,7 @@
 import numpy as np
 from scipy.optimize import minimize

-from pybop import BaseOptimiser
+from pybop import BaseOptimiser, Result


 class StandaloneOptimiser(BaseOptimiser):
@@ -65,18 +65,20 @@ def callback(x):
             self.log.append([x])

         # Run optimiser
-        self.result = minimize(
+        result = minimize(
             self.cost,
             self.x0,
             bounds=self._scipy_bounds,
             callback=callback,
             **self._options,
         )

-        self.result.final_cost = self.cost(self.result.x)
-        self._iterations = self.result.nit
-
-        return self.result.x, self.result.final_cost
+        return Result(
+            x=result.x,
+            final_cost=self.cost(result.x),
+            n_iterations=result.nit,
+            scipy_result=result,
+        )

     def name(self):
         """
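The standalone example can then be driven like any built-in optimiser. A short sketch, assuming `StandaloneOptimiser` is imported from the examples directory and accepts a cost plus SciPy keyword options, as the surrounding code suggests:

from examples.standalone.optimiser import StandaloneOptimiser

# `cost` is assumed to be a pybop cost object constructed elsewhere
optim = StandaloneOptimiser(cost=cost, maxiter=10)
x, final_cost = optim.run()

# The wrapped SciPy result travels with the unified Result
print(optim.result.n_iterations)
print(optim.result.scipy_result.message)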
2 changes: 1 addition & 1 deletion pybop/__init__.py
@@ -102,7 +102,7 @@
 #
 # Optimiser class
 #
-from .optimisers.base_optimiser import BaseOptimiser
+from .optimisers.base_optimiser import BaseOptimiser, Result
 from .optimisers.base_pints_optimiser import BasePintsOptimiser
 from .optimisers.scipy_optimisers import (
     BaseSciPyOptimiser,
34 changes: 32 additions & 2 deletions pybop/optimisers/base_optimiser.py
@@ -143,17 +143,18 @@ def run(self):
         final_cost : float
             The final cost associated with the best parameters.
         """
-        x, final_cost = self._run()
+        self.result = self._run()

         # Store the optimised parameters
+        x = self.result.x
         if hasattr(self.cost, "parameters"):
             self.store_optimised_parameters(x)

         # Check if parameters are viable
         if self.physical_viability:
             self.check_optimal_parameters(x)

-        return x, final_cost
+        return x, self.result.final_cost

     def _run(self):
         """
@@ -231,3 +232,32 @@ def set_allow_infeasible_solutions(self, allow=True):
             # Turn off this feature as there is no model
             self.physical_viability = False
             self.allow_infeasible_solutions = False
+
+
+class Result:
+    """
+    Stores the result of the optimisation.
+
+    Attributes
+    ----------
+    x : ndarray
+        The solution of the optimisation.
+    final_cost : float
+        The cost associated with the solution x.
+    n_iterations : int
+        Number of iterations performed by the optimiser.
+    scipy_result : scipy.optimize.OptimizeResult, optional
+        The result obtained from a SciPy optimiser.
+    """
+
+    def __init__(
+        self,
+        x: np.ndarray = None,
+        final_cost: float = None,
+        n_iterations: int = None,
+        scipy_result=None,
+    ):
+        self.x = x
+        self.final_cost = final_cost
+        self.n_iterations = n_iterations
+        self.scipy_result = scipy_result
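The new class is a plain container, so it can be constructed and inspected in isolation. A minimal sketch (the values shown are illustrative):

import numpy as np

from pybop import Result

# Build a result by hand, as an optimiser's _run() now does
result = Result(x=np.array([0.6, 0.5]), final_cost=0.02, n_iterations=25)
print(result.x, result.final_cost, result.n_iterations)
print(result.scipy_result)  # None unless set by a SciPy-based optimiser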
38 changes: 6 additions & 32 deletions pybop/optimisers/base_pints_optimiser.py
@@ -10,7 +10,7 @@
 from pints import SequentialEvaluator as PintsSequentialEvaluator
 from pints import strfloat as PintsStrFloat

-from pybop import BaseOptimiser
+from pybop import BaseOptimiser, Result


 class BasePintsOptimiser(BaseOptimiser):
@@ -171,10 +171,8 @@ def _run(self):
         Returns
         -------
-        x : numpy.ndarray
-            The best parameter set found by the optimization.
-        final_cost : float
-            The final cost associated with the best parameters.
+        result : pybop.Result
+            The result of the optimisation including the optimised parameter values and cost.

         See Also
         --------
@@ -353,12 +351,9 @@ def f(x, grad=None):
         if self._transformation is not None:
             x = self._transformation.to_model(x)

-        # Store result
-        final_cost = f if self.minimising else -f
-        self.result = Result(x=x, final_cost=final_cost, nit=self._iterations)
-
-        # Return best position and its cost
-        return x, final_cost
+        return Result(
+            x=x, final_cost=f if self.minimising else -f, n_iterations=self._iterations
+        )

     def f_guessed_tracking(self):
         """
@@ -513,24 +508,3 @@ def set_threshold(self, threshold=None):
             self._threshold = None
         else:
             self._threshold = float(threshold)
-
-
-class Result:
-    """
-    Stores the result of the optimisation.
-
-    Attributes
-    ----------
-    x : ndarray
-        The solution of the optimisation.
-    final_cost : float
-        The cost associated with the solution x.
-    nit : int
-        Number of iterations performed by the optimiser.
-    """
-
-    def __init__(self, x=None, final_cost=None, nit=None):
-        self.x = x
-        self.final_cost = final_cost
-        self.nit = nit
38 changes: 16 additions & 22 deletions pybop/optimisers/scipy_optimisers.py
@@ -1,7 +1,7 @@
 import numpy as np
 from scipy.optimize import differential_evolution, minimize

-from pybop import BaseOptimiser
+from pybop import BaseOptimiser, Result


 class BaseSciPyOptimiser(BaseOptimiser):
@@ -65,17 +65,17 @@ def _run(self):
         Returns
         -------
-        x : numpy.ndarray
-            The best parameter set found by the optimization.
-        final_cost : float
-            The final cost associated with the best parameters.
+        result : pybop.Result
+            The result of the optimisation including the optimised parameter values and cost.
         """
-        self.result = self._run_optimiser()
+        result = self._run_optimiser()

-        self.result.final_cost = self.cost(self.result.x)
-        self._iterations = self.result.nit
-
-        return self.result.x, self.result.final_cost
+        return Result(
+            x=result.x,
+            final_cost=self.cost(result.x),
+            n_iterations=result.nit,
+            scipy_result=result,
+        )


 class SciPyMinimize(BaseSciPyOptimiser):
@@ -147,9 +147,8 @@ def _run_optimiser(self):
         Returns
         -------
-        tuple
-            A tuple (x, final_cost) containing the optimized parameters and the value of `cost_function`
-            at the optimum.
+        result : scipy.optimize.OptimizeResult
+            The result of the optimisation including the optimised parameter values and cost.
         """
         self.log = [[self.x0]]

@@ -187,16 +186,14 @@ def cost_wrapper(x):
             L, dl = self.cost.evaluateS1(x)
             return L, dl if self.minimising else -L, -dl

-        result = minimize(
+        return minimize(
             cost_wrapper,
             self.x0,
             bounds=self._scipy_bounds,
             callback=callback,
             **self._options,
         )

-        return result
-
     def name(self):
         """
         Provides the name of the optimization strategy.
@@ -290,9 +287,8 @@ def _run_optimiser(self):
         Returns
         -------
-        tuple
-            A tuple (x, final_cost) containing the optimized parameters and the value of
-            the cost function at the optimum.
+        result : scipy.optimize.OptimizeResult
+            The result of the optimisation including the optimised parameter values and cost.
         """
         if self.x0 is not None:
             print(
@@ -307,15 +303,13 @@ def callback(x, convergence):
         def cost_wrapper(x):
             return self.cost(x) if self.minimising else -self.cost(x)

-        result = differential_evolution(
+        return differential_evolution(
             cost_wrapper,
             self._scipy_bounds,
             callback=callback,
             **self._options,
         )

-        return result
-
     def name(self):
         """
         Provides the name of the optimization strategy.
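Because the raw OptimizeResult now travels on the unified Result, SciPy-specific diagnostics remain available after a run. A short sketch, again assuming a pre-built `cost` object:

import pybop

optim = pybop.SciPyMinimize(cost=cost, method="L-BFGS-B", jac=True, maxiter=10)
x, final_cost = optim.run()

# SciPy diagnostics sit alongside the aligned fields
scipy_res = optim.result.scipy_result
print(scipy_res.success, scipy_res.message, scipy_res.nit)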
32 changes: 17 additions & 15 deletions tests/unit/test_optimisation.py
@@ -124,7 +124,7 @@ def test_optimiser_kwargs(self, cost, optimiser):

         # Check maximum iterations
         optim.run()
-        assert optim._iterations == 1
+        assert optim.result.n_iterations == 1

         if optimiser in [pybop.GradientDescent, pybop.Adam, pybop.NelderMead]:
             # Ignored bounds
@@ -218,16 +218,18 @@ def test_optimiser_kwargs(self, cost, optimiser):
             assert optim.x0 == x0_new
             assert optim.x0 != cost.x0

-        if optimiser in [pybop.SciPyMinimize]:
-            # Check a method that uses gradient information
-            optimiser(cost=cost, method="L-BFGS-B", jac=True, maxiter=10)
-            optim.run()
-            assert optim._iterations > 0
-            with pytest.raises(
-                ValueError,
-                match="Expected the jac option to be either True, False or None.",
-            ):
-                optim = optimiser(cost=cost, jac="Invalid string")
+    @pytest.mark.unit
+    def test_scipy_minimize_with_jac(self, cost):
+        # Check a method that uses gradient information
+        optim = pybop.SciPyMinimize(cost=cost, method="L-BFGS-B", jac=True, maxiter=10)
+        optim.run()
+        assert optim.result.scipy_result.success is True
+
+        with pytest.raises(
+            ValueError,
+            match="Expected the jac option to be either True, False or None.",
+        ):
+            optim = pybop.SciPyMinimize(cost=cost, jac="Invalid string")

     @pytest.mark.unit
     def test_single_parameter(self, cost):
@@ -336,14 +338,14 @@ def test_halting(self, cost):
         # Test max evaluations
         optim = pybop.GradientDescent(cost=cost, max_evaluations=1, verbose=True)
         x, __ = optim.run()
-        assert optim._iterations == 1
+        assert optim.result.n_iterations == 1

         # Test max unchanged iterations
         optim = pybop.GradientDescent(
             cost=cost, max_unchanged_iterations=1, min_iterations=1
         )
         x, __ = optim.run()
-        assert optim._iterations == 2
+        assert optim.result.n_iterations == 2

         # Test guessed values
         optim.set_f_guessed_tracking(True)
@@ -385,7 +387,7 @@ def optimiser_error():

         optim.pints_optimiser.stop = optimiser_error
         optim.run()
-        assert optim._iterations == 1
+        assert optim.result.n_iterations == 1

         # Test no stopping condition
         with pytest.raises(
@@ -403,7 +405,7 @@ def test_infeasible_solutions(self, cost):
         for optimiser in [pybop.SciPyMinimize, pybop.GradientDescent]:
             optim = optimiser(cost=cost, allow_infeasible_solutions=False, maxiter=1)
             optim.run()
-            assert optim._iterations == 1
+            assert optim.result.n_iterations == 1

     @pytest.mark.unit
     def test_unphysical_result(self, cost):
