Hyperparameters optimization with Optuna #150

Merged
merged 10 commits on Mar 5, 2024
68 changes: 68 additions & 0 deletions discrete_optimization/generic_tools/callbacks/optuna.py
@@ -0,0 +1,68 @@
# Copyright (c) 2024 AIRBUS and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from __future__ import annotations # see annotations as str

import logging
from typing import Optional

from discrete_optimization.generic_tools.callbacks.callback import Callback
from discrete_optimization.generic_tools.do_solver import SolverDO
from discrete_optimization.generic_tools.result_storage.result_storage import (
ResultStorage,
)

logger = logging.getLogger(__name__)


try:
import optuna
except ImportError:
logger.warning("You should install optuna to use callbacks for optuna.")


class OptunaPruningSingleFitCallback(Callback):
"""Callback to prune unpromising trials during Optuna hyperparameters tuning.

Adapted to single objective optimization (res.fit is a float)

Args:
trial:
A :class:`optuna.trial.Trial` corresponding to the current evaluation of the
objective function.
optuna_report_nb_steps: report an intermediate result every `optuna_report_nb_steps` steps.
When the number of iterations is high, setting this to 1 could slow down a single trial too much.

"""

def __init__(
self, trial: optuna.trial.Trial, optuna_report_nb_steps: int = 1, **kwargs
) -> None:
self.report_nb_steps = optuna_report_nb_steps
self.trial = trial

def on_step_end(
self, step: int, res: ResultStorage, solver: SolverDO
) -> Optional[bool]:
"""Called at the end of an optimization step.

Args:
step: index of step
res: current result storage
solver: solver using the callback

Returns:
If `True`, the optimization process is stopped, else it goes on.

"""
if step % self.report_nb_steps == 0:
fit = res.best_fit

# Report current score and step to Optuna's trial.
self.trial.report(float(fit), step=step)

# Prune trial if needed
if self.trial.should_prune():
message = "Trial was pruned at step {}.".format(step)
raise optuna.TrialPruned(message)
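
As a hedged usage sketch (not part of this PR's diff): the callback is meant to be instantiated inside an Optuna objective function. `MySolver`, `my_problem`, and the study settings below are illustrative assumptions.

import optuna

from discrete_optimization.generic_tools.callbacks.optuna import (
    OptunaPruningSingleFitCallback,
)


def objective(trial: optuna.trial.Trial) -> float:
    # Hypothetical solver and problem instance, used only for illustration.
    solver = MySolver(problem=my_problem)
    res = solver.solve(
        callbacks=[
            # Report best_fit every 10 steps and let Optuna prune bad trials.
            OptunaPruningSingleFitCallback(trial=trial, optuna_report_nb_steps=10)
        ]
    )
    return float(res.best_fit)


# MedianPruner compares each reported step against the median of past trials.
study = optuna.create_study(direction="maximize", pruner=optuna.pruners.MedianPruner())
study.optimize(objective, n_trials=20)
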
99 changes: 97 additions & 2 deletions discrete_optimization/generic_tools/do_solver.py
@@ -3,31 +3,50 @@
# Copyright (c) 2022 AIRBUS and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import annotations # see annotations as str

from abc import abstractmethod
from typing import Any, List, Optional
from typing import TYPE_CHECKING, Any, Dict, List, Optional

from discrete_optimization.generic_tools.callbacks.callback import Callback
from discrete_optimization.generic_tools.do_problem import (
ParamsObjectiveFunction,
Problem,
build_aggreg_function_and_params_objective,
)
from discrete_optimization.generic_tools.hyperparameters.hyperparameter import (
Hyperparameter,
)
from discrete_optimization.generic_tools.result_storage.result_storage import (
ResultStorage,
)

if TYPE_CHECKING: # only for type checkers
try:
import optuna
except ImportError:
pass


class SolverDO:
"""Base class for a discrete-optimization solver."""

problem: Problem
hyperparameters: List[Hyperparameter] = []
"""Hyperparameters available for this solver.

These hyperparameters are to be fed to the **kwargs found in
- __init__()
- init_model() (when available)
- solve()

"""

def __init__(
self,
problem: Problem,
params_objective_function: Optional[ParamsObjectiveFunction] = None,
**kwargs: Any
**kwargs: Any,
):
self.problem = problem
(
@@ -39,6 +58,82 @@ def __init__(
params_objective_function=params_objective_function,
)

@classmethod
def get_hyperparameters_names(cls) -> List[str]:
"""List of hyperparameters names."""
return [h.name for h in cls.hyperparameters]

@classmethod
def get_hyperparameters_by_name(cls) -> Dict[str, Hyperparameter]:
"""Mapping from name to corresponding hyperparameter."""
return {h.name: h for h in cls.hyperparameters}

@classmethod
def get_hyperparameter(cls, name: str) -> Hyperparameter:
"""Get hyperparameter from given name."""
return cls.get_hyperparameters_by_name()[name]

@classmethod
def suggest_hyperparameter_value_with_optuna(
cls, trial: optuna.trial.Trial, name: str, **kwargs
) -> Any:
"""Suggest hyperparameter value during an Optuna trial.

This can be used during Optuna hyperparameters tuning.

Args:
trial: optuna trial during hyperparameters tuning
name: name of the hyperparameter to choose
**kwargs: options for optuna hyperparameter suggestions

Returns:
    The value suggested by Optuna for this hyperparameter.

kwargs can be used to pass relevant arguments to
- trial.suggest_float()
- trial.suggest_int()
- trial.suggest_categorical()

For instance it can
- add a low/high value if not already defined for the hyperparameter,
  or override it to narrow the search (for float or int hyperparameters)
- add a step or log argument (for float or int hyperparameters,
  see optuna.trial.Trial.suggest_float())
- override choices for categorical or enum hyperparameters to narrow the search

"""
return cls.get_hyperparameter(name=name).suggest_with_optuna(
trial=trial, **kwargs
)

@classmethod
def suggest_hyperparameters_values_with_optuna(
cls,
trial: optuna.trial.Trial,
names: List[str],
kwargs_by_name: Optional[Dict[str, Dict[str, Any]]] = None,
) -> List[Any]:
"""Suggest hyperparameter value during an Optuna trial.

Args:
trial: optuna trial during hyperparameters tuning
names: names of the hyperparameters to choose
kwargs_by_name: options for optuna hyperparameter suggestions, by hyperparameter name

Returns:
    The list of suggested values, in the same order as `names`.

kwargs_by_name[some_name] will be passed as **kwargs to
suggest_hyperparameter_value_with_optuna(name=some_name).

"""
if kwargs_by_name is None:
kwargs_by_name = {}
return [
cls.suggest_hyperparameter_value_with_optuna(
trial=trial, name=name, **kwargs_by_name.get(name, {})
)
for name in names
]

@abstractmethod
def solve(
self, callbacks: Optional[List[Callback]] = None, **kwargs: Any
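
A hedged sketch of how the two suggestion helpers above could be used inside an Optuna objective; `MySolver`, its hyperparameter names (`nb_iterations`, `temperature`), and `my_problem` are assumptions for illustration only.

import optuna


def objective(trial: optuna.trial.Trial) -> float:
    # Draw one value per requested hyperparameter; narrow the search for
    # "temperature" by overriding its bounds and sampling on a log scale.
    names = ["nb_iterations", "temperature"]
    values = MySolver.suggest_hyperparameters_values_with_optuna(
        trial=trial,
        names=names,
        kwargs_by_name={"temperature": {"low": 0.1, "high": 2.0, "log": True}},
    )
    hyperparameters = dict(zip(names, values))

    # Feed the suggested values to the solver, as described in the class docstring.
    solver = MySolver(problem=my_problem, **hyperparameters)
    res = solver.solve(**hyperparameters)
    return float(res.best_fit)
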
@@ -0,0 +1,3 @@
# Copyright (c) 2024 AIRBUS and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
@@ -0,0 +1,74 @@
# Copyright (c) 2024 AIRBUS and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from __future__ import annotations # see annotations as str

from dataclasses import dataclass, field
from enum import Enum
from typing import TYPE_CHECKING, Any, List, Optional

if TYPE_CHECKING: # only for type checkers
try:
import optuna
except ImportError:
pass


@dataclass
class Hyperparameter:
name: str
default: Optional[Any] = None

def suggest_with_optuna(self, trial: optuna.trial.Trial, **kwargs: Any) -> Any:
...


@dataclass
class IntegerHyperparameter(Hyperparameter):
low: Optional[int] = None
high: Optional[int] = None
default: Optional[int] = None

def suggest_with_optuna(self, trial: optuna.trial.Trial, **kwargs: Any) -> Any:
if self.low is not None and "low" not in kwargs:
kwargs["low"] = self.low
if self.high is not None and "high" not in kwargs:
kwargs["high"] = self.high
return trial.suggest_int(name=self.name, **kwargs)


@dataclass
class FloatHyperparameter(Hyperparameter):
low: Optional[float] = None
high: Optional[float] = None
default: Optional[float] = None

def suggest_with_optuna(self, trial: optuna.trial.Trial, **kwargs: Any) -> Any:
if self.low is not None and "low" not in kwargs:
kwargs["low"] = self.low
if self.high is not None and "high" not in kwargs:
kwargs["high"] = self.high
return trial.suggest_float(name=self.name, **kwargs)


@dataclass
class CategoricalHyperparameter(Hyperparameter):
choices: List[Any] = field(default_factory=list)

def suggest_with_optuna(self, trial: optuna.trial.Trial, **kwargs: Any) -> Any:
if self.choices is not None and "choices" not in kwargs:
kwargs["choices"] = self.choices
return trial.suggest_categorical(name=self.name, **kwargs)


class EnumHyperparameter(CategoricalHyperparameter):
def __init__(self, name: str, enum: Enum, default: Optional[Any] = None):
super().__init__(name, choices=list(enum), default=default)
self.enum = enum

def suggest_with_optuna(self, trial: optuna.trial.Trial, **kwargs: Any) -> Any:
choices = kwargs.get("choices", self.choices)
choices_str = [c.name for c in choices]
choice_str = trial.suggest_categorical(name=self.name, choices=choices_str)
return self.enum[choice_str]
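
For context, an illustrative sketch of how these dataclasses are meant to be declared on a solver subclass (matching the hypothetical `MySolver` of the earlier sketch); the class, enum, and hyperparameter names are assumptions, not part of this PR.

from enum import Enum

from discrete_optimization.generic_tools.do_solver import SolverDO
from discrete_optimization.generic_tools.hyperparameters.hyperparameter import (
    CategoricalHyperparameter,
    EnumHyperparameter,
    FloatHyperparameter,
    IntegerHyperparameter,
)


class MyMethod(Enum):
    GREEDY = 0
    RANDOM = 1


class MySolver(SolverDO):
    # Declared once on the class; picked up by the suggest_* classmethods of SolverDO.
    hyperparameters = [
        IntegerHyperparameter(name="nb_iterations", low=10, high=1000, default=100),
        FloatHyperparameter(name="temperature", low=0.1, high=10.0, default=1.0),
        CategoricalHyperparameter(name="use_restart", choices=[True, False]),
        # EnumHyperparameter suggests the member name as a string, then maps it back to the enum.
        EnumHyperparameter(name="method", enum=MyMethod),
    ]
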
@@ -68,6 +68,13 @@ def __init__(
self.min = None
self.max = None

@property
def best_fit(self):
if self.maximize:
return self.max
else:
return self.min

def add_solution(self, solution: Solution, fitness: fitness_class) -> None:
self.list_solution_fits += [(solution, fitness)]
if solution not in self.map_solutions:
5 changes: 3 additions & 2 deletions discrete_optimization/pickup_vrp/solver/lp_solver.py
@@ -27,7 +27,6 @@
from discrete_optimization.generic_tools.do_problem import (
ParamsObjectiveFunction,
Solution,
build_aggreg_function_and_params_objective,
)
from discrete_optimization.generic_tools.graph_api import Graph
from discrete_optimization.generic_tools.lp_tools import (
@@ -39,6 +38,7 @@
TupleFitness,
)
from discrete_optimization.pickup_vrp.gpdp import GPDP, Edge, GPDPSolution, Node
from discrete_optimization.pickup_vrp.solver.pickup_vrp_solver import SolverPickupVrp

try:
import gurobipy as grb
@@ -157,13 +157,14 @@ def retrieve_solutions(
return list_results


class LinearFlowSolver(GurobiMilpSolver):
class LinearFlowSolver(GurobiMilpSolver, SolverPickupVrp):
problem: GPDP

def __init__(
self,
problem: GPDP,
params_objective_function: Optional[ParamsObjectiveFunction] = None,
**kwargs: Any,
):
super().__init__(
problem=problem, params_objective_function=params_objective_function
4 changes: 3 additions & 1 deletion discrete_optimization/pickup_vrp/solver/lp_solver_pymip.py
@@ -37,6 +37,7 @@
convert_temporaryresult_to_gpdpsolution,
reevaluate_result,
)
from discrete_optimization.pickup_vrp.solver.pickup_vrp_solver import SolverPickupVrp

logger = logging.getLogger(__name__)

@@ -89,13 +90,14 @@ def retrieve_ith_solution(
return results, obj


class LinearFlowSolver(PymipMilpSolver):
class LinearFlowSolver(PymipMilpSolver, SolverPickupVrp):
problem: GPDP

def __init__(
self,
problem: GPDP,
params_objective_function: Optional[ParamsObjectiveFunction] = None,
**kwargs: Any,
):
super().__init__(
problem=problem, params_objective_function=params_objective_function
17 changes: 16 additions & 1 deletion discrete_optimization/pickup_vrp/solver/ortools_solver.py
@@ -27,6 +27,10 @@
from discrete_optimization.generic_tools.do_problem import ParamsObjectiveFunction
from discrete_optimization.generic_tools.do_solver import SolverDO
from discrete_optimization.generic_tools.exceptions import SolveEarlyStop
from discrete_optimization.generic_tools.hyperparameters.hyperparameter import (
CategoricalHyperparameter,
EnumHyperparameter,
)
from discrete_optimization.generic_tools.result_storage.result_storage import (
ResultStorage,
)
@@ -37,6 +41,7 @@
build_matrix_distance,
build_matrix_time,
)
from discrete_optimization.pickup_vrp.solver.pickup_vrp_solver import SolverPickupVrp

logger = logging.getLogger(__name__)

@@ -150,15 +155,25 @@ def apply_cost(
"""


class ORToolsGPDP(SolverDO):
class ORToolsGPDP(SolverPickupVrp):
problem: GPDP
hyperparameters = [
EnumHyperparameter(name="first_solution_strategy", enum=FirstSolutionStrategy),
EnumHyperparameter(
name="local_search_metaheuristic", enum=LocalSearchMetaheuristic
),
CategoricalHyperparameter(name="use_lns", choices=[True, False]),
CategoricalHyperparameter(name="use_cp", choices=[True, False]),
CategoricalHyperparameter(name="use_cp_sat", choices=[True, False]),
]

def __init__(
self,
problem: GPDP,
factor_multiplier_distance: float = 1,
factor_multiplier_time: float = 1,
params_objective_function: Optional[ParamsObjectiveFunction] = None,
**kwargs: Any,
):
super().__init__(
problem=problem, params_objective_function=params_objective_function
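
Putting the pieces of this PR together, a hedged end-to-end sketch of tuning ORToolsGPDP with Optuna. The GPDP instance `gpdp_problem`, the study direction, and the assumption that `init_model()` consumes these kwargs are illustrative, not taken from the PR.

import optuna

from discrete_optimization.generic_tools.callbacks.optuna import (
    OptunaPruningSingleFitCallback,
)
from discrete_optimization.pickup_vrp.solver.ortools_solver import ORToolsGPDP


def objective(trial: optuna.trial.Trial) -> float:
    # Suggest one value per hyperparameter declared on ORToolsGPDP above.
    names = ORToolsGPDP.get_hyperparameters_names()
    values = ORToolsGPDP.suggest_hyperparameters_values_with_optuna(trial=trial, names=names)
    hyperparameters = dict(zip(names, values))

    solver = ORToolsGPDP(problem=gpdp_problem)  # gpdp_problem: a GPDP instance (assumed available)
    solver.init_model(**hyperparameters)  # assuming init_model() accepts these kwargs
    res = solver.solve(callbacks=[OptunaPruningSingleFitCallback(trial=trial)])
    return float(res.best_fit)


# The study direction must match the sense of the solver's aggregated objective (assumed here).
study = optuna.create_study(direction="minimize", pruner=optuna.pruners.MedianPruner())
study.optimize(objective, n_trials=50)
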