Skip to content

Commit

Permalink
Finish converting max_iterations to max_suggestions (#848)
Browse files Browse the repository at this point in the history
See Also: #713

---------

Co-authored-by: Sergiy Matusevych <sergiym@microsoft.com>
  • Loading branch information
bpkroth and motus authored Aug 19, 2024
1 parent 9183ae6 commit 439df99
Show file tree
Hide file tree
Showing 8 changed files with 24 additions and 26 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
"example": 10
},
"max_trials": {
"description": "Influence the budget of max number of trials for SMAC. If omitted, will default to max_iterations.",
"description": "Influence the budget of max number of trials for SMAC. If omitted, will default to max_suggestions.",
"type": "integer",
"minimum": 10,
"example": 100
Expand Down
11 changes: 4 additions & 7 deletions mlos_bench/mlos_bench/optimizers/base_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def __init__(
self._start_with_defaults: bool = bool(
strtobool(str(self._config.pop("start_with_defaults", True)))
)
self._max_iter = int(self._config.pop("max_suggestions", 100))
self._max_suggestions = int(self._config.pop("max_suggestions", 100))

opt_targets: Dict[str, str] = self._config.pop("optimization_targets", {"score": "min"})
self._opt_targets: Dict[str, Literal[1, -1]] = {}
Expand Down Expand Up @@ -142,18 +142,15 @@ def current_iteration(self) -> int:
"""
return self._iter

# TODO: finish renaming iterations to suggestions.
# See Also: https://github.com/microsoft/MLOS/pull/713

@property
def max_iterations(self) -> int:
def max_suggestions(self) -> int:
"""
The maximum number of iterations (suggestions) to run.
Note: this may or may not be the same as the number of configurations.
See Also: Scheduler.trial_config_repeat_count and Scheduler.max_trials.
"""
return self._max_iter
return self._max_suggestions

@property
def seed(self) -> int:
Expand Down Expand Up @@ -362,7 +359,7 @@ def not_converged(self) -> bool:
Base implementation just checks the iteration count.
"""
return self._iter < self._max_iter
return self._iter < self._max_suggestions

@abstractmethod
def get_best_observation(
Expand Down
8 changes: 4 additions & 4 deletions mlos_bench/mlos_bench/optimizers/grid_search_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,11 +58,11 @@ def _sanity_check(self) -> None:
size,
self._tunables,
)
if size > self._max_iter:
if size > self._max_suggestions:
_LOG.warning(
"Grid search size %d, is greater than max iterations %d",
size,
self._max_iter,
self._max_suggestions,
)

def _get_grid(self) -> Tuple[Tuple[str, ...], Dict[Tuple[TunableValue, ...], None]]:
Expand Down Expand Up @@ -147,7 +147,7 @@ def suggest(self) -> TunableGroups:
self._suggested_configs.add(default_config_values)
else:
# Select the first item from the pending configs.
if not self._pending_configs and self._iter <= self._max_iter:
if not self._pending_configs and self._iter <= self._max_suggestions:
_LOG.info("No more pending configs to suggest. Restarting grid.")
self._config_keys, self._pending_configs = self._get_grid()
try:
Expand Down Expand Up @@ -185,7 +185,7 @@ def register(
return registered_score

def not_converged(self) -> bool:
if self._iter > self._max_iter:
if self._iter > self._max_suggestions:
if bool(self._pending_configs):
_LOG.warning(
"Exceeded max iterations, but still have %d pending configs: %s",
Expand Down
11 changes: 6 additions & 5 deletions mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,13 @@ def __init__(
)
)

# Make sure max_trials >= max_iterations.
# Make sure max_trials >= max_suggestions.
if "max_trials" not in self._config:
self._config["max_trials"] = self._max_iter
assert (
int(self._config["max_trials"]) >= self._max_iter
), f"max_trials {self._config.get('max_trials')} <= max_iterations {self._max_iter}"
self._config["max_trials"] = self._max_suggestions
assert int(self._config["max_trials"]) >= self._max_suggestions, (
f"max_trials {self._config.get('max_trials')} "
    f"< max_suggestions {self._max_suggestions}"
)

if "run_name" not in self._config and self.experiment_id:
self._config["run_name"] = self.experiment_id
Expand Down
2 changes: 1 addition & 1 deletion mlos_bench/mlos_bench/optimizers/one_shot_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def __init__(
):
super().__init__(tunables, config, global_config, service)
_LOG.info("Run a single iteration for: %s", self._tunables)
self._max_iter = 1 # Always run for just one iteration.
self._max_suggestions = 1 # Always run for just one iteration.

def suggest(self) -> TunableGroups:
"""Always produce the same (initial) suggestion."""
Expand Down
6 changes: 3 additions & 3 deletions mlos_bench/mlos_bench/tests/launcher_parse_args_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ def test_launcher_args_parse_defaults(config_paths: List[str]) -> None:
assert isinstance(launcher.optimizer, OneShotOptimizer)
# Check that the optimizer got initialized with defaults.
assert launcher.optimizer.tunable_params.is_defaults()
assert launcher.optimizer.max_iterations == 1 # value for OneShotOptimizer
assert launcher.optimizer.max_suggestions == 1 # value for OneShotOptimizer
# Check that we pick up the right scheduler config:
assert isinstance(launcher.scheduler, SyncScheduler)
assert launcher.scheduler.trial_config_repeat_count == 1 # default
Expand Down Expand Up @@ -155,7 +155,7 @@ def test_launcher_args_parse_1(config_paths: List[str]) -> None:
assert isinstance(launcher.optimizer, OneShotOptimizer)
# Check that the optimizer got initialized with defaults.
assert launcher.optimizer.tunable_params.is_defaults()
assert launcher.optimizer.max_iterations == 1 # value for OneShotOptimizer
assert launcher.optimizer.max_suggestions == 1 # value for OneShotOptimizer
# Check that we pick up the right scheduler config:
assert isinstance(launcher.scheduler, SyncScheduler)
assert (
Expand Down Expand Up @@ -223,7 +223,7 @@ def test_launcher_args_parse_2(config_paths: List[str]) -> None:
"max_suggestions", opt_config.get("config", {}).get("max_suggestions", 100)
)
assert (
launcher.optimizer.max_iterations
launcher.optimizer.max_suggestions
== orig_max_iters
== launcher.global_config["max_suggestions"]
)
Expand Down
2 changes: 1 addition & 1 deletion mlos_bench/mlos_bench/tests/optimizers/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def flaml_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:


# FIXME: SMAC's RF model can be non-deterministic at low iterations, which are
# normally calculated as a percentage of the max_iterations and number of
# normally calculated as a percentage of the max_suggestions and number of
# tunable dimensions, so for now we set the initial random samples equal to the
# number of iterations and control them with a seed.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,11 +83,11 @@ def grid_search_opt(
assert len(grid_search_tunables) == 3
# Test the convergence logic by controlling the number of iterations to be not a
# multiple of the number of elements in the grid.
max_iterations = len(grid_search_tunables_grid) * 2 - 3
max_suggestions = len(grid_search_tunables_grid) * 2 - 3
return GridSearchOptimizer(
tunables=grid_search_tunables,
config={
"max_suggestions": max_iterations,
"max_suggestions": max_suggestions,
"optimization_targets": {"score": "max", "other_score": "min"},
},
)
Expand Down Expand Up @@ -187,7 +187,7 @@ def test_grid_search(

# But if we still have iterations left, we should be able to suggest again by
# refilling the grid.
assert grid_search_opt.current_iteration < grid_search_opt.max_iterations
assert grid_search_opt.current_iteration < grid_search_opt.max_suggestions
assert grid_search_opt.suggest()
assert list(grid_search_opt.pending_configs)
assert list(grid_search_opt.suggested_configs)
Expand All @@ -198,7 +198,7 @@ def test_grid_search(
suggestion = grid_search_opt.suggest()
grid_search_opt.register(suggestion, status, score)
assert not grid_search_opt.not_converged()
assert grid_search_opt.current_iteration >= grid_search_opt.max_iterations
assert grid_search_opt.current_iteration >= grid_search_opt.max_suggestions
assert list(grid_search_opt.pending_configs)
assert list(grid_search_opt.suggested_configs)

Expand Down

0 comments on commit 439df99

Please sign in to comment.