Migrate to ConfigSpace 1.* (#802)
Summary of changes:
* [x] Update dependencies to ConfigSpace >= 1.0.
* [x] Update dependencies to SMAC3 >= 2.2.
* [x] Migrate to new ConfigSpace API (see the before/after sketch below).
* [x] Better definition of ConfigSpace fixture in unit tests.
* [x] Fix mypy and pylint issues triggered by the migration.

Notes:
* Side effect: **Works with Python 3.12!**
* ~Supersedes #799~
* ~Depends on upcoming SMAC3 release with [SMAC3 #1124](automl/SMAC3#1124) merged in and a new release cut with it.~
* Quantization will be added back later:
    * #803 
* ~Tests fail due to LlamaTune issue:~
    * #805 

Closes #727
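
A minimal before/after sketch of the API moves this migration applies (ConfigSpace 0.x forms in the comments, 1.x forms in the code; the space and parameter names are illustrative only, not taken from the diffs below):

```python
from ConfigSpace import Configuration, ConfigurationSpace, EqualsCondition, Integer

cs = ConfigurationSpace(name="example")
x = Integer("x", bounds=(0, 10), default=5)
flag = Integer("flag", bounds=(0, 1), default=0)

# 0.x: cs.add_hyperparameters([x, flag]) and cs.add_condition(...)
# 1.x: a single add() accepts hyperparameters and conditions, singly or in bulk.
cs.add([x, flag])
cs.add(EqualsCondition(cs["x"], cs["flag"], 1))

# 0.x: cs.check_configuration(config)
# 1.x: build a Configuration and call check_valid_configuration() on it.
Configuration(cs, values={"flag": 1, "x": 3}).check_valid_configuration()
```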

---------

Co-authored-by: Brian Kroth <bpkroth@users.noreply.github.com>
Co-authored-by: Brian Kroth <bpkroth@microsoft.com>
3 people authored Aug 2, 2024
1 parent 1ad725a commit a5f36a8
Showing 18 changed files with 183 additions and 144 deletions.
conda-envs/mlos-windows.yml (2 additions & 1 deletion)
```diff
@@ -16,7 +16,8 @@ dependencies:
   - jupyter
   - ipykernel
   - nb_conda_kernels
-  - matplotlib
+  - matplotlib<3.9
+  - matplotlib-base<3.9
   - seaborn
   - pandas
   - pyarrow
```
mlos_bench/mlos_bench/optimizers/convert_configspace.py (26 additions & 9 deletions)
```diff
@@ -7,23 +7,30 @@
 """

 import logging
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Dict, Hashable, List, Optional, Tuple, Union

 from ConfigSpace import (
     Beta,
+    BetaFloatHyperparameter,
+    BetaIntegerHyperparameter,
     CategoricalHyperparameter,
     Configuration,
     ConfigurationSpace,
     EqualsCondition,
     Float,
     Integer,
     Normal,
+    NormalFloatHyperparameter,
+    NormalIntegerHyperparameter,
     Uniform,
+    UniformFloatHyperparameter,
+    UniformIntegerHyperparameter,
 )
+from ConfigSpace.types import NotSet

 from mlos_bench.tunables.tunable import Tunable, TunableValue
 from mlos_bench.tunables.tunable_groups import TunableGroups
-from mlos_bench.util import nullable, try_parse_val
+from mlos_bench.util import try_parse_val

 _LOG = logging.getLogger(__name__)

@@ -70,7 +77,9 @@ def _tunable_to_configspace(
     cs : ConfigurationSpace
         A ConfigurationSpace object that corresponds to the Tunable.
     """
-    meta = {"group": group_name, "cost": cost}  # {"scaling": ""}
+    meta: Dict[Hashable, TunableValue] = {"cost": cost}
+    if group_name is not None:
+        meta["group"] = group_name

     if tunable.type == "categorical":
         return ConfigurationSpace(
@@ -101,12 +110,20 @@ def _tunable_to_configspace(
     elif tunable.distribution is not None:
         raise TypeError(f"Invalid Distribution Type: {tunable.distribution}")

+    range_hp: Union[
+        BetaFloatHyperparameter,
+        BetaIntegerHyperparameter,
+        NormalFloatHyperparameter,
+        NormalIntegerHyperparameter,
+        UniformFloatHyperparameter,
+        UniformIntegerHyperparameter,
+    ]
     if tunable.type == "int":
         range_hp = Integer(
             name=tunable.name,
             bounds=(int(tunable.range[0]), int(tunable.range[1])),
             log=bool(tunable.is_log),
-            q=nullable(int, tunable.quantization),
+            # TODO: Restore quantization support (#803).
             distribution=distribution,
             default=(
                 int(tunable.default)
@@ -120,8 +137,8 @@ def _tunable_to_configspace(
             name=tunable.name,
             bounds=tunable.range,
             log=bool(tunable.is_log),
-            q=tunable.quantization,  # type: ignore[arg-type]
-            distribution=distribution,  # type: ignore[arg-type]
+            # TODO: Restore quantization support (#803).
+            distribution=distribution,
             default=(
                 float(tunable.default)
                 if tunable.in_range(tunable.default) and tunable.default is not None
@@ -152,7 +169,7 @@ def _tunable_to_configspace(
                 name=special_name,
                 choices=tunable.special,
                 weights=special_weights,
-                default_value=tunable.default if tunable.default in tunable.special else None,
+                default_value=tunable.default if tunable.default in tunable.special else NotSet,
                 meta=meta,
             ),
             type_name: CategoricalHyperparameter(
@@ -163,10 +180,10 @@ def _tunable_to_configspace(
             ),
         }
     )
-    conf_space.add_condition(
+    conf_space.add(
         EqualsCondition(conf_space[special_name], conf_space[type_name], TunableValueKind.SPECIAL)
     )
-    conf_space.add_condition(
+    conf_space.add(
         EqualsCondition(conf_space[tunable.name], conf_space[type_name], TunableValueKind.RANGE)
     )
```
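
A subtle 1.x change in the hunks above: `None` is no longer the "no default" sentinel for a `CategoricalHyperparameter`; `ConfigSpace.types.NotSet` plays that role now. A tiny sketch (hypothetical parameter, not from this file):

```python
from ConfigSpace import CategoricalHyperparameter
from ConfigSpace.types import NotSet

# With NotSet, ConfigSpace chooses a default itself instead of
# treating None as a (possibly illegal) categorical value.
hp = CategoricalHyperparameter("special", choices=[-1, 0], default_value=NotSet)
print(hp.default_value)
```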
mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py (46 additions & 24 deletions)
```diff
@@ -9,6 +9,7 @@
     CategoricalHyperparameter,
     ConfigurationSpace,
     EqualsCondition,
+    Integer,
     UniformFloatHyperparameter,
     UniformIntegerHyperparameter,
 )
@@ -40,45 +41,66 @@ def configuration_space() -> ConfigurationSpace:
         special_param_names("kernel_sched_migration_cost_ns")
     )

+    # TODO: Add quantization support tests (#803).
+
+    # NOTE: FLAML requires distribution to be uniform
     spaces = ConfigurationSpace(
-        space={
-            "vmSize": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
-            "idle": ["halt", "mwait", "noidle"],
-            "kernel_sched_migration_cost_ns": (0, 500000),
-            kernel_sched_migration_cost_ns_special: [-1, 0],
-            kernel_sched_migration_cost_ns_type: [
-                TunableValueKind.SPECIAL,
-                TunableValueKind.RANGE,
-            ],
-            "kernel_sched_latency_ns": (0, 1000000000),
+        {
+            "vmSize": CategoricalHyperparameter(
+                name="vmSize",
+                choices=["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
+                default_value="Standard_B4ms",
+                meta={"group": "provision", "cost": 0},
+            ),
+            "idle": CategoricalHyperparameter(
+                name="idle",
+                choices=["halt", "mwait", "noidle"],
+                default_value="halt",
+                meta={"group": "boot", "cost": 0},
+            ),
+            "kernel_sched_latency_ns": Integer(
+                name="kernel_sched_latency_ns",
+                bounds=(0, 1000000000),
+                log=False,
+                default=2000000,
+                meta={"group": "kernel", "cost": 0},
+            ),
+            "kernel_sched_migration_cost_ns": Integer(
+                name="kernel_sched_migration_cost_ns",
+                bounds=(0, 500000),
+                log=False,
+                default=250000,
+                meta={"group": "kernel", "cost": 0},
+            ),
+            kernel_sched_migration_cost_ns_special: CategoricalHyperparameter(
+                name=kernel_sched_migration_cost_ns_special,
+                choices=[-1, 0],
+                weights=[0.5, 0.5],
+                default_value=-1,
+                meta={"group": "kernel", "cost": 0},
+            ),
+            kernel_sched_migration_cost_ns_type: CategoricalHyperparameter(
+                name=kernel_sched_migration_cost_ns_type,
+                choices=[TunableValueKind.SPECIAL, TunableValueKind.RANGE],
+                weights=[0.5, 0.5],
+                default_value=TunableValueKind.SPECIAL,
+            ),
         }
     )

-    # NOTE: FLAML requires distribution to be uniform
-    spaces["vmSize"].default_value = "Standard_B4ms"
-    spaces["idle"].default_value = "halt"
-    spaces["kernel_sched_migration_cost_ns"].default_value = 250000
-    spaces[kernel_sched_migration_cost_ns_special].default_value = -1
-    spaces[kernel_sched_migration_cost_ns_special].probabilities = (0.5, 0.5)
-    spaces[kernel_sched_migration_cost_ns_type].default_value = TunableValueKind.SPECIAL
-    spaces[kernel_sched_migration_cost_ns_type].probabilities = (0.5, 0.5)
-    spaces["kernel_sched_latency_ns"].default_value = 2000000
-
-    spaces.add_condition(
+    spaces.add(
         EqualsCondition(
             spaces[kernel_sched_migration_cost_ns_special],
             spaces[kernel_sched_migration_cost_ns_type],
             TunableValueKind.SPECIAL,
         )
     )
-    spaces.add_condition(
+    spaces.add(
         EqualsCondition(
             spaces["kernel_sched_migration_cost_ns"],
             spaces[kernel_sched_migration_cost_ns_type],
             TunableValueKind.RANGE,
         )
     )

     return spaces
```
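
The special/range switching in this fixture rests on ConfigSpace conditions; here is a self-contained sketch of the same pattern with hypothetical `mode`/`cost` parameters:

```python
from ConfigSpace import CategoricalHyperparameter, ConfigurationSpace, EqualsCondition, Integer

cs = ConfigurationSpace(
    {
        "mode": CategoricalHyperparameter("mode", choices=["special", "range"]),
        "cost": Integer("cost", bounds=(0, 100), default=50),
    }
)
# "cost" is active (and sampled) only when mode == "range".
cs.add(EqualsCondition(cs["cost"], cs["mode"], "range"))

print(dict(cs.sample_configuration()))
```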
mlos_core/mlos_core/optimizers/bayesian_optimizers/smac_optimizer.py
```diff
@@ -23,6 +23,7 @@
 )
 from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
 from mlos_core.spaces.adapters.identity_adapter import IdentityAdapter
+from mlos_core.util import drop_nulls


 class SmacOptimizer(BaseBayesianOptimizer):
@@ -350,8 +351,11 @@ def _suggest(
             warn(f"Not Implemented: Ignoring context {list(context.columns)}", UserWarning)

         trial: TrialInfo = self.base_optimizer.ask()
-        trial.config.is_valid_configuration()
-        self.optimizer_parameter_space.check_configuration(trial.config)
+        trial.config.check_valid_configuration()
+        ConfigSpace.Configuration(
+            self.optimizer_parameter_space,
+            values=trial.config,
+        ).check_valid_configuration()
         assert trial.config.config_space == self.optimizer_parameter_space
         self.trial_info_map[trial.config] = trial
         config_df = pd.DataFrame(
@@ -441,6 +445,11 @@ def _to_configspace_configs(self, *, configs: pd.DataFrame) -> List[ConfigSpace.Configuration]:
             List of ConfigSpace configs.
         """
         return [
-            ConfigSpace.Configuration(self.optimizer_parameter_space, values=config.to_dict())
+            ConfigSpace.Configuration(
+                self.optimizer_parameter_space,
+                # Remove None values for inactive parameters
+                values=drop_nulls(config.to_dict()),
+                allow_inactive_with_values=False,
+            )
             for (_, config) in configs.astype("O").iterrows()
         ]
```
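
`drop_nulls` itself (from `mlos_core.util`) is not part of this diff. Conceptually it strips the `None` entries that pandas rows carry for inactive conditional parameters, which ConfigSpace 1.x `Configuration` objects reject; a plausible minimal sketch, not the actual implementation:

```python
from typing import Any, Dict


def drop_nulls(d: Dict[str, Any]) -> Dict[str, Any]:
    """Drop None entries (e.g., inactive conditional parameters) from a config dict."""
    return {key: val for key, val in d.items() if val is not None}


assert drop_nulls({"x": 3, "special": None}) == {"x": 3}
```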
mlos_core/mlos_core/optimizers/flaml_optimizer.py (6 additions & 3 deletions)
```diff
@@ -13,7 +13,7 @@

 from mlos_core.optimizers.optimizer import BaseOptimizer
 from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
-from mlos_core.util import normalize_config
+from mlos_core.util import drop_nulls, normalize_config


 class EvaluatedSample(NamedTuple):
@@ -124,13 +124,16 @@ def _register(
             warn(f"Not Implemented: Ignoring metadata {list(metadata.columns)}", UserWarning)

         for (_, config), (_, score) in zip(configs.astype("O").iterrows(), scores.iterrows()):
+            # Remove None values for inactive config parameters
+            config_dict = drop_nulls(config.to_dict())
             cs_config: ConfigSpace.Configuration = ConfigSpace.Configuration(
-                self.optimizer_parameter_space, values=config.to_dict()
+                self.optimizer_parameter_space,
+                values=config_dict,
             )
             if cs_config in self.evaluated_samples:
                 warn(f"Configuration {config} was already registered", UserWarning)
             self.evaluated_samples[cs_config] = EvaluatedSample(
-                config=config.to_dict(),
+                config=config_dict,
                 score=float(np.average(score.astype(float), weights=self._objective_weights)),
             )
```
mlos_core/mlos_core/spaces/adapters/llamatune.py (20 additions & 17 deletions)
```diff
@@ -4,7 +4,7 @@
 #
 """Implementation of LlamaTune space adapter."""
 import os
-from typing import Dict, Optional
+from typing import Dict, List, Optional, Union
 from warnings import warn

 import ConfigSpace
@@ -16,7 +16,7 @@
 from sklearn.preprocessing import MinMaxScaler

 from mlos_core.spaces.adapters.adapter import BaseSpaceAdapter
-from mlos_core.util import normalize_config
+from mlos_core.util import drop_nulls, normalize_config


 class LlamaTuneAdapter(BaseSpaceAdapter):  # pylint: disable=too-many-instance-attributes
@@ -102,7 +102,7 @@ def inverse_transform(self, configurations: pd.DataFrame) -> pd.DataFrame:
         for _, config in configurations.astype("O").iterrows():
             configuration = ConfigSpace.Configuration(
                 self.orig_parameter_space,
-                values=config.to_dict(),
+                values=drop_nulls(config.to_dict()),
             )

             target_config = self._suggested_configs.get(configuration, None)
@@ -222,7 +222,10 @@ def _try_inverse_transform_config(

         # But the inverse mapping should at least be valid in the target space.
         try:
-            self.target_parameter_space.check_configuration(target_config)
+            ConfigSpace.Configuration(
+                self.target_parameter_space,
+                values=target_config,
+            ).check_valid_configuration()
         except ConfigSpace.exceptions.IllegalValueError as e:
             raise ValueError(
                 f"Invalid configuration {target_config} generated by "
@@ -249,7 +252,10 @@ def transform(self, configuration: pd.DataFrame) -> pd.DataFrame:

         # Validate that the configuration is in the original space.
         try:
-            self.orig_parameter_space.check_configuration(orig_configuration)
+            ConfigSpace.Configuration(
+                self.orig_parameter_space,
+                values=orig_configuration,
+            ).check_valid_configuration()
         except ConfigSpace.exceptions.IllegalValueError as e:
             raise ValueError(
                 f"Invalid configuration {orig_configuration} generated by "
@@ -282,6 +288,9 @@ def _construct_low_dim_space(
         """
         # Define target space parameters
         q_scaler = None
+        hyperparameters: List[
+            Union[ConfigSpace.UniformFloatHyperparameter, ConfigSpace.UniformIntegerHyperparameter]
+        ]
         if max_unique_values_per_param is None:
             hyperparameters = [
                 ConfigSpace.UniformFloatHyperparameter(name=f"dim_{idx}", lower=-1, upper=1)
@@ -316,7 +325,7 @@
         config_space = ConfigSpace.ConfigurationSpace(name=self.orig_parameter_space.name)
         # use same random state as in original parameter space
         config_space.random = self._random_state
-        config_space.add_hyperparameters(hyperparameters)
+        config_space.add(hyperparameters)
         self._target_config_space = config_space

     def _transform(self, configuration: dict) -> dict:
@@ -366,7 +375,7 @@ def _transform(self, configuration: dict) -> dict:
                 if param.name in self._special_param_values_dict:
                     value = self._special_param_value_scaler(param, value)

-                orig_value = param._transform(value)  # pylint: disable=protected-access
+                orig_value = param.to_value(value)
                 orig_value = np.clip(orig_value, param.lower, param.upper)
             else:
                 raise NotImplementedError(
@@ -379,7 +388,7 @@ def _transform(self, configuration: dict) -> dict:

     def _special_param_value_scaler(
         self,
-        param: ConfigSpace.UniformIntegerHyperparameter,
+        param: NumericalHyperparameter,
         input_value: float,
     ) -> float:
         """
@@ -388,7 +397,7 @@ def _special_param_value_scaler(

         Parameters
         ----------
-        param: ConfigSpace.UniformIntegerHyperparameter
+        param: NumericalHyperparameter
             Parameter of the original parameter space.
         input_value: float
@@ -403,19 +412,13 @@ def _special_param_value_scaler(

         # Check if input value corresponds to some special value
         perc_sum = 0.0
-        ret: float
         for special_value, biasing_perc in special_values_list:
             perc_sum += biasing_perc
             if input_value < perc_sum:
-                ret = param._inverse_transform(special_value)  # pylint: disable=protected-access
-                return ret
+                return float(param.to_vector(special_value))

         # Scale input value uniformly to non-special values
-        # pylint: disable=protected-access
-        ret = param._inverse_transform(
-            param._transform_scalar((input_value - perc_sum) / (1 - perc_sum))
-        )
-        return ret
+        return float(param.to_vector((input_value - perc_sum) / (1 - perc_sum)))

     # pylint: disable=too-complex,too-many-branches
     def _validate_special_param_values(self, special_param_values_dict: dict) -> None:
```
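
The private `_transform`/`_inverse_transform` helpers used before are gone in ConfigSpace 1.x; `to_value()` and `to_vector()` are their public replacements, as seen in the last two hunks. A round-trip sketch, assuming the 1.x convention that uniform numerical hyperparameters vectorize to a normalized representation:

```python
from ConfigSpace import UniformIntegerHyperparameter

param = UniformIntegerHyperparameter("p", lower=0, upper=100)
vec = param.to_vector(50)  # value -> normalized vector representation
assert param.to_value(vec) == 50  # ...and back again
```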
(12 more changed files not shown.)