Commit

Move quantization_bins into meta field of NumericalHyperparameter (#851)

Co-authored-by: Sergiy Matusevych <sergiy.matusevych@gmail.com>
bpkroth and motus authored Aug 21, 2024
1 parent e514908 commit 2b2a9f0
Showing 5 changed files with 266 additions and 104 deletions.
40 changes: 23 additions & 17 deletions mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -26,7 +26,10 @@
 from mlos_bench.tunables.tunable import Tunable, TunableValue
 from mlos_bench.tunables.tunable_groups import TunableGroups
 from mlos_bench.util import try_parse_val
-from mlos_core.spaces.converters.util import monkey_patch_quantization
+from mlos_core.spaces.converters.util import (
+    QUANTIZATION_BINS_META_KEY,
+    monkey_patch_hp_quantization,
+)

 _LOG = logging.getLogger(__name__)

@@ -77,6 +80,10 @@ def _tunable_to_configspace(
     meta: Dict[Hashable, TunableValue] = {"cost": cost}
     if group_name is not None:
         meta["group"] = group_name
+    if tunable.is_numerical and tunable.quantization_bins:
+        # Temporary workaround to dropped quantization support in ConfigSpace 1.0
+        # See Also: https://github.com/automl/ConfigSpace/issues/390
+        meta[QUANTIZATION_BINS_META_KEY] = tunable.quantization_bins

     if tunable.type == "categorical":
         return ConfigurationSpace(
@@ -137,13 +144,9 @@ def _tunable_to_configspace(
     else:
         raise TypeError(f"Invalid Parameter Type: {tunable.type}")

-    if tunable.quantization_bins:
-        # Temporary workaround to dropped quantization support in ConfigSpace 1.0
-        # See Also: https://github.com/automl/ConfigSpace/issues/390
-        monkey_patch_quantization(range_hp, tunable.quantization_bins)
-
+    monkey_patch_hp_quantization(range_hp)
     if not tunable.special:
-        return ConfigurationSpace({tunable.name: range_hp})
+        return ConfigurationSpace(space=[range_hp])

     # Compute the probabilities of switching between regular and special values.
     special_weights: Optional[List[float]] = None
@@ -156,30 +159,33 @@ def _tunable_to_configspace(
     # one for special values, and one to choose between the two.
     (special_name, type_name) = special_param_names(tunable.name)
     conf_space = ConfigurationSpace(
-        {
-            tunable.name: range_hp,
-            special_name: CategoricalHyperparameter(
+        space=[
+            range_hp,
+            CategoricalHyperparameter(
                 name=special_name,
                 choices=tunable.special,
                 weights=special_weights,
                 default_value=tunable.default if tunable.default in tunable.special else NotSet,
                 meta=meta,
             ),
-            type_name: CategoricalHyperparameter(
+            CategoricalHyperparameter(
                 name=type_name,
                 choices=[TunableValueKind.SPECIAL, TunableValueKind.RANGE],
                 weights=switch_weights,
                 default_value=TunableValueKind.SPECIAL,
             ),
-        }
+        ]
     )
     conf_space.add(
-        EqualsCondition(conf_space[special_name], conf_space[type_name], TunableValueKind.SPECIAL)
-    )
-    conf_space.add(
-        EqualsCondition(conf_space[tunable.name], conf_space[type_name], TunableValueKind.RANGE)
+        [
+            EqualsCondition(
+                conf_space[special_name], conf_space[type_name], TunableValueKind.SPECIAL
+            ),
+            EqualsCondition(
+                conf_space[tunable.name], conf_space[type_name], TunableValueKind.RANGE
+            ),
+        ]
     )

     return conf_space

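For orientation, here is an editor's sketch (not part of the commit) of roughly the structure that _tunable_to_configspace() now emits for a quantized tunable with special values. The parameter names, bounds, choice labels, and "!special"/"!type" suffixes below are made up for illustration; only ConfigSpace constructs that appear in the diff above are used, and the "quantization_bins" meta key matches the QUANTIZATION_BINS_META_KEY constant introduced in mlos_core below.

from ConfigSpace import (
    CategoricalHyperparameter,
    ConfigurationSpace,
    EqualsCondition,
    UniformIntegerHyperparameter,
)

cs = ConfigurationSpace(
    space=[
        UniformIntegerHyperparameter(
            name="example_param",
            lower=0,
            upper=100_000,
            default_value=50_000,
            # Quantization is now only recorded here; sampling is patched separately.
            meta={"cost": 0, "quantization_bins": 11},
        ),
        CategoricalHyperparameter(name="example_param!special", choices=[-1, 0], default_value=-1),
        CategoricalHyperparameter(
            name="example_param!type", choices=["special", "range"], default_value="special"
        ),
    ]
)
cs.add(
    [
        EqualsCondition(cs["example_param!special"], cs["example_param!type"], "special"),
        EqualsCondition(cs["example_param"], cs["example_param!type"], "range"),
    ]
)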
60 changes: 33 additions & 27 deletions mlos_bench/mlos_bench/tests/tunables/tunable_to_configspace_test.py
@@ -13,7 +13,6 @@
     UniformFloatHyperparameter,
     UniformIntegerHyperparameter,
 )
-from ConfigSpace.hyperparameters import NumericalHyperparameter

 from mlos_bench.optimizers.convert_configspace import (
     TunableValueKind,
@@ -23,7 +22,10 @@
 )
 from mlos_bench.tunables.tunable import Tunable
 from mlos_bench.tunables.tunable_groups import TunableGroups
-from mlos_core.spaces.converters.util import monkey_patch_quantization
+from mlos_core.spaces.converters.util import (
+    QUANTIZATION_BINS_META_KEY,
+    monkey_patch_cs_quantization,
+)

 # pylint: disable=redefined-outer-name

@@ -45,66 +47,67 @@ def configuration_space() -> ConfigurationSpace:

     # NOTE: FLAML requires distribution to be uniform
     spaces = ConfigurationSpace(
-        {
-            "vmSize": CategoricalHyperparameter(
+        space=[
+            CategoricalHyperparameter(
                 name="vmSize",
                 choices=["Standard_B2s", "Standard_B2ms", "Standard_B4ms"],
                 default_value="Standard_B4ms",
                 meta={"group": "provision", "cost": 0},
             ),
-            "idle": CategoricalHyperparameter(
+            CategoricalHyperparameter(
                 name="idle",
                 choices=["halt", "mwait", "noidle"],
                 default_value="halt",
                 meta={"group": "boot", "cost": 0},
             ),
-            "kernel_sched_latency_ns": Integer(
+            Integer(
                 name="kernel_sched_latency_ns",
                 bounds=(0, 1000000000),
                 log=False,
                 default=2000000,
-                meta={"group": "kernel", "cost": 0},
+                meta={
+                    "group": "kernel",
+                    "cost": 0,
+                    QUANTIZATION_BINS_META_KEY: 11,
+                },
             ),
-            "kernel_sched_migration_cost_ns": Integer(
+            Integer(
                 name="kernel_sched_migration_cost_ns",
                 bounds=(0, 500000),
                 log=False,
                 default=250000,
                 meta={"group": "kernel", "cost": 0},
             ),
-            kernel_sched_migration_cost_ns_special: CategoricalHyperparameter(
+            CategoricalHyperparameter(
                 name=kernel_sched_migration_cost_ns_special,
                 choices=[-1, 0],
                 weights=[0.5, 0.5],
                 default_value=-1,
                 meta={"group": "kernel", "cost": 0},
             ),
-            kernel_sched_migration_cost_ns_type: CategoricalHyperparameter(
+            CategoricalHyperparameter(
                 name=kernel_sched_migration_cost_ns_type,
                 choices=[TunableValueKind.SPECIAL, TunableValueKind.RANGE],
                 weights=[0.5, 0.5],
                 default_value=TunableValueKind.SPECIAL,
             ),
-        }
-    )
-    spaces.add(
-        EqualsCondition(
-            spaces[kernel_sched_migration_cost_ns_special],
-            spaces[kernel_sched_migration_cost_ns_type],
-            TunableValueKind.SPECIAL,
-        )
+        ]
     )
     spaces.add(
-        EqualsCondition(
-            spaces["kernel_sched_migration_cost_ns"],
-            spaces[kernel_sched_migration_cost_ns_type],
-            TunableValueKind.RANGE,
-        )
+        [
+            EqualsCondition(
+                spaces[kernel_sched_migration_cost_ns_special],
+                spaces[kernel_sched_migration_cost_ns_type],
+                TunableValueKind.SPECIAL,
+            ),
+            EqualsCondition(
+                spaces["kernel_sched_migration_cost_ns"],
+                spaces[kernel_sched_migration_cost_ns_type],
+                TunableValueKind.RANGE,
+            ),
+        ]
     )
-    hp = spaces["kernel_sched_latency_ns"]
-    assert isinstance(hp, NumericalHyperparameter)
-    monkey_patch_quantization(hp, quantization_bins=11)
-    return spaces
+    return monkey_patch_cs_quantization(spaces)


 def test_tunable_to_configspace_categorical(tunable_categorical: Tunable) -> None:
@@ -122,6 +125,9 @@ def _cmp_tunable_hyperparameter_numerical(tunable: Tunable, space: Configuration
     assert (param.lower, param.upper) == tuple(tunable.range)
     if tunable.in_range(tunable.value):
         assert param.default_value == tunable.value
+    assert (param.meta or {}).get(QUANTIZATION_BINS_META_KEY) == tunable.quantization_bins
+    if tunable.quantization_bins:
+        assert param.sample_value() in list(tunable.quantized_values or [])


 def test_tunable_to_configspace_categorical(tunable_categorical: Tunable) -> None:
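Editor's note (not part of the test): as a quick sanity check of the new assertions, the fixture requests 11 quantization bins for kernel_sched_latency_ns over the range (0, 1000000000), so after patching, sample_value() should only return one of the 11 evenly spaced bin edges:

# Hypothetical check of the expected bin edges for the fixture's values.
edges = [i * 100_000_000 for i in range(11)]
print(edges)  # [0, 100000000, 200000000, ..., 1000000000]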
78 changes: 62 additions & 16 deletions mlos_core/mlos_core/spaces/converters/util.py
@@ -4,36 +4,82 @@
 #
 """Helper functions for config space converters."""

+from ConfigSpace import ConfigurationSpace
 from ConfigSpace.functional import quantize
-from ConfigSpace.hyperparameters import NumericalHyperparameter
+from ConfigSpace.hyperparameters import Hyperparameter, NumericalHyperparameter

+QUANTIZATION_BINS_META_KEY = "quantization_bins"


-def monkey_patch_quantization(hp: NumericalHyperparameter, quantization_bins: int) -> None:
+def monkey_patch_hp_quantization(hp: Hyperparameter) -> Hyperparameter:
     """
     Monkey-patch quantization into the Hyperparameter.

     Temporary workaround to dropped quantization support in ConfigSpace 1.0

     See Also: <https://github.com/automl/ConfigSpace/issues/390>

     Parameters
     ----------
-    hp : NumericalHyperparameter
+    hp : Hyperparameter
         ConfigSpace hyperparameter to patch.
-    quantization_bins : int
-        Number of bins to quantize the hyperparameter into.
+
+    Returns
+    -------
+    hp : Hyperparameter
+        Patched hyperparameter.
     """
+    if not isinstance(hp, NumericalHyperparameter):
+        return hp
+
+    assert isinstance(hp, NumericalHyperparameter)
+    dist = hp._vector_dist  # pylint: disable=protected-access
+    quantization_bins = (hp.meta or {}).get(QUANTIZATION_BINS_META_KEY)
+    if quantization_bins is None:
+        # No quantization requested.
+        # Remove any previously applied patches.
+        if hasattr(dist, "sample_vector_mlos_orig"):
+            setattr(dist, "sample_vector", dist.sample_vector_mlos_orig)
+            delattr(dist, "sample_vector_mlos_orig")
+        return hp
+
+    try:
+        quantization_bins = int(quantization_bins)
+    except ValueError as ex:
+        raise ValueError(f"{quantization_bins=} :: must be an integer.") from ex
+
     if quantization_bins <= 1:
         raise ValueError(f"{quantization_bins=} :: must be greater than 1.")

     # Temporary workaround to dropped quantization support in ConfigSpace 1.0
     # See Also: https://github.com/automl/ConfigSpace/issues/390
-    if not hasattr(hp, "sample_value_mlos_orig"):
-        setattr(hp, "sample_value_mlos_orig", hp.sample_value)
+    if not hasattr(dist, "sample_vector_mlos_orig"):
+        setattr(dist, "sample_vector_mlos_orig", dist.sample_vector)

-    assert hasattr(hp, "sample_value_mlos_orig")
+    assert hasattr(dist, "sample_vector_mlos_orig")
     setattr(
-        hp,
-        "sample_value",
-        lambda size=None, **kwargs: quantize(
-            hp.sample_value_mlos_orig(size, **kwargs),
-            bounds=(hp.lower, hp.upper),
+        dist,
+        "sample_vector",
+        lambda n, *, seed=None: quantize(
+            dist.sample_vector_mlos_orig(n, seed=seed),
+            bounds=(dist.lower_vectorized, dist.upper_vectorized),
             bins=quantization_bins,
-        ).astype(type(hp.default_value)),
+        ),
     )
+    return hp
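Editor's aside (not part of the diff): the lambda above swaps the distribution's sample_vector for a version that routes samples through ConfigSpace's quantize() helper, which snaps values within the given bounds onto `bins` evenly spaced points. A rough standalone illustration, assuming NumPy is installed:

import numpy as np
from ConfigSpace.functional import quantize

print(quantize(np.array([0.07, 0.33, 0.98]), bounds=(0.0, 1.0), bins=11))
# Expect values snapped to multiples of 0.1, e.g. [0.1 0.3 1.0]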


+def monkey_patch_cs_quantization(cs: ConfigurationSpace) -> ConfigurationSpace:
+    """
+    Monkey-patch quantization into the Hyperparameters of a ConfigSpace.
+
+    Parameters
+    ----------
+    cs : ConfigurationSpace
+        ConfigSpace to patch.
+
+    Returns
+    -------
+    cs : ConfigurationSpace
+        Patched ConfigSpace.
+    """
+    for hp in cs.values():
+        monkey_patch_hp_quantization(hp)
+    return cs
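To round out the picture, here is an editor's usage sketch (not part of the commit) of the intended end-to-end flow, assuming ConfigSpace 1.x and a build of mlos_core that includes this change are installed:

from ConfigSpace import ConfigurationSpace, UniformIntegerHyperparameter

from mlos_core.spaces.converters.util import (
    QUANTIZATION_BINS_META_KEY,
    monkey_patch_cs_quantization,
)

cs = ConfigurationSpace(
    space=[
        UniformIntegerHyperparameter(
            name="kernel_sched_latency_ns",
            lower=0,
            upper=1_000_000_000,
            default_value=2_000_000,
            # Request 11 quantization bins via the hyperparameter's meta field.
            meta={QUANTIZATION_BINS_META_KEY: 11},
        ),
    ]
)
cs = monkey_patch_cs_quantization(cs)
# Sampled values should now land on one of the 11 bin edges (0, 1e8, 2e8, ..., 1e9).
print(cs["kernel_sched_latency_ns"].sample_value())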
