Skip to content

Commit

Permalink
Rename quantization -> quantization_bins (#844)
Browse files Browse the repository at this point in the history
Merge after (or instead of) #835 

diff from #835 :: https://github.com/motus/MLOS/pull/15/files

Closes #803

---------

Co-authored-by: Brian Kroth <bpkroth@users.noreply.github.com>
Co-authored-by: Brian Kroth <bpkroth@microsoft.com>
  • Loading branch information
3 people authored Aug 16, 2024
1 parent 2e4cfa2 commit fadfacb
Show file tree
Hide file tree
Showing 12 changed files with 36 additions and 36 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@
"maxItems": 2,
"uniqueItems": true
},
"quantization": {
"quantization_bins": {
"description": "The number of buckets to quantize the range into.",
"type": "integer",
"exclusiveMinimum": 1
Expand Down Expand Up @@ -187,7 +187,7 @@
},
"required": ["type", "default", "values"],
"not": {
"required": ["range", "special", "special_weights", "range_weight", "log", "quantization", "distribution"]
"required": ["range", "special", "special_weights", "range_weight", "log", "quantization_bins", "distribution"]
},
"$comment": "TODO: add check that default is in values",
"unevaluatedProperties": false
Expand Down Expand Up @@ -217,8 +217,8 @@
"distribution": {
"$ref": "#/$defs/tunable_param_distribution"
},
"quantization": {
"$ref": "#/$defs/quantization"
"quantization_bins": {
"$ref": "#/$defs/quantization_bins"
},
"log": {
"$ref": "#/$defs/log_scale"
Expand Down Expand Up @@ -265,8 +265,8 @@
"distribution": {
"$ref": "#/$defs/tunable_param_distribution"
},
"quantization": {
"$ref": "#/$defs/quantization"
"quantization_bins": {
"$ref": "#/$defs/quantization_bins"
},
"log": {
"$ref": "#/$defs/log_scale"
Expand Down
4 changes: 2 additions & 2 deletions mlos_bench/mlos_bench/optimizers/convert_configspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,10 +168,10 @@ def _tunable_to_configspace(
else:
raise TypeError(f"Invalid Parameter Type: {tunable.type}")

if tunable.quantization:
if tunable.quantization_bins:
# Temporary workaround to dropped quantization support in ConfigSpace 1.0
# See Also: https://github.com/automl/ConfigSpace/issues/390
_monkey_patch_quantization(range_hp, tunable.quantization)
_monkey_patch_quantization(range_hp, tunable.quantization_bins)

if not tunable.special:
return ConfigurationSpace({tunable.name: range_hp})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"type": "float",
"default": 10,
"range": [0, 10],
"quantization": true // <-- this is invalid
"quantization_bins": true // <-- this is invalid
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"type": "float",
"default": 10,
"range": [1, 500],
"quantization": 1 // <-- should be greater than 1
"quantization_bins": 1 // <-- should be greater than 1
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"type": "int",
"default": 10,
"range": [1, 500],
"quantization": 1 // <-- should be greater than 1
"quantization_bins": 1 // <-- should be greater than 1
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"type": "int",
"default": 10,
"range": [1, 500],
"quantization": "yes" // <-- this is invalid
"quantization_bins": "yes" // <-- this is invalid
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
"special": [-1],
"special_weights": [0.1],
"range_weight": 0.9,
"quantization": 50,
"quantization_bins": 50,
"distribution": {
"type": "beta",
"params": {
Expand All @@ -31,7 +31,7 @@
"special": [-1],
"special_weights": [0.1],
"range_weight": 0.9,
"quantization": 50,
"quantization_bins": 50,
"distribution": {
"type": "normal",
"params": {
Expand All @@ -48,7 +48,7 @@
"meta": {"scale": 1000, "prefix": "/proc/var/random/", "base": 2.71828},
"range": [1.1, 111.1],
"special": [-1.1],
"quantization": 11,
"quantization_bins": 11,
"distribution": {
"type": "uniform"
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def grid_search_tunables_config() -> dict:
"type": "float",
"range": [0, 1],
"default": 0.5,
"quantization": 5,
"quantization_bins": 5,
},
},
},
Expand Down
2 changes: 1 addition & 1 deletion mlos_bench/mlos_bench/tests/tunable_groups_fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@
"type": "int",
"default": 2000000,
"range": [0, 1000000000],
"quantization": 11,
"quantization_bins": 11,
"log": false
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,13 +71,13 @@ def test_tunable_quantized_int_size_props() -> None:
"type": "int",
"range": [100, 1000],
"default": 100,
"quantization": 10,
"quantization_bins": 10,
},
)
expected = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]
assert tunable.span == 900
assert tunable.cardinality == len(expected)
assert tunable.quantization == len(expected)
assert tunable.quantization_bins == len(expected)
assert list(tunable.quantized_values or []) == expected
assert list(tunable.values or []) == expected

Expand All @@ -90,12 +90,12 @@ def test_tunable_quantized_float_size_props() -> None:
"type": "float",
"range": [0, 1],
"default": 0,
"quantization": 11,
"quantization_bins": 11,
},
)
expected = [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
assert tunable.span == 1
assert tunable.cardinality == len(expected)
assert tunable.quantization == len(expected)
assert tunable.quantization_bins == len(expected)
assert pytest.approx(list(tunable.quantized_values or []), 0.0001) == expected
assert pytest.approx(list(tunable.values or []), 0.0001) == expected
Original file line number Diff line number Diff line change
Expand Up @@ -234,14 +234,14 @@ def test_numerical_quantization(tunable_type: TunableValueTypeName) -> None:
{{
"type": "{tunable_type}",
"range": [0, 100],
"quantization": 11,
"quantization_bins": 11,
"default": 0
}}
"""
config = json.loads(json_config)
tunable = Tunable(name="test", config=config)
expected = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
assert tunable.quantization == len(expected)
assert tunable.quantization_bins == len(expected)
assert pytest.approx(list(tunable.quantized_values or []), 1e-8) == expected
assert not tunable.is_log

Expand Down Expand Up @@ -393,7 +393,7 @@ def test_numerical_quantization_wrong(tunable_type: TunableValueTypeName) -> Non
{{
"type": "{tunable_type}",
"range": [0, 100],
"quantization": 0,
"quantization_bins": 0,
"default": 0
}}
"""
Expand Down
24 changes: 12 additions & 12 deletions mlos_bench/mlos_bench/tunables/tunable.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ class TunableDict(TypedDict, total=False):
default: TunableValue
values: Optional[List[Optional[str]]]
range: Optional[Union[Sequence[int], Sequence[float]]]
quantization: Optional[int]
quantization_bins: Optional[int]
log: Optional[bool]
distribution: Optional[DistributionDict]
special: Optional[Union[List[int], List[float]]]
Expand Down Expand Up @@ -109,7 +109,7 @@ def __init__(self, name: str, config: TunableDict):
self._values = [str(v) if v is not None else v for v in self._values]
self._meta: Dict[str, Any] = config.get("meta", {})
self._range: Optional[Union[Tuple[int, int], Tuple[float, float]]] = None
self._quantization: Optional[int] = config.get("quantization")
self._quantization_bins: Optional[int] = config.get("quantization_bins")
self._log: Optional[bool] = config.get("log")
self._distribution: Optional[DistributionName] = None
self._distribution_params: Dict[str, float] = {}
Expand Down Expand Up @@ -162,7 +162,7 @@ def _sanity_check_categorical(self) -> None:
raise ValueError(f"Categorical tunable cannot have range_weight: {self}")
if self._log is not None:
raise ValueError(f"Categorical tunable cannot have log parameter: {self}")
if self._quantization is not None:
if self._quantization_bins is not None:
raise ValueError(f"Categorical tunable cannot have quantization parameter: {self}")
if self._distribution is not None:
raise ValueError(f"Categorical parameters do not support `distribution`: {self}")
Expand All @@ -182,7 +182,7 @@ def _sanity_check_numerical(self) -> None:
raise ValueError(f"Values must be None for the numerical type tunable {self}")
if not self._range or len(self._range) != 2 or self._range[0] >= self._range[1]:
raise ValueError(f"Invalid range for tunable {self}: {self._range}")
if self._quantization is not None and self._quantization <= 1:
if self._quantization_bins is not None and self._quantization_bins <= 1:
raise ValueError(f"Number of quantization bins is <= 1: {self}")
if self._distribution is not None and self._distribution not in {
"uniform",
Expand Down Expand Up @@ -580,18 +580,18 @@ def span(self) -> Union[int, float]:
return num_range[1] - num_range[0]

@property
def quantization(self) -> Optional[int]:
def quantization_bins(self) -> Optional[int]:
"""
Get the number of quantization bins, if specified.
Returns
-------
quantization : int | None
quantization_bins : int | None
Number of quantization bins, or None.
"""
if self.is_categorical:
return None
return self._quantization
return self._quantization_bins

@property
def quantized_values(self) -> Optional[Union[Iterable[int], Iterable[float]]]:
Expand All @@ -606,23 +606,23 @@ def quantized_values(self) -> Optional[Union[Iterable[int], Iterable[float]]]:
"""
num_range = self.range
if self.type == "float":
if not self.quantization:
if not self.quantization_bins:
return None
# Be sure to return python types instead of numpy types.
return (
float(x)
for x in np.linspace(
start=num_range[0],
stop=num_range[1],
num=self.quantization,
num=self.quantization_bins,
endpoint=True,
)
)
assert self.type == "int", f"Unhandled tunable type: {self}"
return range(
int(num_range[0]),
int(num_range[1]) + 1,
int(self.span / (self.quantization - 1)) if self.quantization else 1,
int(self.span / (self.quantization_bins - 1)) if self.quantization_bins else 1,
)

@property
Expand All @@ -640,8 +640,8 @@ def cardinality(self) -> Optional[int]:
"""
if self.is_categorical:
return len(self.categories)
if self.quantization:
return self.quantization
if self.quantization_bins:
return self.quantization_bins
if self.type == "int":
return int(self.span) + 1
return None
Expand Down

0 comments on commit fadfacb

Please sign in to comment.