Add log scale and quantization to the tunables #662

Merged Feb 6, 2024 · 37 commits (changes shown from 30 commits)

Commits
8f59f5a
add weights support to the Tunable and the JSON schema
motus Jan 19, 2024
996cc9a
typo: use "number" instead of "float" in JSON schema
motus Jan 19, 2024
afc41e2
add unit tests for weighted parameters of the tunables; check for
motus Jan 19, 2024
cf3d19e
bugfix: incorrect formatting of interpolated JSON strings in the unit
motus Jan 19, 2024
e143928
check for zero weights (this is ok)
motus Jan 19, 2024
4503689
pass weights from Tunable to ConfigSpace hyperparameters
motus Jan 19, 2024
4341df1
update the weights in unit tests to make FLAML optimizer happy
motus Jan 19, 2024
845c700
specify probabilities in the config space unit tests
motus Jan 19, 2024
cda0694
Merge branch 'main' into sergiym/tunable/weights
motus Jan 19, 2024
528a094
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Jan 23, 2024
33e86af
Merge branch 'sergiym/tunable/weights' of github.com:motus/MLOS into …
motus Jan 23, 2024
c756f55
Merge branch 'main' into sergiym/tunable/weights
motus Jan 24, 2024
54f816f
Merge branch 'main' into sergiym/tunable/weights
motus Jan 27, 2024
56e5ddd
Merge branch 'main' into sergiym/tunable/weights
motus Jan 29, 2024
025b55a
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Feb 1, 2024
3514fa2
Merge branch 'main' into sergiym/tunable/weights
motus Feb 1, 2024
eecbc7f
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Feb 1, 2024
b30950c
add a range_weight property
motus Feb 1, 2024
0b6d21a
move range weight to a special parameter
motus Feb 1, 2024
fdeaad0
minor fix for pylint
motus Feb 1, 2024
61143e3
add quantization points and log scale to the tunables and pass this d…
motus Feb 2, 2024
fd072e9
add new test cases
motus Feb 2, 2024
8500d67
add log to some tunables in the unit tests
motus Feb 2, 2024
4bcd84b
add more unit tests for new tunable's properties
motus Feb 2, 2024
cb30b7d
use special_weights instead of just weights for the numerical tunables
motus Feb 2, 2024
49c4603
Merge branch 'sergiym/tunable/weights' into sergiym/tunable/quant_log
motus Feb 2, 2024
f219178
use values_weights instead of just weights for categoricals
motus Feb 2, 2024
a938b57
Merge branch 'sergiym/tunable/weights' into sergiym/tunable/quant_log
motus Feb 2, 2024
c230662
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Feb 5, 2024
5d18329
Merge branch 'sergiym/tunable/weights' into sergiym/tunable/quant_log
motus Feb 5, 2024
000ebb5
Update mlos_bench/mlos_bench/config/schemas/tunables/tunable-params-s…
motus Feb 6, 2024
ebac572
Restructure numeric tunable params schema for more docs and reuse
bpkroth Feb 6, 2024
26b9862
more descriptions
bpkroth Feb 6, 2024
58b9e62
Merge remote-tracking branch 'serigy/sergiym/tunable/quant_log' into …
bpkroth Feb 6, 2024
e1414f1
Merge pull request #8 from bpkroth/sergiym/tunable/quant_log
motus Feb 6, 2024
88d98c4
add float vs int handling of quantization
bpkroth Feb 6, 2024
a2d80da
Merge pull request #10 from bpkroth/sergiym/tunable/quant_log
motus Feb 6, 2024
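
Taken together, the commits above let numerical tunables declare log-scale sampling and a quantization granularity directly in their JSON configs, and pass both through to the ConfigSpace backend. Below is a minimal sketch of a tunable group using the new fields, modeled on the test fixtures changed later in this diff; the group and parameter names are illustrative:

```python
import json

# Hypothetical tunable-group config exercising the new "quantization"
# and "log" fields; the field names follow the JSON schema in this PR.
json_config = """
{
    "group": {
        "cost": 1,
        "params": {
            "mem_size_mb": {
                "type": "int",
                "default": 64,
                "range": [1, 1024],
                "quantization": 50,
                "log": true
            }
        }
    }
}
"""
config = json.loads(json_config)
assert config["group"]["params"]["mem_size_mb"]["quantization"] == 50
```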
@@ -73,6 +73,12 @@
"minItems": 2,
"maxItems": 2
},
"quantization": {
"type": "number"
},
"log": {
"type": "boolean"
},
"special": {
"type": "array",
"items": {
@@ -120,6 +126,12 @@
"minItems": 2,
"maxItems": 2
},
"quantization": {
"type": "number"
},
"log": {
"type": "boolean"
},
"special": {
"type": "array",
"items": {
4 changes: 4 additions & 0 deletions mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -89,6 +89,8 @@ def _tunable_to_configspace(
name=tunable.name,
lower=tunable.range[0],
upper=tunable.range[1],
log=tunable.is_log,
q=tunable.quantization,
default_value=tunable.default if tunable.in_range(tunable.default) else None,
meta=meta)
})
@@ -108,6 +110,8 @@ def _tunable_to_configspace(
name=tunable.name,
lower=tunable.range[0],
upper=tunable.range[1],
log=tunable.is_log,
q=tunable.quantization,
default_value=tunable.default if tunable.in_range(tunable.default) else None,
meta=meta
),
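
For context on the two new keyword arguments above, here is a minimal standalone sketch (not part of the diff) of how ConfigSpace consumes `log` and `q`. It assumes ConfigSpace 0.6.x, where the `q` quantization argument is still accepted; the hyperparameter name and bounds are made up:

```python
from ConfigSpace import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformIntegerHyperparameter

cs = ConfigurationSpace(seed=42)
cs.add_hyperparameter(UniformIntegerHyperparameter(
    name="mem_size_mb",   # hypothetical tunable name
    lower=50,             # log=True requires a strictly positive lower bound
    upper=1050,
    log=True,             # sample uniformly in log space
    q=50,                 # quantization step: samples are rounded to multiples of 50
    default_value=100,
))
print(cs.sample_configuration())  # mem_size_mb comes out as a multiple of 50
```

Note that ConfigSpace treats `q` as a step size on the raw value scale; the diff above forwards `tunable.quantization` to it directly.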
@@ -0,0 +1,13 @@
{
"covariant_group_name-1": {
"cost": 1,
"params": {
"float": {
"type": "float",
"default": 10,
"range": [0, 10],
"log": "yes" // <-- this is invalid
}
}
}
}
@@ -0,0 +1,13 @@
{
"covariant_group_name-1": {
"cost": 1,
"params": {
"float": {
"type": "float",
"default": 10,
"range": [0, 10],
"quantization": true // <-- this is invalid
}
}
}
}
@@ -0,0 +1,13 @@
{
"covariant_group_name-1": {
"cost": 1,
"params": {
"int": {
"type": "int",
"default": 10,
"range": [1, 500],
"log": 1 // <-- this is invalid
}
}
}
}
@@ -0,0 +1,13 @@
{
"covariant_group_name-1": {
"cost": 1,
"params": {
"int": {
"type": "int",
"default": 10,
"range": [1, 500],
"quantization": "yes" // <-- this is invalid
}
}
}
}
@@ -11,15 +11,19 @@
"meta": {"suffix": "MB"},
"special": [-1],
"special_weights": [0.1],
"range_weight": 0.9
"range_weight": 0.9,
"quantization": 50,
"log": true
},
"float": {
"description": "Float",
"type": "float",
"default": 10.1,
"meta": {"scale": 1000, "prefix": "/proc/var/random/", "base": 2.71828},
"range": [1.1, 111.1],
"special": [-1.1]
"special": [-1.1],
"quantization": 10,
"log": false
},
"cat": {
"description": "Cat",
6 changes: 4 additions & 2 deletions mlos_bench/mlos_bench/tests/tunable_groups_fixtures.py
@@ -55,13 +55,15 @@
// FLAML requires uniform weights, separately for
// specials and switching between specials and range.
"special_weights": [0.25, 0.25],
"range_weight": 0.5
"range_weight": 0.5,
"log": false
},
"kernel_sched_latency_ns": {
"description": "Initial value for the scheduler period",
"type": "int",
"default": 2000000,
"range": [0, 1000000000]
"range": [0, 1000000000],
"log": false
}
}
}
55 changes: 55 additions & 0 deletions mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py
@@ -247,6 +247,43 @@ def test_numerical_weights(tunable_type: str) -> None:
assert tunable.range_weight == 0.9


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_quantization(tunable_type: str) -> None:
"""
Instantiate a numerical tunable with quantization.
"""
json_config = f"""
{{
"type": "{tunable_type}",
"range": [0, 100],
"quantization": 10,
"default": 0
}}
"""
config = json.loads(json_config)
tunable = Tunable(name='test', config=config)
assert tunable.quantization == 10
assert not tunable.is_log


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_log(tunable_type: str) -> None:
"""
Instantiate a numerical tunable with log scale.
"""
json_config = f"""
{{
"type": "{tunable_type}",
"range": [0, 100],
"log": true,
"default": 0
}}
"""
config = json.loads(json_config)
tunable = Tunable(name='test', config=config)
assert tunable.is_log


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights_no_specials(tunable_type: str) -> None:
"""
@@ -384,6 +421,24 @@ def test_numerical_weights_wrong_values(tunable_type: str) -> None:
Tunable(name='test', config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_quantization_wrong(tunable_type: str) -> None:
"""
Instantiate a numerical tunable with invalid number of quantization points.
"""
json_config = f"""
{{
"type": "{tunable_type}",
"range": [0, 100],
"quantization": 0,
"default": 0
}}
"""
config = json.loads(json_config)
with pytest.raises(ValueError):
Tunable(name='test', config=config)


def test_bad_type() -> None:
"""
Disallow bad types.
39 changes: 39 additions & 0 deletions mlos_bench/mlos_bench/tunables/tunable.py
@@ -32,6 +32,8 @@ class TunableDict(TypedDict, total=False):
default: TunableValue
values: Optional[List[Optional[str]]]
range: Optional[Union[Sequence[int], Sequence[float]]]
quantization: Optional[int]
log: Optional[bool]
special: Optional[Union[List[int], List[float]]]
values_weights: Optional[List[float]]
special_weights: Optional[List[float]]
@@ -76,6 +78,8 @@ def __init__(self, name: str, config: TunableDict):
self._values = [str(v) if v is not None else v for v in self._values]
self._meta: Dict[str, Any] = config.get("meta", {})
self._range: Optional[Union[Tuple[int, int], Tuple[float, float]]] = None
self._quantization: Optional[int] = config.get("quantization")
self._log: Optional[bool] = config.get("log")
config_range = config.get("range")
if config_range is not None:
assert len(config_range) == 2, f"Invalid range: {config_range}"
@@ -105,6 +109,10 @@ def _sanity_check(self) -> None:
raise ValueError(f"Categorical tunable cannot have special values: {self}")
if self._range_weight is not None:
raise ValueError(f"Categorical tunable cannot have range_weight: {self}")
if self._log is not None:
raise ValueError(f"Categorical tunable cannot have log parameter: {self}")
if self._quantization is not None:
raise ValueError(f"Categorical tunable cannot have quantization parameter: {self}")
if self._weights:
if len(self._weights) != len(self._values):
raise ValueError(f"Must specify weights for all values: {self}")
@@ -115,6 +123,8 @@ def _sanity_check(self) -> None:
raise ValueError(f"Values must be None for the numerical type tunable {self}")
if not self._range or len(self._range) != 2 or self._range[0] >= self._range[1]:
raise ValueError(f"Invalid range for tunable {self}: {self._range}")
if self._quantization is not None and self._quantization <= 1:
raise ValueError(f"Number of quantization points is <= 1: {self}")
if self._weights:
if self._range_weight is None:
raise ValueError(f"Must specify weight for the range: {self}")
@@ -416,6 +426,9 @@ def range_weight(self) -> Optional[float]:
weight : float
Weight of the range or None.
"""
assert self.is_numerical
assert self._special
assert self._weights
return self._range_weight

@property
@@ -483,6 +496,32 @@ def range(self) -> Union[Tuple[int, int], Tuple[float, float]]:
assert self._range is not None
return self._range

@property
def quantization(self) -> Optional[int]:
"""
Get the number of quantization points, if specified.

Returns
-------
quantization : int
Number of quantization points or None.
"""
assert self.is_numerical
return self._quantization

@property
def is_log(self) -> Optional[bool]:
"""
Check if numeric tunable is log scale.

Returns
-------
log : bool
True if numeric tunable is log scale, False if linear.
"""
assert self.is_numerical
return self._log

@property
def categories(self) -> List[Optional[str]]:
"""
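
To close the loop, here is a short usage sketch of the new `quantization` and `is_log` properties, following the unit tests added earlier in this PR; the tunable name and values are illustrative:

```python
import json

from mlos_bench.tunables.tunable import Tunable

config = json.loads("""
{
    "type": "int",
    "range": [1, 100],
    "quantization": 10,
    "log": true,
    "default": 1
}
""")
tunable = Tunable(name='test', config=config)
assert tunable.quantization == 10
assert tunable.is_log

# Per _sanity_check() above, quantization values <= 1 are rejected:
try:
    Tunable(name='bad', config={"type": "int", "range": [1, 100],
                                "quantization": 1, "default": 1})
except ValueError as err:
    print(err)  # Number of quantization points is <= 1: ...
```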