Add log scale and quantization to the tunables #662

Merged 37 commits on Feb 6, 2024.

Commits
8f59f5a
add weights support to the Tunable and the JSON schema
motus Jan 19, 2024
996cc9a
typo: use "number" instead of "float" in JSON schema
motus Jan 19, 2024
afc41e2
add unit tests for weighted parameters of the tunables; check for
motus Jan 19, 2024
cf3d19e
bugfix: incorrect formatting of interpolated JSON strings in the unit
motus Jan 19, 2024
e143928
check for zero weights (this is ok)
motus Jan 19, 2024
4503689
pass weights from Tunable to ConfigSpace hyperparameters
motus Jan 19, 2024
4341df1
update the weights in unit tests to make FLAML optimizer happy
motus Jan 19, 2024
845c700
specify probabilities in the config space unit tests
motus Jan 19, 2024
cda0694
Merge branch 'main' into sergiym/tunable/weights
motus Jan 19, 2024
528a094
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Jan 23, 2024
33e86af
Merge branch 'sergiym/tunable/weights' of github.com:motus/MLOS into …
motus Jan 23, 2024
c756f55
Merge branch 'main' into sergiym/tunable/weights
motus Jan 24, 2024
54f816f
Merge branch 'main' into sergiym/tunable/weights
motus Jan 27, 2024
56e5ddd
Merge branch 'main' into sergiym/tunable/weights
motus Jan 29, 2024
025b55a
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Feb 1, 2024
3514fa2
Merge branch 'main' into sergiym/tunable/weights
motus Feb 1, 2024
eecbc7f
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Feb 1, 2024
b30950c
add a range_weight property
motus Feb 1, 2024
0b6d21a
move range weight to a special parameter
motus Feb 1, 2024
fdeaad0
minor fix for pylint
motus Feb 1, 2024
61143e3
add quantization points and log scale to the tunables and pass this d…
motus Feb 2, 2024
fd072e9
add new test cases
motus Feb 2, 2024
8500d67
add log to some tunables in the unit tests
motus Feb 2, 2024
4bcd84b
add more unit tests for new tunable's properties
motus Feb 2, 2024
cb30b7d
use special_weights instead of just weights for the numerical tunables
motus Feb 2, 2024
49c4603
Merge branch 'sergiym/tunable/weights' into sergiym/tunable/quant_log
motus Feb 2, 2024
f219178
use values_weights instead of just weights for categoricals
motus Feb 2, 2024
a938b57
Merge branch 'sergiym/tunable/weights' into sergiym/tunable/quant_log
motus Feb 2, 2024
c230662
Merge branch 'main' of github.com:microsoft/MLOS into sergiym/tunable…
motus Feb 5, 2024
5d18329
Merge branch 'sergiym/tunable/weights' into sergiym/tunable/quant_log
motus Feb 5, 2024
000ebb5
Update mlos_bench/mlos_bench/config/schemas/tunables/tunable-params-s…
motus Feb 6, 2024
ebac572
Restructure numeric tunable params schema for more docs and reuse
bpkroth Feb 6, 2024
26b9862
more descriptions
bpkroth Feb 6, 2024
58b9e62
Merge remote-tracking branch 'serigy/sergiym/tunable/quant_log' into …
bpkroth Feb 6, 2024
e1414f1
Merge pull request #8 from bpkroth/sergiym/tunable/quant_log
motus Feb 6, 2024
88d98c4
add float vs int handling of quantization
bpkroth Feb 6, 2024
a2d80da
Merge pull request #10 from bpkroth/sergiym/tunable/quant_log
motus Feb 6, 2024
@@ -5,14 +5,55 @@

"$defs": {
"tunable_param_meta": {
"description": "A dictionary of metadata about the tunable parameter. Can be used by scripts for additional info when generating configs from the suggested values.",
"type": "object",
"additionalProperties": {
"$comment": "Only flat dictionaries are allowed.",
"type": ["array", "string", "boolean", "null", "number"],
"items": {
"type": ["string", "boolean", "null", "number"]
}
}
},
"numeric_range": {
"description": "Two element array representing the lower and upper bounds of the range.",
"type": "array",
"$comment": "items type left unspecified here",
"minItems": 2,
"maxItems": 2
},
"quantization": {
"description": "The number of buckets to quantize the range into.\nSee Also:\nhttps://automl.github.io/ConfigSpace/main/api/hyperparameters.html#module-ConfigSpace.api.types.float,\nhttps://automl.github.io/ConfigSpace/main/api/hyperparameters.html#module-ConfigSpace.api.types.integer",
"$comment": "type left unspecified here"
},
"log_scale": {
"description": "Whether to use log instead of linear scale for the range search.",
"type": "boolean"
},
"special_values": {
"description": "An array of values that may have special meaning for the target system and could be outside the usual search range.",
"type": "array",
"items": {
"description": "Some special values may have a different type than the numeric parameter (e.g., keyword \"AUTO\").",
"type": ["number", "string", "boolean", "null"]
},
"minItems": 1,
"uniqueItems": true
},
"weights": {
"description": "An array of weights to be associated with the values in order to influence their search priorities.",
"type": "array",
"items": {
"type": "number",
"minimum": 0
},
"minItems": 1
},
"range_weight": {
"description": "The weight to be associated with the range in order to influence its search priority relative to specials values.",
"type": "number",
"minimum": 0
},
"tunable_param_categorical": {
"type": "object",
"properties": {
@@ -23,12 +64,14 @@
"$ref": "#/$defs/tunable_param_meta"
},
"type": {
"description": "A categorical type tunable.",
"const": "categorical"
},
"default": {
"type": ["string", "number", "boolean"]
},
"values": {
"description": "List of values for this categorical type tunable",
"type": "array",
"items": {
"type": ["string", "number", "boolean"]
@@ -37,10 +80,7 @@
"uniqueItems": true
},
"values_weights": {
"type": "array",
"items": {
"type": "number"
}
"$ref": "#/$defs/weights"
}
},
"required": ["type", "default", "values"],
@@ -60,34 +100,34 @@
"$ref": "#/$defs/tunable_param_meta"
},
"type": {
"description": "An integer type tunable.",
"const": "int"
},
"default": {
"type": "integer"
},
"range": {
"type": "array",
"$ref": "#/$defs/numeric_range",
"items": {
"type": "integer"
},
"minItems": 2,
"maxItems": 2
}
},
"quantization": {
"$ref": "#/$defs/quantization",
"type": "integer",
"exclusiveMinimum": 1
},
"log": {
"$ref": "#/$defs/log_scale"
},
"special": {
"type": "array",
"items": {
"type": "integer"
},
"uniqueItems": true
"$ref": "#/$defs/special_values"
},
"special_weights": {
"type": "array",
"items": {
"type": "number"
}
"$ref": "#/$defs/weights"
},
"range_weight": {
"type": "number"
"$ref": "#/$defs/range_weight"
}
},
"required": ["type", "default", "range"],
@@ -107,34 +147,34 @@
"$ref": "#/$defs/tunable_param_meta"
},
"type": {
"description": "A continuous numerical type tunable.",
"const": "float"
},
"default": {
"type": "number"
},
"range": {
"type": "array",
"$ref": "#/$defs/numeric_range",
"items": {
"type": "number"
},
"minItems": 2,
"maxItems": 2
}
},
"quantization": {
"$ref": "#/$defs/quantization",
"type": "number",
"exclusiveMinimum": 0
},
"log": {
"$ref": "#/$defs/log_scale"
},
"special": {
"type": "array",
"items": {
"type": "number"
},
"uniqueItems": true
"$ref": "#/$defs/special_values"
},
"special_weights": {
"type": "array",
"items": {
"type": "number"
}
"$ref": "#/$defs/weights"
},
"range_weight": {
"type": "number"
"$ref": "#/$defs/range_weight"
}
},
"required": ["type", "default", "range"],
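Putting the schema changes together: a numeric tunable can now declare quantization, log-scale search, and weighted special values side by side. Below is a minimal sketch of a tunable-params config that should validate against the updated schema; the group and parameter names are illustrative, not taken from this PR.

import json

# Hypothetical config exercising the new fields: a log-scale, quantized
# integer tunable with one weighted special value.
config = json.loads("""
{
    "cache-group": {
        "cost": 1,
        "params": {
            "cache_size_mb": {
                "type": "int",
                "default": 64,
                "range": [1, 1024],
                "quantization": 16,
                "log": true,
                "special": [-1],
                "special_weights": [0.1],
                "range_weight": 0.9
            }
        }
    }
}
""")
assert config["cache-group"]["params"]["cache_size_mb"]["log"] is True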
4 changes: 4 additions & 0 deletions mlos_bench/mlos_bench/optimizers/convert_configspace.py
@@ -89,6 +89,8 @@ def _tunable_to_configspace(
                name=tunable.name,
                lower=tunable.range[0],
                upper=tunable.range[1],
                log=tunable.is_log,
                q=tunable.quantization,
                default_value=tunable.default if tunable.in_range(tunable.default) else None,
                meta=meta)
        })
@@ -108,6 +110,8 @@ def _tunable_to_configspace(
                name=tunable.name,
                lower=tunable.range[0],
                upper=tunable.range[1],
                log=tunable.is_log,
                q=tunable.quantization,
                default_value=tunable.default if tunable.in_range(tunable.default) else None,
                meta=meta
            ),
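For context, the two new keyword arguments map the tunable's properties onto ConfigSpace's log and q constructor parameters. A rough sketch of the hyperparameter this builds for a quantized, log-scale integer tunable, assuming the pre-1.0 ConfigSpace API in use here (where UniformIntegerHyperparameter still accepts q); the name and bounds are illustrative:

from ConfigSpace.hyperparameters import UniformIntegerHyperparameter

# Roughly what _tunable_to_configspace produces for an int tunable with
# "range": [1, 1024], "quantization": 16, "log": true.
param = UniformIntegerHyperparameter(
    name="cache_size_mb",  # hypothetical tunable name
    lower=1,
    upper=1024,
    log=True,              # search the range on a log scale
    q=16,                  # quantization passed through from the tunable
    default_value=64,
)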
@@ -0,0 +1,13 @@
{
    "covariant_group_name-1": {
        "cost": 1,
        "params": {
            "float": {
                "type": "float",
                "default": 10,
                "range": [0, 10],
                "log": "yes" // <-- this is invalid
            }
        }
    }
}
@@ -0,0 +1,13 @@
{
    "covariant_group_name-1": {
        "cost": 1,
        "params": {
            "float": {
                "type": "float",
                "default": 10,
                "range": [0, 10],
                "quantization": true // <-- this is invalid
            }
        }
    }
}
@@ -0,0 +1,13 @@
{
    "covariant_group_name-1": {
        "cost": 1,
        "params": {
            "float": {
                "type": "float",
                "default": 10,
                "range": [1, 500],
                "quantization": 0 // <-- should be greater than 0
            }
        }
    }
}
@@ -0,0 +1,13 @@
{
    "covariant_group_name-1": {
        "cost": 1,
        "params": {
            "int": {
                "type": "int",
                "default": 10,
                "range": [1, 500],
                "quantization": 1 // <-- should be greater than 1
            }
        }
    }
}
@@ -0,0 +1,13 @@
{
    "covariant_group_name-1": {
        "cost": 1,
        "params": {
            "int": {
                "type": "int",
                "default": 10,
                "range": [1, 500],
                "log": 1 // <-- this is invalid
            }
        }
    }
}
@@ -0,0 +1,13 @@
{
    "covariant_group_name-1": {
        "cost": 1,
        "params": {
            "int": {
                "type": "int",
                "default": 10,
                "range": [1, 500],
                "quantization": "yes" // <-- this is invalid
            }
        }
    }
}
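Each of the invalid configs above should be rejected at schema-validation time, before a Tunable is ever constructed. A quick way to check one by hand with the standard jsonschema package; the schema path below is an assumption based on the truncated commit message, and mlos_bench's own test harness drives this differently:

import json

import jsonschema

# Assumed schema location; adjust to wherever the schema lives in your checkout.
SCHEMA_PATH = "mlos_bench/mlos_bench/config/schemas/tunables/tunable-params-schema.json"

with open(SCHEMA_PATH, encoding="utf-8") as fh:
    schema = json.load(fh)

bad_config = {
    "covariant_group_name-1": {
        "cost": 1,
        "params": {
            "float": {
                "type": "float",
                "default": 10,
                "range": [0, 10],
                "log": "yes",  # invalid: must be a boolean
            }
        }
    }
}

# Raises jsonschema.exceptions.ValidationError because "log" is not a boolean.
jsonschema.validate(bad_config, schema)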
@@ -11,15 +11,19 @@
"meta": {"suffix": "MB"},
"special": [-1],
"special_weights": [0.1],
"range_weight": 0.9
"range_weight": 0.9,
"quantization": 50,
"log": true
},
"float": {
"description": "Float",
"type": "float",
"default": 10.1,
"meta": {"scale": 1000, "prefix": "/proc/var/random/", "base": 2.71828},
"range": [1.1, 111.1],
"special": [-1.1]
"special": [-1.1],
"quantization": 10,
"log": false
},
"cat": {
"description": "Cat",
6 changes: 4 additions & 2 deletions mlos_bench/mlos_bench/tests/tunable_groups_fixtures.py
@@ -55,13 +55,15 @@
            // FLAML requires uniform weights, separately for
            // specials and switching between specials and range.
            "special_weights": [0.25, 0.25],
            "range_weight": 0.5,
            "log": false
        },
        "kernel_sched_latency_ns": {
            "description": "Initial value for the scheduler period",
            "type": "int",
            "default": 2000000,
            "range": [0, 1000000000],
            "log": false
        }
    }
}
55 changes: 55 additions & 0 deletions mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py
@@ -247,6 +247,43 @@ def test_numerical_weights(tunable_type: str) -> None:
    assert tunable.range_weight == 0.9


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_quantization(tunable_type: str) -> None:
    """
    Instantiate a numerical tunable with quantization.
    """
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "quantization": 10,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    tunable = Tunable(name='test', config=config)
    assert tunable.quantization == 10
    assert not tunable.is_log


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_log(tunable_type: str) -> None:
    """
    Instantiate a numerical tunable with log scale.
    """
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "log": true,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    tunable = Tunable(name='test', config=config)
    assert tunable.is_log


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights_no_specials(tunable_type: str) -> None:
    """
@@ -384,6 +421,24 @@ def test_numerical_weights_wrong_values(tunable_type: str) -> None:
        Tunable(name='test', config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_quantization_wrong(tunable_type: str) -> None:
    """
    Instantiate a numerical tunable with invalid number of quantization points.
    """
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "quantization": 0,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name='test', config=config)


def test_bad_type() -> None:
    """
    Disallow bad types.
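A closing usage sketch mirroring the tests above: build a Tunable from a config dict and read the new properties back. The import path matches the mlos_bench source tree; the parameter name and values are illustrative.

import json

from mlos_bench.tunables.tunable import Tunable

config = json.loads("""
{
    "type": "float",
    "range": [1, 100],
    "log": true,
    "quantization": 10,
    "default": 1
}
""")

tunable = Tunable(name="example_param", config=config)
assert tunable.is_log
assert tunable.quantization == 10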