Fix test for process_metrics to allow overwriting existing results
stijnh committed Apr 19, 2023
1 parent c5f36de commit f052353
Showing 2 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion kernel_tuner/runners/sequential.py
@@ -99,7 +99,7 @@ def run(self, parameter_space, tuning_options):
                 logging.debug('kernel configuration was skipped silently due to compile or runtime failure')
 
             # only compute metrics on configs that have not errored
-            if not isinstance(params.get(tuning_options.objective), ErrorConfig):
+            if tuning_options.metrics and not isinstance(params.get(tuning_options.objective), ErrorConfig):
                 params = process_metrics(params, tuning_options.metrics)
 
             # get the framework time by estimating based on other times
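
For context on the sequential.py change: the added tuning_options.metrics check skips metric processing entirely when the user defined no metrics, since process_metrics rejects an empty metrics argument (the updated test below exercises exactly that). A minimal sketch of the guard's effect, using hypothetical stand-in values rather than Kernel Tuner's real tuning_options object:

    # Hypothetical stand-ins, not Kernel Tuner's actual objects.
    metrics = None            # the user defined no metrics
    params = {"time": 1.23}   # a successfully benchmarked configuration

    # New behavior: only call process_metrics when metrics are defined,
    # instead of passing None/{} and triggering a ValueError.
    if metrics:
        params = process_metrics(params, metrics)
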
6 changes: 3 additions & 3 deletions test/test_util_functions.py
@@ -667,15 +667,15 @@ def test_process_metrics():
     with pytest.raises(ValueError):
         params = process_metrics(params, {})
 
-    # test ValueError is raised when b already exists in params
+    # test if a metric overrides any existing metrics
     params = {
         "x": 15,
         "b": 12
     }
     metrics = OrderedDict()
     metrics["b"] = "x"
-    with pytest.raises(ValueError):
-        params = process_metrics(params, metrics)
+    params = process_metrics(params, metrics)
+    assert params["b"] == 15
 
 
 def test_parse_restrictions():
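
The updated test encodes the new semantics: a metric whose name collides with an existing key in params now silently overwrites it instead of raising ValueError. A self-contained sketch of that behavior, assuming a simplified process_metrics that evaluates string expressions against params (an illustration, not Kernel Tuner's actual implementation):

    from collections import OrderedDict

    def process_metrics_sketch(params, metrics):
        # Simplified stand-in: metrics is an OrderedDict mapping a name
        # to a Python expression string evaluated against params.
        if not isinstance(metrics, OrderedDict):
            raise ValueError("metrics should be an OrderedDict")
        for name, expr in metrics.items():
            params[name] = eval(expr, {}, params)  # overwrites existing keys
        return params

    params = {"x": 15, "b": 12}
    metrics = OrderedDict()
    metrics["b"] = "x"    # metric "b" takes the value of "x"
    params = process_metrics_sketch(params, metrics)
    assert params["b"] == 15    # old value 12 overwritten, no ValueError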
