hyperband.py
import os
import time
import math
import pdb
import copy
import logging

import numpy as np

import ConfigSpace as CS

from hpbandster.core.master import Master
from hpbandster.optimizers.iterations import SuccessiveHalving
from hpbandster.optimizers.config_generators import RandomSampling


class HyperBand(Master):
    def __init__(self, configspace=None,
                 eta=3, min_budget=0.01, max_budget=1,
                 **kwargs):
        r"""
        Hyperband implements hyperparameter optimization by sampling
        candidates at random and "trying" them first, running them for
        a specific budget. The approach is iterative: promising
        candidates are run for a longer time, increasing the fidelity
        of their performance estimate. While this is a very efficient
        racing approach, random sampling makes no use of the knowledge
        gained about the candidates during optimization.

        Parameters
        ----------
        configspace : ConfigSpace object
            valid representation of the search space
        eta : float
            In each iteration, a complete run of successive halving is
            executed. In it, after evaluating each configuration on the same
            subset size, only a fraction of 1/eta of them 'advances' to the
            next round. Must be greater than or equal to 2.
        min_budget : float
            The smallest budget to consider. Needs to be positive!
        max_budget : float
            The largest budget to consider. Needs to be larger than
            min_budget! The budgets will be geometrically distributed
            $\sim \eta^k$ for $k \in [0, 1, \dots, num\_subsets - 1]$.
        """
        # TODO: Proper check for ConfigSpace object!
        if configspace is None:
            raise ValueError("You have to provide a valid ConfigSpace object")

        super().__init__(config_generator=RandomSampling(configspace), **kwargs)

        # Hyperband related stuff
        self.eta = eta
        self.min_budget = min_budget
        self.max_budget = max_budget

        # precompute some HB stuff: the number of successive-halving rungs and
        # the geometric budget schedule between min_budget and max_budget
        self.max_SH_iter = -int(np.log(min_budget/max_budget)/np.log(eta)) + 1
        self.budgets = max_budget * np.power(eta, -np.linspace(self.max_SH_iter-1, 0, self.max_SH_iter))
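        # Note: with the default arguments (eta=3, min_budget=0.01, max_budget=1)
        # this gives max_SH_iter = 5 and budgets = [3**-4, 3**-3, 3**-2, 3**-1, 1],
        # i.e. five geometrically spaced fidelities ending at max_budget.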
        self.config.update({
            'eta':         eta,
            'min_budget':  min_budget,
            'max_budget':  max_budget,
            'budgets':     self.budgets,
            'max_SH_iter': self.max_SH_iter,
        })

    def get_next_iteration(self, iteration, iteration_kwargs={}):
        """
        Hyperband uses SuccessiveHalving for each iteration.
        See Li et al. (2016) for reference.

        Parameters
        ----------
        iteration : int
            the index of the iteration to be instantiated

        Returns
        -------
        SuccessiveHalving: the SuccessiveHalving iteration with the
            corresponding number of configurations
        """
        # number of 'SH rungs'
        s = self.max_SH_iter - 1 - (iteration % self.max_SH_iter)
        # number of configurations in that bracket
        n0 = int(np.floor(self.max_SH_iter / (s + 1)) * self.eta**s)
        ns = [max(int(n0 * (self.eta**(-i))), 1) for i in range(s + 1)]
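        # Example (with the defaults, so max_SH_iter=5 and eta=3): iteration 0
        # gives s=4 and ns=[81, 27, 9, 3, 1] starting at the smallest budget,
        # while iteration 4 gives s=0 and ns=[5], run only on max_budget.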
        return SuccessiveHalving(HPB_iter=iteration,
                                 num_configs=ns,
                                 budgets=self.budgets[(-s-1):],
                                 config_sampler=self.config_generator.get_config,
                                 **iteration_kwargs)
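
For context, a minimal usage sketch of this optimizer (not part of hyperband.py). It follows the pattern of the hpbandster examples: the MyWorker class, its compute() return format, and the nameserver setup are assumptions based on those examples and may vary slightly across library versions.

import ConfigSpace as CS
import hpbandster.core.nameserver as hpns
from hpbandster.core.worker import Worker
from hpbandster.optimizers import HyperBand


class MyWorker(Worker):
    # Evaluates one configuration at the requested budget and reports a loss.
    def compute(self, config, budget, **kwargs):
        return {'loss': (config['x'] - 0.5) ** 2, 'info': {'budget': budget}}


# Search space with a single continuous hyperparameter.
cs = CS.ConfigurationSpace()
cs.add_hyperparameter(CS.UniformFloatHyperparameter('x', lower=0.0, upper=1.0))

# Local nameserver so that master and worker can find each other.
ns = hpns.NameServer(run_id='hb_example', host='127.0.0.1', port=None)
ns.start()

worker = MyWorker(nameserver='127.0.0.1', run_id='hb_example')
worker.run(background=True)

# run_id and nameserver are forwarded to the Master base class via **kwargs.
hb = HyperBand(configspace=cs, run_id='hb_example', nameserver='127.0.0.1',
               eta=3, min_budget=1, max_budget=9)
result = hb.run(n_iterations=4)

hb.shutdown(shutdown_workers=True)
ns.shutdown()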