import os
import time
import math
import pdb
import copy
import logging
import numpy as np
import ConfigSpace as CS
from hpbandster.core.master import Master
from hpbandster.optimizers.iterations import SuccessiveHalving
from hpbandster.optimizers.config_generators import RandomSampling
class HyperBand(Master):
def __init__(self, configspace = None,
eta=3, min_budget=0.01, max_budget=1,
**kwargs ):
"""
Hyperband implements hyperparameter optimization by sampling
candidates at random and "trying" them first, running them for
a specific budget. The approach is iterative, promising
candidates are run for a longer time, increasing the fidelity
for their performance. While this is a very efficient racing
approach, random sampling makes no use of the knowledge gained
about the candidates during optimization.
Parameters
----------
configspace: ConfigSpace object
valid representation of the search space
eta : float
In each iteration, a complete run of successive halving is executed. In it,
after evaluating each configuration on the same subset size, only a fraction of
1/eta of them 'advances' to the next round.
Must be greater than or equal to 2.
min_budget : float
The smallest budget to consider. Needs to be positive!
max_budget : float
The largest budget to consider. Needs to be larger than min_budget!
The budgets will be geometrically distributed
$\sim \eta^k$ for $k \in \{0, 1, \dots, num\_subsets - 1\}$.
"""
# TODO: Proper check for ConfigSpace object!
if configspace is None:
raise ValueError("You have to provide a valid ConfigSpace object")
super().__init__(config_generator=RandomSampling(configspace), **kwargs)
# Hyperband related stuff
self.eta = eta
self.min_budget = min_budget
self.max_budget = max_budget
# precompute some HB stuff
self.max_SH_iter = -int(np.log(min_budget/max_budget)/np.log(eta)) + 1
self.budgets = max_budget * np.power(eta, -np.linspace(self.max_SH_iter-1, 0, self.max_SH_iter))
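# With the defaults (eta=3, min_budget=0.01, max_budget=1) this gives
# max_SH_iter = 5 and budgets = max_budget * eta**[-4, -3, -2, -1, 0]
# = [1/81, 1/27, 1/9, 1/3, 1], i.e. a geometric grid with ratio eta whose
# smallest entry is the closest power of eta at or above min_budget.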
self.config.update({
'eta' : eta,
'min_budget' : min_budget,
'max_budget' : max_budget,
'budgets' : self.budgets,
'max_SH_iter': self.max_SH_iter,
})
def get_next_iteration(self, iteration, iteration_kwargs={}):
"""
Hyperband uses SuccessiveHalving for each iteration.
See Li et al. (2016) for reference.
Parameters
----------
iteration: int
the index of the iteration to be instantiated
Returns
-------
SuccessiveHalving: the SuccessiveHalving iteration with the
corresponding number of configurations
"""
# number of 'SH rungs'
s = self.max_SH_iter - 1 - (iteration%self.max_SH_iter)
# number of configurations in that bracket
n0 = int(np.floor((self.max_SH_iter)/(s+1)) * self.eta**s)
ns = [max(int(n0*(self.eta**(-i))), 1) for i in range(s+1)]
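# Worked example with the defaults (eta=3, min_budget=0.01, max_budget=1,
# hence max_SH_iter=5); each row is one bracket of the repeating schedule:
#   iteration 0: s=4, n0=81, ns=[81, 27, 9, 3, 1], budgets[-5:] (all five)
#   iteration 1: s=3, n0=27, ns=[27, 9, 3, 1],     budgets[-4:]
#   iteration 2: s=2, n0=9,  ns=[9, 3, 1],         budgets[-3:]
#   iteration 3: s=1, n0=6,  ns=[6, 2],            budgets[-2:]
#   iteration 4: s=0, n0=5,  ns=[5],               budgets[-1:]
# iteration 5 then starts the next Hyperband round, identical to iteration 0.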
return SuccessiveHalving(HPB_iter=iteration,
                         num_configs=ns,
                         budgets=self.budgets[(-s-1):],
                         config_sampler=self.config_generator.get_config,
                         **iteration_kwargs)
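# A minimal usage sketch (not executed here; MyWorker, the run_id, and the host
# below are illustrative assumptions following hpbandster's documented
# NameServer/Worker workflow, not part of this module):
#
#   import ConfigSpace as CS
#   import hpbandster.core.nameserver as hpns
#   from hpbandster.core.worker import Worker
#
#   class MyWorker(Worker):
#       def compute(self, config, budget, **kwargs):
#           # toy objective: quadratic in x, evaluated at the given budget
#           return {'loss': float((config['x'] - 0.5)**2), 'info': {'budget': budget}}
#
#   cs = CS.ConfigurationSpace()
#   cs.add_hyperparameter(CS.UniformFloatHyperparameter('x', lower=0.0, upper=1.0))
#
#   ns = hpns.NameServer(run_id='hb_example', host='127.0.0.1', port=None)
#   ns.start()
#   w = MyWorker(nameserver='127.0.0.1', run_id='hb_example')
#   w.run(background=True)
#
#   hb = HyperBand(configspace=cs, run_id='hb_example', nameserver='127.0.0.1',
#                  min_budget=1, max_budget=9, eta=3)
#   res = hb.run(n_iterations=4)
#   hb.shutdown(shutdown_workers=True)
#   ns.shutdown()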