Defining, tuning and tracking hyperparameters in machine learning experiments can get messy. hp uses Python classes to declaratively define your hyperparameters.
- Type-annotated container for your parameters
- Automatically generated command-line interface
- Handles saving and loading of parameters
Install with pip:
pip install hp
import hp
# Declarative hyperparameter container: each class attribute is one tunable
# parameter. The annotation (hp.Range / hp.Choice) constrains the search
# space; the assigned value is the default.
class Params(hp.HyperParams):
    learning_rate: hp.Range(0.001, 0.1) = 0.03     # continuous range — presumably inclusive bounds; confirm in hp docs
    optimizer: hp.Choice(('SGD', 'Adam')) = 'SGD'  # categorical choice
    batch_size = 32                                # unconstrained parameter with a plain default
    seed = 1                                       # unconstrained parameter (RNG seed)
# parse from command line arguments
params = Params.from_command()
# parse from environment variables (fields looked up as HP_<NAME> given this prefix)
params = Params.from_env(prefix='HP_')
params = Params.from_constants() # load all CAP_CASE variables
# persist to / restore from YAML — NOTE(review): saving is a module-level
# function (hp.save) while loading is a classmethod (Params.load)
hp.save(params, 'params.yaml')
params = Params.load('params.yaml')
# An empty container can also be built up implicitly through binding:
# bound keyword defaults become managed parameters.
params = hp.HyperParams()

@params.bind
def train(epochs=10):
    pass

train()          # use current param value (default to function default)
train(epochs=4)  # override params

# bind a single parameter by name to an external attribute
trainer.batch_size = params.bind('batch_size')
# bind another object's fields, mapping its 'lr' field to 'learning_rate'
params.bind(Optimizer, fields={'lr': 'learning_rate'})
# grid search — presumably iterates the Cartesian product of the
# declared search space; confirm against hp docs
for params in Params.grid():
    pass

# random samples without replacement
for params in Params.samples():
    pass
# Change hook: invoked on each parameter assignment. The example output
# below shows the old value (0.03) is still readable via params[key] when
# the hook runs, i.e. the hook fires before the new value is stored.
@params.on_change
def log_changes(params, key, value):
    print(f"changing {key} from {params[key]} to {value}")

params.learning_rate = 0.001
# >> changing learning_rate from 0.03 to 0.001