
Commit

Update docs (#501)
ethanwharris committed Jan 29, 2019
1 parent 1a0bcf0 commit ebc1879
Showing 2 changed files with 18 additions and 2 deletions.
16 changes: 16 additions & 0 deletions torchbearer/callbacks/torch_scheduler.py
@@ -34,6 +34,9 @@ def on_end_epoch(self, state):

class LambdaLR(TorchScheduler):
"""
Args:
step_on_batch (bool): If True, step will be called on each training iteration rather than on each epoch
See:
`PyTorch LambdaLR <http://pytorch.org/docs/master/optim.html#torch.optim.lr_scheduler.LambdaLR>`_
"""
@@ -45,6 +48,9 @@ def __init__(self, lr_lambda, last_epoch=-1, step_on_batch=False):

class StepLR(TorchScheduler):
"""
Args:
step_on_batch (bool): If True, step will be called on each training iteration rather than on each epoch
See:
`PyTorch StepLR <http://pytorch.org/docs/master/optim.html#torch.optim.lr_scheduler.StepLR>`_
"""
@@ -57,6 +63,9 @@ def __init__(self, step_size, gamma=0.1, last_epoch=-1, step_on_batch=False):

class MultiStepLR(TorchScheduler):
"""
Args:
step_on_batch (bool): If True, step will be called on each training iteration rather than on each epoch
See:
`PyTorch MultiStepLR <http://pytorch.org/docs/master/optim.html#torch.optim.lr_scheduler.MultiStepLR>`_
"""
@@ -69,6 +78,9 @@ def __init__(self, milestones, gamma=0.1, last_epoch=-1, step_on_batch=False):

class ExponentialLR(TorchScheduler):
"""
Args:
step_on_batch (bool): If True, step will be called on each training iteration rather than on each epoch
See:
`PyTorch ExponentialLR <http://pytorch.org/docs/master/optim.html#torch.optim.lr_scheduler.ExponentialLR>`_
"""
@@ -80,6 +92,9 @@ def __init__(self, gamma, last_epoch=-1, step_on_batch=False):

class CosineAnnealingLR(TorchScheduler):
"""
Args:
step_on_batch (bool): If True, step will be called on each training iteration rather than on each epoch
See:
`PyTorch CosineAnnealingLR <http://pytorch.org/docs/master/optim.html#torch.optim.lr_scheduler.CosineAnnealingLR>`_
"""
Expand All @@ -94,6 +109,7 @@ class ReduceLROnPlateau(TorchScheduler):
"""
Args:
monitor (str): The name of the quantity in metrics to monitor. (Default value = 'val_loss')
step_on_batch (bool): If True, step will be called on each training iteration rather than on each epoch
See:
`PyTorch ReduceLROnPlateau <http://pytorch.org/docs/master/optim.html#torch.optim.lr_scheduler.ReduceLROnPlateau>`_
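
Because plateau detection is driven by a monitored metric, this callback is normally left to step once per epoch, after the monitored quantity has been computed. A sketch; factor and patience are assumed to be forwarded to torch.optim.lr_scheduler.ReduceLROnPlateau, as only monitor and step_on_batch are documented in this diff:

from torchbearer.callbacks import ReduceLROnPlateau

# Halve the learning rate when 'val_loss' has not improved for three epochs.
# step_on_batch keeps its default (False), so the check runs once per epoch.
plateau = ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=3)
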
4 changes: 2 additions & 2 deletions torchbearer/variational/distributions.py
@@ -155,8 +155,8 @@ def rsample(self, sample_shape=torch.Size()):

def log_prob(self, value):
"""Calculates the log probability that the given value was drawn from this distribution. Since this distribution
- is uniform, the log probability is zero for all values in the range ``[low, high)`` and -inf elsewhere. This
- function is therefore non-differentiable.
+ is uniform, the log probability is ``-log(high - low)`` for all values in the range ``[low, high)`` and -inf
+ elsewhere. This function is therefore only piecewise differentiable.
Args:
value (torch.Tensor, Number): The sampled value
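
A quick numerical check of the corrected statement, using only plain torch (no torchbearer class is assumed): a uniform density on [low, high) is 1 / (high - low), so the log probability of any in-range value is -log(high - low). Since that constant does not depend on the value, the gradient with respect to the sample is zero inside the support and undefined at the boundaries, i.e. the function is piecewise differentiable.

import math
import torch

low, high = 0.0, 4.0
value = torch.tensor(1.5)

# Log probability of an in-range sample: -log(high - low) = -log(4) ~ -1.3863.
log_prob = -torch.log(torch.tensor(high - low))
assert abs(log_prob.item() + math.log(4.0)) < 1e-6

# torch.distributions.Uniform agrees for values inside [low, high).
reference = torch.distributions.Uniform(low, high).log_prob(value)
assert torch.allclose(log_prob, reference)
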
