Skip to content

Commit

Permalink
Fix cite decorator (#487)
Browse files Browse the repository at this point in the history
* Fix cite decorator

* Remove dead references in docs

* Update CHANGELOG.md
  • Loading branch information
ethanwharris committed Jan 21, 2019
1 parent c40fa70 commit 099bc55
Show file tree
Hide file tree
Showing 5 changed files with 22 additions and 21 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
### Removed
### Fixed
- Fixed a bug in the weight decay callback which would result in potentially negative decay (now just uses torch.norm)
- Fixed a bug in the cite decorator causing the citation to not show up correctly

## [0.2.6] - 2018-12-19
### Added
Expand Down
27 changes: 21 additions & 6 deletions torchbearer/cite.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,32 @@
def cite(bibtex):
    """A decorator which adds a reference to the Google style docstring of the given object. The given bibtex
    string is inserted as a reStructuredText literal block immediately before the first ``Args:`` or
    ``Returns:`` section. If the docstring contains neither section, the citation is appended at the end.

    Args:
        bibtex (str): The bibtex string to insert

    Returns:
        The decorator
    """
    def decorator(inner):
        # Tolerate objects with no docstring instead of raising AttributeError on None.split.
        doc = (inner.__doc__ or '').split('\n')

        # Locate the first section header and measure its indentation.
        i = len(doc)  # default insertion point: end of the docstring
        s = 0
        for index, line in enumerate(doc):
            if line.strip() in ('Args:', 'Returns:'):
                i = index
                # Count leading spaces only — trailing whitespace on the header
                # line must not inflate the computed indent.
                s = len(line) - len(line.lstrip(' '))
                break

        # Build the literal block, indented one level (4 spaces) deeper than the header.
        spaces = ' ' * (s + 4)
        to_insert = ' ' * s + '::\n\n' + spaces
        to_insert += bibtex.strip().replace('\n', '\n' + spaces).rstrip()

        # Insert the citation followed by a blank line separating it from the section.
        doc.insert(i, '')
        doc.insert(i, to_insert)
        inner.__doc__ = '\n'.join(doc)
        return inner
    return decorator
1 change: 0 additions & 1 deletion torchbearer/trial.py
Original file line number Diff line number Diff line change
Expand Up @@ -303,7 +303,6 @@ class Trial(object):
"""
The trial class contains all of the required hyper-parameters for model running in torchbearer and presents an
API for model fitting, evaluating and predicting.
:bib:
Args:
model (:class:`torch.nn.Module`): The base pytorch model
Expand Down
12 changes: 0 additions & 12 deletions torchbearer/variational/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,18 +33,6 @@
.. automodule:: torchbearer.variational.visualisation
:members:
:undoc-members:
Implementations
------------------------------------
.. automodule:: torchbearer.variational.beta_vae
:members:
.. automodule:: torchbearer.variational.info_vae
:members:
.. automodule:: torchbearer.variational.isolating_sources
:members:
"""

from .auto_encoder import *
Expand Down
2 changes: 0 additions & 2 deletions torchbearer/variational/divergence.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,6 @@ def with_sum_sum_reduction(self):
@cite(beta_vae)
def with_beta(self, beta):
"""Multiply the divergence by the given beta, as introduced by beta-vae.
:bib:
Args:
beta (float): The beta (> 1) to multiply by.
Expand All @@ -130,7 +129,6 @@ def beta_div(loss):
def with_linear_capacity(self, min_c=0, max_c=25, steps=100000, gamma=1000):
"""Limit divergence by capacity, linearly increased from min_c to max_c for steps, as introduced in
`Understanding disentangling in beta-VAE`.
:bib:
Args:
min_c (float): Minimum capacity
Expand Down

0 comments on commit 099bc55

Please sign in to comment.