Docs: Fix issues in callback docs (#867)
- Added/removed some blank lines for consistency
- Some entries like 'event_' were interpreted as links, so I added code
  markers around them.
- Added usage example to TensorBoard docstring
- Fixed a copy paste error in WandbLogger
- Some links were not parsed correctly, so I used different syntax
- Fixed issue in code example of SacredLogger
- Remove outdated comment in probabilistic.py docstr
BenjaminBossan committed Jul 5, 2022
1 parent 507709a commit d17e3bc
Showing 3 changed files with 51 additions and 45 deletions.
91 changes: 50 additions & 41 deletions skorch/callbacks/logging.py
@@ -148,10 +148,9 @@ class NeptuneLogger(Callback):
manager.
keys_ignored : str or list of str (default=None)
- Key or list of keys that should not be logged to
- Neptune. Note that in addition to the keys provided by the
- user, keys such as those starting with 'event_' or ending on
- '_best' are ignored by default.
+ Key or list of keys that should not be logged to Neptune. Note that in
+ addition to the keys provided by the user, keys such as those starting
+ with ``'event_'`` or ending on ``'_best'`` are ignored by default.
Attributes
----------
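For context, and not part of this diff: a minimal sketch of how ``keys_ignored`` is typically passed to this callback. The ``experiment`` object, ``ClassifierModule``, ``X``, and ``y`` are assumed to exist (names are illustrative only):

>>> # sketch only: skip the 'dur' column in addition to the default
>>> # 'event_*' / '*_best' keys
>>> from skorch import NeuralNetClassifier
>>> from skorch.callbacks import NeptuneLogger
>>> logger = NeptuneLogger(experiment, keys_ignored=['dur'])
>>> net = NeuralNetClassifier(ClassifierModule, callbacks=[logger])
>>> net.fit(X, y)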
@@ -208,6 +207,7 @@ def on_train_end(self, net, **kwargs):
    if self.close_after_train:
        self.experiment.stop()

+
class WandbLogger(Callback):
"""Logs best model and metrics to `Weights & Biases <https://docs.wandb.com/>`_
@@ -249,10 +249,10 @@ class WandbLogger(Callback):
to your Run on W&B servers.
keys_ignored : str or list of str (default=None)
- Key or list of keys that should not be logged to
- tensorboard. Note that in addition to the keys provided by the
- user, keys such as those starting with 'event_' or ending on
- '_best' are ignored by default.
+ Key or list of keys that should not be logged to wandb. Note that in
+ addition to the keys provided by the user, keys such as those starting
+ with ``'event_'`` or ending on ``'_best'`` are ignored by default.
"""

def __init__(
@@ -319,10 +319,10 @@ class PrintLog(Callback):
Parameters
----------
keys_ignored : str or list of str (default=None)
- Key or list of keys that should not be part of the printed
- table. Note that in addition to the keys provided by the user,
- keys such as those starting with 'event_' or ending on '_best'
- are ignored by default.
+ Key or list of keys that should not be part of the printed table. Note
+ that in addition to the keys provided by the user, keys such as those
+ starting with ``'event_'`` or ending on ``'_best'`` are ignored by
+ default.
sink : callable (default=print)
The target that the output string is sent to. By default, the
@@ -607,27 +607,39 @@ def rename_tensorboard_key(key):
class TensorBoard(Callback):
"""Logs results from history to TensorBoard
"TensorBoard provides the visualization and tooling needed for
machine learning experimentation" (tensorboard_)
"TensorBoard provides the visualization and tooling needed for machine
learning experimentation" (`offical docs
<https://www.tensorflow.org/tensorboard/>`_).
Use this callback to automatically log all interesting values from
your net's history to tensorboard after each epoch.
Use this callback to automatically log all interesting values from your
net's history to tensorboard after each epoch.
Examples
--------
Here is the standard way of using the callback:
>>> # Example: normal usage
>>> from skorch.callbacks import TensorBoard
>>> from torch.utils.tensorboard import SummaryWriter
>>> writer = SummaryWriter(...)
>>> net = NeuralNet(..., callbacks=[TensorBoard(writer)])
>>> net.fit(X, y)
The best way to log additional information is to subclass this
callback and add your code to one of the ``on_*`` methods.
- Examples
- --------
- >>> # Example to log the bias parameter as a histogram
+ >>> # Example: log the bias parameter as a histogram
>>> def extract_bias(module):
... return module.hidden.bias
>>> # override on_epoch_end
>>> class MyTensorBoard(TensorBoard):
... def on_epoch_end(self, net, **kwargs):
... bias = extract_bias(net.module_)
... epoch = net.history[-1, 'epoch']
... self.writer.add_histogram('bias', bias, global_step=epoch)
... super().on_epoch_end(net, **kwargs) # call super last
+ >>> # other code
+ >>> net = NeuralNet(..., callbacks=[MyTensorBoard(writer)])
Parameters
----------
@@ -641,10 +653,9 @@ class TensorBoard(Callback):
manager.
keys_ignored : str or list of str (default=None)
- Key or list of keys that should not be logged to
- tensorboard. Note that in addition to the keys provided by the
- user, keys such as those starting with 'event_' or ending on
- '_best' are ignored by default.
+ Key or list of keys that should not be logged to tensorboard. Note that in
+ addition to the keys provided by the user, keys such as those starting
+ with ``'event_'`` or ending on ``'_best'`` are ignored by default.
key_mapper : callable or function (default=rename_tensorboard_key)
This function maps a key name from the history to a tag in
@@ -654,8 +665,6 @@ class TensorBoard(Callback):
callback will prefix all keys that start with "train" or "valid"
with the "Loss/" prefix.
- .. _tensorboard: https://www.tensorflow.org/tensorboard/
"""
def __init__(
self,
@@ -747,32 +756,32 @@ class SacredLogger(Callback):
To use this logger, you first have to install Sacred:
- $ python -m pip install sacred
+ .. code-block:: bash
+
+     python -m pip install sacred
- You might also install pymongo to use a mongodb backend. See the upstream_
- documentation for more details. Once you have installed it, you can set up
- a simple experiment and pass this Logger as a callback to your skorch
- estimator:
+ You might also install pymongo to use a mongodb backend. See the `upstream
+ documentation <https://github.com/IDSIA/sacred#installing>`_ for more
+ details. Once you have installed it, you can set up a simple experiment and
+ pass this Logger as a callback to your skorch estimator:
- # contents of sacred-experiment.py
+ Examples
+ --------
+ >>> # contents of sacred-experiment.py
>>> import numpy as np
>>> from sacred import Experiment
>>> from sklearn.datasets import make_classification
>>> from skorch.callbacks.logging import SacredLogger
>>> from skorch.callbacks.scoring import EpochScoring
>>> from skorch import NeuralNetClassifier
>>> from skorch.toy import make_classifier
>>> ex = Experiment()
>>> @ex.config
>>> def my_config():
... max_epochs = 20
... lr = 0.01
>>> X, y = make_classification()
>>> X, y = X.astype(np.float32), y.astype(np.int64)
>>> @ex.automain
>>> def main(_run, max_epochs, lr):
... # Take care to add additional scoring callbacks *before* the logger.
@@ -786,12 +795,14 @@
... net.fit(X, y)
Then call this from the command line, e.g. like this:
- ``python sacred-script.py with max_epochs=15``
+ .. code-block:: bash
+
+     python sacred-script.py with max_epochs=15
You can also change other options on the command line and optionally
specify a backend.
Parameters
----------
experiment : sacred.Experiment
@@ -816,10 +827,8 @@
keys_ignored : str or list of str (default=None)
Key or list of keys that should not be logged to Sacred. Note that in
addition to the keys provided by the user, keys such as those starting
- with 'event_' or ending on '_best' are ignored by default.
+ with ``'event_'`` or ending on ``'_best'`` are ignored by default.
- .. _upstream: https://github.com/IDSIA/sacred#installing
"""

def __init__(
3 changes: 1 addition & 2 deletions skorch/callbacks/training.py
@@ -680,7 +680,7 @@ def on_train_begin(self, net,

class TrainEndCheckpoint(Callback):
"""Saves the model parameters, optimizer state, and history at the end of
- training. The default ``fn_prefix`` is 'train_end_'.
+ training. The default ``fn_prefix`` is ``'train_end_'``.
Examples
--------
@@ -698,7 +698,6 @@ class TrainEndCheckpoint(Callback):
Parameters
----------
f_params : file-like object, str, None (default='params.pt')
File path to the file or file-like object where the model
parameters should be saved. Pass ``None`` to disable saving
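For context, a hedged sketch of the ``fn_prefix`` behavior described above (the directory name and ``ClassifierModule``, ``X``, ``y`` are illustrative assumptions):

>>> # sketch only: with the defaults, parameters are saved once, at the end
>>> # of training, to e.g. 'exp1/train_end_params.pt'
>>> from skorch import NeuralNetClassifier
>>> from skorch.callbacks import TrainEndCheckpoint
>>> cb = TrainEndCheckpoint(dirname='exp1')
>>> net = NeuralNetClassifier(ClassifierModule, callbacks=[cb])
>>> net.fit(X, y)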
2 changes: 0 additions & 2 deletions skorch/probabilistic.py
@@ -1,7 +1,5 @@
"""Integrate GPyTorch for Gaussian Processes
- TODO: verify the assumptions being made and remove from here:
- The criterion always takes likelihood and module as input arguments
- Always optimize the negative objective function
- Need elaboration on how batching works - are distributions disjoint?
