Loggers
=======

Lightning supports the most popular logging frameworks (TensorBoard, Comet, Weights and Biases, etc.).
To use a logger, simply pass it into the :class:`~pytorch_lightning.trainer.trainer.Trainer`.
Lightning uses TensorBoard by default.

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning import loggers

    tb_logger = loggers.TensorBoardLogger('logs/')
    trainer = Trainer(logger=tb_logger)

Choose from any of the others such as MLflow, Comet, Neptune, WandB, ...

.. code-block:: python

    comet_logger = loggers.CometLogger(save_dir='logs/')
    trainer = Trainer(logger=comet_logger)

To use multiple loggers, simply pass in a ``list`` or ``tuple`` of loggers.

.. code-block:: python

    tb_logger = loggers.TensorBoardLogger('logs/')
    comet_logger = loggers.CometLogger(save_dir='logs/')
    trainer = Trainer(logger=[tb_logger, comet_logger])

.. note::
    All loggers log by default to ``os.getcwd()``. To change the path without creating a logger, set
    ``Trainer(default_root_dir='/your/path/to/save/checkpoints')``.

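For example, a minimal sketch that keeps the default TensorBoard logger but redirects its output (the path is a placeholder):

.. code-block:: python

    from pytorch_lightning import Trainer

    # no logger is constructed; the default logger and checkpoints are
    # written under the given directory instead of os.getcwd()
    trainer = Trainer(default_root_dir='/your/path/to/save/checkpoints')
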
----------

Custom Logger
-------------

You can implement your own logger by writing a class that inherits from
:class:`~pytorch_lightning.loggers.LightningLoggerBase`. Use the :func:`~pytorch_lightning.loggers.base.rank_zero_only`
decorator to make sure that only the first process in DDP training logs data.

.. code-block:: python

    from pytorch_lightning.utilities import rank_zero_only
    from pytorch_lightning.loggers import LightningLoggerBase


    class MyLogger(LightningLoggerBase):

        @rank_zero_only
        def log_hyperparams(self, params):
            # params is an argparse.Namespace
            # your code to record hyperparameters goes here
            pass

        @rank_zero_only
        def log_metrics(self, metrics, step):
            # metrics is a dictionary of metric names and values
            # your code to record metrics goes here
            pass

        def save(self):
            # Optional. Any code necessary to save logger data goes here
            pass

        @rank_zero_only
        def finalize(self, status):
            # Optional. Any code that needs to be run after training
            # finishes goes here
            pass

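Once defined, the custom logger is passed to the :class:`~pytorch_lightning.trainer.trainer.Trainer` like any built-in one (``MyLogger`` is the example class above):

.. code-block:: python

    from pytorch_lightning import Trainer

    trainer = Trainer(logger=MyLogger())
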
If you write a logger that may be useful to others, please send
a pull request to add it to Lightning!

----------

Using loggers
-------------

Call the logger anywhere except ``__init__`` in your
:class:`~pytorch_lightning.core.lightning.LightningModule` by doing:

.. code-block:: python

    from pytorch_lightning import LightningModule


    class LitModel(LightningModule):
        def training_step(self, batch, batch_idx):
            # example
            self.logger.experiment.whatever_method_summary_writer_supports(...)

            # example if logger is a tensorboard logger
            self.logger.experiment.add_image('images', grid, 0)
            self.logger.experiment.add_graph(model, images)

        def any_lightning_module_function_or_hook(self):
            self.logger.experiment.add_histogram(...)

Read more in the `Experiment Logging use case <./experiment_logging.html>`_.

------

Supported Loggers
-----------------

The following loggers are supported.

Comet
^^^^^

.. autoclass:: pytorch_lightning.loggers.comet.CometLogger
    :noindex:

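The example earlier on this page uses offline mode via ``save_dir``; below is a hedged sketch of online mode, assuming the constructor accepts ``api_key``, ``workspace``, and ``project_name`` (all values are placeholders):

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import CometLogger

    comet_logger = CometLogger(
        api_key='YOUR_COMET_API_KEY',  # placeholder
        workspace='your_workspace',    # placeholder
        project_name='your_project',   # placeholder
    )
    trainer = Trainer(logger=comet_logger)
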
MLflow
^^^^^^

.. autoclass:: pytorch_lightning.loggers.mlflow.MLFlowLogger
    :noindex:

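A minimal sketch of wiring it up, assuming the constructor accepts ``experiment_name`` and ``tracking_uri`` (the local file URI is illustrative):

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import MLFlowLogger

    mlf_logger = MLFlowLogger(experiment_name='default', tracking_uri='file:./mlruns')
    trainer = Trainer(logger=mlf_logger)
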
Neptune
^^^^^^^

.. autoclass:: pytorch_lightning.loggers.neptune.NeptuneLogger
    :noindex:

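A minimal sketch, assuming the constructor accepts ``api_key``, ``project_name``, and ``experiment_name`` (all values below are placeholders you would replace with your own):

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import NeptuneLogger

    neptune_logger = NeptuneLogger(
        api_key='YOUR_NEPTUNE_API_TOKEN',            # placeholder
        project_name='your_workspace/your_project',  # placeholder
        experiment_name='default',                   # placeholder
    )
    trainer = Trainer(logger=neptune_logger)
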
TensorBoard
^^^^^^^^^^^

.. autoclass:: pytorch_lightning.loggers.tensorboard.TensorBoardLogger
    :noindex:

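Beyond the default usage shown earlier, a hedged sketch that groups runs under a run name, assuming the constructor accepts ``save_dir`` and ``name`` (values are illustrative):

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import TensorBoardLogger

    tb_logger = TensorBoardLogger(save_dir='logs/', name='my_model')
    trainer = Trainer(logger=tb_logger)
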
Test-tube
^^^^^^^^^

.. autoclass:: pytorch_lightning.loggers.test_tube.TestTubeLogger
    :noindex:

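A minimal sketch, assuming the constructor accepts ``save_dir`` and ``name`` (values are placeholders):

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import TestTubeLogger

    tt_logger = TestTubeLogger(save_dir='logs/', name='my_exp')
    trainer = Trainer(logger=tt_logger)
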
Trains
^^^^^^

.. autoclass:: pytorch_lightning.loggers.trains.TrainsLogger
    :noindex:

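A minimal sketch, assuming the constructor accepts ``project_name`` and ``task_name`` (values are placeholders):

.. code-block:: python

    from pytorch_lightning import Trainer
    from pytorch_lightning.loggers import TrainsLogger

    trains_logger = TrainsLogger(project_name='examples', task_name='lightning_run')
    trainer = Trainer(logger=trains_logger)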