|
3 | 3 | To use a logger, simply pass it into the :class:`~pytorch_lightning.trainer.trainer.Trainer`. |
4 | 4 | Lightning uses TensorBoard by default. |
5 | 5 |
|
6 | | ->>> from pytorch_lightning import Trainer |
7 | | ->>> from pytorch_lightning import loggers |
8 | | ->>> tb_logger = loggers.TensorBoardLogger('logs/') |
9 | | ->>> trainer = Trainer(logger=tb_logger) |
| 6 | +.. code-block:: python |
| 7 | +
|
| 8 | + from pytorch_lightning import Trainer |
| 9 | + from pytorch_lightning import loggers |
| 10 | + tb_logger = loggers.TensorBoardLogger('logs/') |
| 11 | + trainer = Trainer(logger=tb_logger) |
10 | 12 |
|
11 | 13 | Choose from any of the others such as MLflow, Comet, Neptune, WandB, ... |
12 | 14 |
|
13 | | ->>> comet_logger = loggers.CometLogger(save_dir='logs/') |
14 | | ->>> trainer = Trainer(logger=comet_logger) |
| 15 | +.. code-block:: python |
| 16 | +
|
| 17 | + comet_logger = loggers.CometLogger(save_dir='logs/') |
| 18 | + trainer = Trainer(logger=comet_logger) |
15 | 19 |
|
16 | 20 | To use multiple loggers, simply pass in a ``list`` or ``tuple`` of loggers ... |
17 | 21 |
|
18 | | ->>> tb_logger = loggers.TensorBoardLogger('logs/') |
19 | | ->>> comet_logger = loggers.CometLogger(save_dir='logs/') |
20 | | ->>> trainer = Trainer(logger=[tb_logger, comet_logger]) |
| 22 | +.. code-block:: python |
| 23 | +
|
| 24 | + tb_logger = loggers.TensorBoardLogger('logs/') |
| 25 | + comet_logger = loggers.CometLogger(save_dir='logs/') |
| 26 | + trainer = Trainer(logger=[tb_logger, comet_logger]) |
21 | 27 |
|
22 | 28 | Note: |
23 | 29 | All loggers log by default to ``os.getcwd()``. To change the path without creating a logger set |
|
30 | 36 | :class:`LightningLoggerBase`. Use the :func:`~pytorch_lightning.loggers.base.rank_zero_only` |
31 | 37 | decorator to make sure that only the first process in DDP training logs data. |
32 | 38 |
|
33 | | ->>> from pytorch_lightning.utilities import rank_zero_only |
34 | | ->>> from pytorch_lightning.loggers import LightningLoggerBase |
35 | | ->>> class MyLogger(LightningLoggerBase): |
36 | | -... |
37 | | -... @rank_zero_only |
38 | | -... def log_hyperparams(self, params): |
39 | | -... # params is an argparse.Namespace |
40 | | -... # your code to record hyperparameters goes here |
41 | | -... pass |
42 | | -... |
43 | | -... @rank_zero_only |
44 | | -... def log_metrics(self, metrics, step): |
45 | | -... # metrics is a dictionary of metric names and values |
46 | | -... # your code to record metrics goes here |
47 | | -... pass |
48 | | -... |
49 | | -... def save(self): |
50 | | -... # Optional. Any code necessary to save logger data goes here |
51 | | -... pass |
52 | | -... |
53 | | -... @rank_zero_only |
54 | | -... def finalize(self, status): |
55 | | -... # Optional. Any code that needs to be run after training |
56 | | -... # finishes goes here |
57 | | -... pass |
| 39 | +.. code-block:: python |
| 40 | +
|
| 41 | + from pytorch_lightning.utilities import rank_zero_only |
| 42 | + from pytorch_lightning.loggers import LightningLoggerBase |
| 43 | + class MyLogger(LightningLoggerBase): |
| 44 | +
|
| 45 | + @rank_zero_only |
| 46 | + def log_hyperparams(self, params): |
| 47 | + # params is an argparse.Namespace |
| 48 | + # your code to record hyperparameters goes here |
| 49 | + pass |
| 50 | +
|
| 51 | + @rank_zero_only |
| 52 | + def log_metrics(self, metrics, step): |
| 53 | + # metrics is a dictionary of metric names and values |
| 54 | + # your code to record metrics goes here |
| 55 | + pass |
| 56 | +
|
| 57 | + def save(self): |
| 58 | + # Optional. Any code necessary to save logger data goes here |
| 59 | + pass |
| 60 | +
|
| 61 | + @rank_zero_only |
| 62 | + def finalize(self, status): |
| 63 | + # Optional. Any code that needs to be run after training |
| 64 | + # finishes goes here |
| 65 | + pass |
58 | 66 |
|
59 | 67 | If you write a logger that may be useful to others, please send |
60 | 68 | a pull request to add it to Lightning!
|
65 | 73 | Call the logger anywhere except ``__init__`` in your |
66 | 74 | :class:`~pytorch_lightning.core.lightning.LightningModule` by doing: |
67 | 75 |
|
68 | | ->>> from pytorch_lightning import LightningModule |
69 | | ->>> class LitModel(LightningModule): |
70 | | -... def training_step(self, batch, batch_idx): |
71 | | -... # example |
72 | | -... self.logger.experiment.whatever_method_summary_writer_supports(...) |
| 76 | +.. code-block:: python |
| 77 | +
|
| 78 | + from pytorch_lightning import LightningModule |
| 79 | + class LitModel(LightningModule): |
| 80 | + def training_step(self, batch, batch_idx): |
| 81 | + # example |
| 82 | + self.logger.experiment.whatever_method_summary_writer_supports(...) |
73 | 83 |
|
74 | 84 | # example if logger is a tensorboard logger |
75 | 85 | self.logger.experiment.add_image('images', grid, 0) |
76 | 86 | self.logger.experiment.add_graph(model, images) |
77 | 87 |
|
78 | | -... def any_lightning_module_function_or_hook(self): |
79 | | -... self.logger.experiment.add_histogram(...) |
| 88 | + def any_lightning_module_function_or_hook(self): |
| 89 | + self.logger.experiment.add_histogram(...) |
80 | 90 |
|
81 | 91 | Read more in the `Experiment Logging use case <./experiment_logging.html>`_. |
82 | 92 |
|
|
0 commit comments