Skip to content

Commit f5e0df3

Browse files
authored
Fix loggers and update docs (Lightning-AI#964)
* Fix loggers and update docs * Update trainer.py
1 parent 27a3be0 commit f5e0df3

File tree

4 files changed

+39
-22
lines changed

4 files changed

+39
-22
lines changed

docs/source/experiment_logging.rst

Lines changed: 17 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -22,13 +22,13 @@ To use CometLogger as your logger do the following.
2222
)
2323
trainer = Trainer(logger=comet_logger)
2424
25-
The CometLogger is available anywhere in your LightningModule
25+
The CometLogger is available anywhere except ``__init__`` in your LightningModule
2626

2727
.. code-block:: python
2828
2929
class MyModule(pl.LightningModule):
3030
31-
def __init__(self, ...):
31+
def any_lightning_module_function_or_hook(self, ...):
3232
some_img = fake_image()
3333
self.logger.experiment.add_image('generated_images', some_img, 0)
3434
@@ -52,13 +52,13 @@ To use Neptune.ai as your logger do the following.
5252
)
5353
trainer = Trainer(logger=neptune_logger)
5454
55-
The Neptune.ai is available anywhere in your LightningModule
55+
The Neptune.ai is available anywhere except ``__init__`` in your LightningModule
5656

5757
.. code-block:: python
5858
5959
class MyModule(pl.LightningModule):
6060
61-
def __init__(self, ...):
61+
def any_lightning_module_function_or_hook(self, ...):
6262
some_img = fake_image()
6363
self.logger.experiment.add_image('generated_images', some_img, 0)
6464
@@ -76,13 +76,13 @@ To use `Tensorboard <https://pytorch.org/docs/stable/tensorboard.html>`_ as your
7676
logger = TensorBoardLogger("tb_logs", name="my_model")
7777
trainer = Trainer(logger=logger)
7878
79-
The TensorBoardLogger is available anywhere in your LightningModule
79+
The TensorBoardLogger is available anywhere except ``__init__`` in your LightningModule
8080

8181
.. code-block:: python
8282
8383
class MyModule(pl.LightningModule):
8484
85-
def __init__(self, ...):
85+
def any_lightning_module_function_or_hook(self, ...):
8686
some_img = fake_image()
8787
self.logger.experiment.add_image('generated_images', some_img, 0)
8888
@@ -102,13 +102,13 @@ To use TestTube as your logger do the following.
102102
logger = TestTubeLogger("tb_logs", name="my_model")
103103
trainer = Trainer(logger=logger)
104104
105-
The TestTubeLogger is available anywhere in your LightningModule
105+
The TestTubeLogger is available anywhere except ``__init__`` in your LightningModule
106106

107107
.. code-block:: python
108108
109109
class MyModule(pl.LightningModule):
110110
111-
def __init__(self, ...):
111+
def any_lightning_module_function_or_hook(self, ...):
112112
some_img = fake_image()
113113
self.logger.experiment.add_image('generated_images', some_img, 0)
114114
@@ -127,13 +127,13 @@ To use Wandb as your logger do the following.
127127
wandb_logger = WandbLogger()
128128
trainer = Trainer(logger=wandb_logger)
129129
130-
The Wandb logger is available anywhere in your LightningModule
130+
The Wandb logger is available anywhere except ``__init__`` in your LightningModule
131131

132132
.. code-block:: python
133133
134134
class MyModule(pl.LightningModule):
135135
136-
def __init__(self, ...):
136+
def any_lightning_module_function_or_hook(self, ...):
137137
some_img = fake_image()
138138
self.logger.experiment.add_image('generated_images', some_img, 0)
139139
@@ -151,12 +151,17 @@ PyTorch-Lightning supports use of multiple loggers, just pass a list to the `Tra
151151
logger2 = TestTubeLogger("tt_logs", name="my_model")
152152
trainer = Trainer(logger=[logger1, logger2])
153153
154-
The loggers are available as a list anywhere in your LightningModule
154+
The loggers are available as a list anywhere except ``__init__`` in your LightningModule
155155

156156
.. code-block:: python
157157
158158
class MyModule(pl.LightningModule):
159159
160-
def __init__(self, ...):
160+
def any_lightning_module_function_or_hook(self, ...):
161161
some_img = fake_image()
162+
163+
# Option 1
162164
self.logger.experiment[0].add_image('generated_images', some_img, 0)
165+
166+
# Option 2
167+
self.logger[0].experiment.add_image('generated_images', some_img, 0)

pytorch_lightning/loggers/__init__.py

Lines changed: 15 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
"""
22
Lightning supports most popular logging frameworks (Tensorboard, comet, weights and biases, etc...).
3-
To use a logger, simply pass it into the trainer.
3+
To use a logger, simply pass it into the trainer. To use multiple loggers, simply pass in a ``list``
4+
or ``tuple`` of loggers.
45
56
.. code-block:: python
67
@@ -14,14 +15,19 @@
1415
comet_logger = loggers.CometLogger()
1516
trainer = Trainer(logger=comet_logger)
1617
17-
.. note:: All loggers log by default to `os.getcwd()`. To change the path without creating a logger set
18-
Trainer(default_save_path='/your/path/to/save/checkpoints')
18+
# or pass a list
19+
tb_logger = loggers.TensorBoardLogger()
20+
comet_logger = loggers.CometLogger()
21+
trainer = Trainer(logger=[tb_logger, comet_logger])
22+
23+
.. note:: All loggers log by default to ``os.getcwd()``. To change the path without creating a logger set
24+
``Trainer(default_save_path='/your/path/to/save/checkpoints')``
1925
2026
Custom logger
2127
-------------
2228
2329
You can implement your own logger by writing a class that inherits from
24-
`LightningLoggerBase`. Use the `rank_zero_only` decorator to make sure that
30+
``LightningLoggerBase``. Use the ``rank_zero_only`` decorator to make sure that
2531
only the first process in DDP training logs data.
2632
2733
.. code-block:: python
@@ -52,13 +58,13 @@ def finalize(self, status):
5258
# finishes goes here
5359
5460
55-
If you write a logger than may be useful to others, please send
61+
If you write a logger that may be useful to others, please send
5662
a pull request to add it to Lightning!
5763
5864
Using loggers
5965
-------------
6066
61-
Call the logger anywhere from your LightningModule by doing:
67+
Call the logger anywhere except ``__init__`` in your LightningModule by doing:
6268
6369
.. code-block:: python
6470
@@ -69,6 +75,8 @@ def train_step(...):
6975
def any_lightning_module_function_or_hook(...):
7076
self.logger.experiment.add_histogram(...)
7177
78+
Read more in the `Experiment Logging use case <./experiment_logging.html>`_.
79+
7280
Supported Loggers
7381
-----------------
7482
"""
@@ -77,7 +85,7 @@ def any_lightning_module_function_or_hook(...):
7785
from .base import LightningLoggerBase, LoggerCollection, rank_zero_only
7886
from .tensorboard import TensorBoardLogger
7987

80-
__all__ = ['TensorBoardLogger', 'LoggerCollection']
88+
__all__ = ['TensorBoardLogger']
8189

8290
try:
8391
# needed to prevent ImportError and duplicated logs.

pytorch_lightning/loggers/base.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -100,6 +100,9 @@ def __init__(self, logger_iterable: Iterable[LightningLoggerBase]):
100100
super().__init__()
101101
self._logger_iterable = logger_iterable
102102

103+
def __getitem__(self, index: int) -> LightningLoggerBase:
104+
return [logger for logger in self._logger_iterable][index]
105+
103106
@property
104107
def experiment(self) -> List[Any]:
105108
return [logger.experiment() for logger in self._logger_iterable]

pytorch_lightning/trainer/trainer.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -937,6 +937,9 @@ def fit(
937937
# feed to .fit()
938938
939939
"""
940+
# bind logger
941+
model.logger = self.logger
942+
940943
# Fit begin callbacks
941944
self.on_fit_start()
942945

@@ -1065,10 +1068,8 @@ def run_pretrain_routine(self, model: LightningModule):
10651068
# set local properties on the model
10661069
self.copy_trainer_model_properties(ref_model)
10671070

1068-
# link up experiment object
1071+
# log hyper-parameters
10691072
if self.logger is not None:
1070-
ref_model.logger = self.logger
1071-
10721073
# save exp to get started
10731074
if hasattr(ref_model, "hparams"):
10741075
self.logger.log_hyperparams(ref_model.hparams)

0 commit comments

Comments
 (0)