change print to logging #457
```diff
@@ -1,6 +1,7 @@
 import os
 import shutil
+import logging
 import warnings
 import numpy as np

 from pytorch_lightning.pt_overrides.override_data_parallel import LightningDistributedDataParallel
```
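One practical point worth noting about this switch (my observation, not raised in the review): bare `logging.info(...)` calls go to the root logger, which only emits records at WARNING level and above by default, so applications would need to opt in to actually see these messages, e.g.:

```python
import logging

# The root logger defaults to WARNING, so the INFO messages added in this
# PR are silently dropped unless the application configures logging, e.g.:
logging.basicConfig(level=logging.INFO)
```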
```diff
@@ -91,7 +92,7 @@ def __init__(self, monitor='val_loss',
         self.stopped_epoch = 0

         if mode not in ['auto', 'min', 'max']:
-            print('EarlyStopping mode %s is unknown, fallback to auto mode.' % mode)
+            logging.info(f'EarlyStopping mode {mode} is unknown, fallback to auto mode.')
             mode = 'auto'

         if mode == 'min':
```
```diff
@@ -121,9 +122,10 @@ def on_epoch_end(self, epoch, logs=None):
         current = logs.get(self.monitor)
         stop_training = False
         if current is None:
-            print('Early stopping conditioned on metric `%s` '
-                  'which is not available. Available metrics are: %s' %
-                  (self.monitor, ','.join(list(logs.keys()))), RuntimeWarning)
+            warnings.warn(
+                f'Early stopping conditioned on metric `{self.monitor}`'
+                f' which is not available. Available metrics are: {",".join(list(logs.keys()))}',
+                RuntimeWarning)
             stop_training = True
             return stop_training

@@ -141,7 +143,7 @@ def on_epoch_end(self, epoch, logs=None):
     def on_train_end(self, logs=None):
         if self.stopped_epoch > 0 and self.verbose > 0:
-            print('Epoch %05d: early stopping' % (self.stopped_epoch + 1))
+            logging.info(f'Epoch {self.stopped_epoch + 1:05d}: early stopping')


 class ModelCheckpoint(Callback):
```
Collaborator: Not directly related to this line, but we should make sure that all of this new logging is tested (covered by tests).

Contributor (Author): This did occur to me, but I didn't know how to test this logging. Any ideas?

Collaborator: I usually just test that emitting the message does not crash the code...
```diff
@@ -187,8 +189,9 @@ def __init__(self, filepath, monitor='val_loss', verbose=0,
         self.prefix = prefix

         if mode not in ['auto', 'min', 'max']:
-            print('ModelCheckpoint mode %s is unknown, '
-                  'fallback to auto mode.' % (mode), RuntimeWarning)
+            warnings.warn(
+                f'ModelCheckpoint mode {mode} is unknown, '
+                'fallback to auto mode.', RuntimeWarning)
             mode = 'auto'

         if mode == 'min':
```
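As an aside, the second positional argument behaves very differently in the old and new calls: `print` simply printed the `RuntimeWarning` class object after the message, whereas `warnings.warn` uses it as the warning category. A small sketch of mine, reusing the message text from the diff:

```python
import warnings

# Old call: print() treats RuntimeWarning as just another value to display.
print('ModelCheckpoint mode bogus is unknown, fallback to auto mode.', RuntimeWarning)
# -> ModelCheckpoint mode bogus is unknown, fallback to auto mode. <class 'RuntimeWarning'>

# New call: RuntimeWarning is the warning category, subject to warning filters.
warnings.warn('ModelCheckpoint mode bogus is unknown, fallback to auto mode.', RuntimeWarning)
```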
```diff
@@ -232,25 +235,26 @@ def on_epoch_end(self, epoch, logs=None):
         if self.save_best_only:
             current = logs.get(self.monitor)
             if current is None:
-                print('Can save best model only with %s available,'
-                      ' skipping.' % (self.monitor), RuntimeWarning)
+                warnings.warn(
+                    f'Can save best model only with {self.monitor} available,'
+                    ' skipping.', RuntimeWarning)
             else:
                 if self.monitor_op(current, self.best):
                     if self.verbose > 0:
-                        print('\nEpoch %05d: %s improved from %0.5f to %0.5f,'
-                              ' saving model to %s'
-                              % (epoch + 1, self.monitor, self.best,
-                                 current, filepath))
+                        logging.info(
+                            f'\nEpoch {epoch + 1:05d}: {self.monitor} improved'
+                            f' from {self.best:0.5f} to {current:0.5f},'
+                            f' saving model to {filepath}')
                     self.best = current
                     self.save_model(filepath, overwrite=True)

                 else:
                     if self.verbose > 0:
-                        print('\nEpoch %05d: %s did not improve' %
-                              (epoch + 1, self.monitor))
+                        logging.info(
+                            f'\nEpoch {epoch + 1:05d}: {self.monitor} did not improve')
         else:
             if self.verbose > 0:
-                print('\nEpoch %05d: saving model to %s' % (epoch + 1, filepath))
+                logging.info(f'\nEpoch {epoch + 1:05d}: saving model to {filepath}')
             self.save_model(filepath, overwrite=False)
```
```diff
@@ -291,6 +295,6 @@ def on_epoch_begin(self, epoch, trainer):
     losses = [10, 9, 8, 8, 6, 4.3, 5, 4.4, 2.8, 2.5]
     for i, loss in enumerate(losses):
         should_stop = c.on_epoch_end(i, logs={'val_loss': loss})
-        print(loss)
+        logging.info(loss)
         if should_stop:
             break
```
Collaborator: `warnings.warn` and `logging.warning` are completely different streams with different consequences, so please be sure that you want this behaviour... You may also consider raising it as an error.

Contributor (Author): Hmm, I'm not familiar with this, but I noticed that the previous code used `warnings.warn` a lot, so I thought this was kind of a best practice.

Collaborator: I believe that was related to the earlier use of `print`; in such a case it is better to use `warnings`. I am not saying it is wrong, in this case it is good, just be aware of it... :]