diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py
index 2897d19bd0cb5..6252e34326492 100644
--- a/pytorch_lightning/__init__.py
+++ b/pytorch_lightning/__init__.py
@@ -28,9 +28,11 @@
 from .trainer.trainer import Trainer
 from .core.lightning import LightningModule
 from .core.decorators import data_loader
+import logging
 
 __all__ = [
     'Trainer',
     'LightningModule',
     'data_loader',
 ]
+logging.basicConfig(level=logging.INFO)
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 95a98e51e9233..97fb772e4890d 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -1022,7 +1022,7 @@ def load_from_checkpoint(cls, checkpoint_path):
 
     def summarize(self, mode):
         model_summary = ModelSummary(self, mode=mode)
-        logging.info(model_summary)
+        logging.info('\n' + model_summary.__str__())
 
     def freeze(self):
         """Freeze all params for inference
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index eb00cda4690c1..6c844f53b68a7 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -240,9 +240,6 @@ def __init__(self,
         self.amp_level = amp_level
         self.init_amp(use_amp)
 
-        # set logging options
-        logging.basicConfig(level=logging.INFO)
-
     @property
     def slurm_job_id(self):
         try:
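
The net effect of this patch is that logging is configured once at package import time rather than every time a Trainer is constructed. Below is a minimal sketch of the resulting behavior, assuming pytorch_lightning is installed; the override shown is plain stdlib logging, not a Lightning API:

    import logging

    # Importing the package now runs logging.basicConfig(level=logging.INFO),
    # so INFO-level records (e.g., the model summary) print by default.
    import pytorch_lightning  # noqa: F401

    # Downstream code can still raise the threshold with ordinary stdlib
    # calls; this silences the INFO records without touching Lightning.
    logging.getLogger().setLevel(logging.WARNING)

    logging.info("suppressed: below the WARNING threshold")
    logging.warning("still shown: at or above the threshold")

Because logging.basicConfig is a no-op when the root logger already has handlers, applications that configure logging before importing pytorch_lightning keep their own setup. The summarize() change simply prefixes the table with a newline so the summary starts on its own line in the log output.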