From d71fe9b10e96e1de2de86eacb20824d30408fff2 Mon Sep 17 00:00:00 2001
From: Jiri Borovec
Date: Tue, 28 Jan 2020 01:11:36 +0100
Subject: [PATCH 1/6] wip

---
 docs/requirements.txt         | 4 ++--
 pytorch_lightning/__init__.py | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/docs/requirements.txt b/docs/requirements.txt
index 5834ce1ee23f6..ea4ff69e8945b 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -4,7 +4,7 @@ m2r # fails with multi-line text
 nbsphinx
 pandoc
 docutils
-git+https://github.com/PytorchLightning/lightning_sphinx_theme.git
 sphinxcontrib-fulltoc
 sphinxcontrib-mockautodoc
-pip_shims
\ No newline at end of file
+git+https://github.com/PytorchLightning/lightning_sphinx_theme.git
+# pip_shims
\ No newline at end of file
diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py
index fc6490891f1a4..4cd731322e519 100644
--- a/pytorch_lightning/__init__.py
+++ b/pytorch_lightning/__init__.py
@@ -10,7 +10,6 @@
 __docs__ = "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers." \
     " Scale your models. Write less boilerplate."
 
-
 try:
     # This variable is injected in the __builtins__ by the build
     # process. It used to enable importing subpackages of skimage when
@@ -37,3 +36,5 @@
         'LightningModule',
         'data_loader',
     ]
+
+    __package__ = __all__

From 51b6f4b5302c8b6c7fb66a4c42eabeca88b5bf57 Mon Sep 17 00:00:00 2001
From: Jiri Borovec
Date: Tue, 28 Jan 2020 01:26:17 +0100
Subject: [PATCH 2/6] wip

---
 pytorch_lightning/__init__.py | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py
index 4cd731322e519..1e941820ab28a 100644
--- a/pytorch_lightning/__init__.py
+++ b/pytorch_lightning/__init__.py
@@ -31,10 +31,8 @@
     from .core.lightning import LightningModule
     from .core.decorators import data_loader
 
-    __all__ = [
-        'Trainer',
-        'LightningModule',
-        'data_loader',
-    ]
-
-    __package__ = __all__
+__all__ = [
+    'Trainer',
+    'LightningModule',
+    'data_loader',
+]

From d52a2dba611ea9690fc4759392c3e9b39b2b9f91 Mon Sep 17 00:00:00 2001
From: Jiri Borovec
Date: Thu, 30 Jan 2020 00:22:58 +0100
Subject: [PATCH 3/6] debug imports docs formatting

---
 docs/source/conf.py                       | 12 ++++--
 docs/source/documentation.rst             |  8 ----
 docs/source/index.rst                     |  2 +-
 docs/source/lightning-module.rst          |  3 +-
 docs/source/modules.rst                   |  7 ----
 docs/source/tutorials.rst                 | 18 ++++-----
 pl_examples/__init__.py                   |  1 +
 .../lightning_module_template.py          |  9 ++---
 pl_examples/domain_templates/gan.py       |  8 ++--
 .../imagenet/imagenet_example.py          | 10 ++---
 pytorch_lightning/__init__.py             |  5 ++-
 pytorch_lightning/core/__init__.py        |  5 ++-
 pytorch_lightning/core/decorators.py      |  5 +--
 pytorch_lightning/core/lightning.py       | 40 ++++++++++---------
 pytorch_lightning/testing/model_base.py   |  2 +-
 pytorch_lightning/testing/model_mixins.py |  2 +-
 pytorch_lightning/trainer/__init__.py     |  1 +
 setup.py                                  | 12 ++++--
 18 files changed, 76 insertions(+), 74 deletions(-)
 delete mode 100644 docs/source/documentation.rst
 delete mode 100644 docs/source/modules.rst

diff --git a/docs/source/conf.py b/docs/source/conf.py
index a5d7841aa649c..c1e193917da2a 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -348,7 +348,11 @@ def find_source():
 autodoc_member_order = 'groupwise'
 autoclass_content = 'both'
 
-autodoc_default_flags = [
-    'members', 'undoc-members', 'show-inheritance', 'private-members',
-    # 'special-members', 'inherited-members'
-]
+autodoc_default_options = {
+    'members': True,
+    'special-members': '__call__',
+    'undoc-members': True,
+    # 'exclude-members': '__weakref__',
+    'show-inheritance': True,
+    'private-members': True,
+}
diff --git a/docs/source/documentation.rst b/docs/source/documentation.rst
deleted file mode 100644
index 6b22dfc1ab6e4..0000000000000
--- a/docs/source/documentation.rst
+++ /dev/null
@@ -1,8 +0,0 @@
-Documentation
-=============
-
-
-.. toctree::
-   :maxdepth: 4
-
-   pytorch_lightning
\ No newline at end of file
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 755bbfe5f2025..33d3c34f67967 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -4,7 +4,7 @@
    contain the root `toctree` directive.
 
 PyTorch-Lightning Documentation
-=============================
+===============================
 
 .. toctree::
    :maxdepth: 1
diff --git a/docs/source/lightning-module.rst b/docs/source/lightning-module.rst
index 93c81537f65ca..a05c1de444faa 100644
--- a/docs/source/lightning-module.rst
+++ b/docs/source/lightning-module.rst
@@ -2,7 +2,8 @@
     :class: hidden-section
 
 LightningModule
-===========
+===============
+
 .. automodule:: pytorch_lightning.core
     :exclude-members: _abc_impl,
diff --git a/docs/source/modules.rst b/docs/source/modules.rst
deleted file mode 100644
index e4c5121858c28..0000000000000
--- a/docs/source/modules.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pl_examples
-===========
-
-.. toctree::
-   :maxdepth: 4
-
-   pl_examples
diff --git a/docs/source/tutorials.rst b/docs/source/tutorials.rst
index 03a493691eebd..ba2baf2a617f5 100644
--- a/docs/source/tutorials.rst
+++ b/docs/source/tutorials.rst
@@ -1,20 +1,20 @@
 Refactoring PyTorch into Lightning
-==================================
-`Tutorial `_
+----------------------------------
+`How to refactor your PyTorch code to get these 42 benefits of PyTorch-Lighting `_
 
 Start a research project
-=========================
+------------------------
 `Research seed `_
 
 Basic Lightning use
-====================
-`Tutorial `_
+-------------------
+`Supercharge your AI research with PyTorch-Lightning `_
 
 9 key Lightning tricks
-========================
-`Tutorial `_
+-----------------------
+`Tutorial on 9 key speed features in PyTorch-Lightning <9 key speed features in Pytorch-Lightning>`_
 
 Multi-node training on SLURM
-=============================
-`Tutorial `_
+----------------------------
+`Trivial multi node training with PyTorch-Lightning `_
 
diff --git a/pl_examples/__init__.py b/pl_examples/__init__.py
index 92abda69d1e96..65522ace054ba 100644
--- a/pl_examples/__init__.py
+++ b/pl_examples/__init__.py
@@ -43,6 +43,7 @@
 The main function is your entry into the program. This is where you init your
 model, checkpoint directory, and launch the training. The main function should
 have 3 arguments:
+
 - hparams: a configuration of hyperparameters.
 - slurm_manager: Slurm cluster manager object (can be None)
 - dict: for you to return any values you want (useful in meta-learning, otherwise set to)
diff --git a/pl_examples/basic_examples/lightning_module_template.py b/pl_examples/basic_examples/lightning_module_template.py
index 50fba989ea768..f8e564b5e2491 100644
--- a/pl_examples/basic_examples/lightning_module_template.py
+++ b/pl_examples/basic_examples/lightning_module_template.py
@@ -15,8 +15,7 @@
 from torch.utils.data.distributed import DistributedSampler
 from torchvision.datasets import MNIST
 
-import pytorch_lightning as pl
-from pytorch_lightning.core.lightning import LightningModule
+from pytorch_lightning import data_loader, LightningModule
 
 
 class LightningTemplateModel(LightningModule):
@@ -213,17 +212,17 @@ def __dataloader(self, train):
 
         return loader
 
-    @pl.data_loader
+    @data_loader
     def train_dataloader(self):
         log.info('Training data loader called.')
         return self.__dataloader(train=True)
 
-    @pl.data_loader
+    @data_loader
     def val_dataloader(self):
         log.info('Validation data loader called.')
         return self.__dataloader(train=False)
 
-    @pl.data_loader
+    @data_loader
     def test_dataloader(self):
         log.info('Test data loader called.')
         return self.__dataloader(train=False)
diff --git a/pl_examples/domain_templates/gan.py b/pl_examples/domain_templates/gan.py
index 78a813e82f4b4..1eba8f79d063b 100644
--- a/pl_examples/domain_templates/gan.py
+++ b/pl_examples/domain_templates/gan.py
@@ -19,7 +19,7 @@
 from torch.utils.data import DataLoader
 from torchvision.datasets import MNIST
 
-import pytorch_lightning as pl
+from pytorch_lightning import data_loader, LightningModule, Trainer
 
 
 class Generator(nn.Module):
@@ -69,7 +69,7 @@ def forward(self, img):
         return validity
 
 
-class GAN(pl.LightningModule):
+class GAN(LightningModule):
 
     def __init__(self, hparams):
         super(GAN, self).__init__()
@@ -165,7 +165,7 @@ def configure_optimizers(self):
         opt_d = torch.optim.Adam(self.discriminator.parameters(), lr=lr, betas=(b1, b2))
         return [opt_g, opt_d], []
 
-    @pl.data_loader
+    @data_loader
     def train_dataloader(self):
         transform = transforms.Compose([transforms.ToTensor(),
                                         transforms.Normalize([0.5], [0.5])])
@@ -193,7 +193,7 @@ def main(hparams):
     # ------------------------
     # 2 INIT TRAINER
     # ------------------------
-    trainer = pl.Trainer()
+    trainer = Trainer()
 
     # ------------------------
     # 3 START TRAINING
diff --git a/pl_examples/full_examples/imagenet/imagenet_example.py b/pl_examples/full_examples/imagenet/imagenet_example.py
index ce2fbf6a12dd2..f4daf922c1e2d 100644
--- a/pl_examples/full_examples/imagenet/imagenet_example.py
+++ b/pl_examples/full_examples/imagenet/imagenet_example.py
@@ -18,7 +18,7 @@
 import torchvision.models as models
 import torchvision.transforms as transforms
 
-import pytorch_lightning as pl
+from pytorch_lightning import data_loader, Trainer, LightningModule
 
 # pull out resnet names from torchvision models
 MODEL_NAMES = sorted(
@@ -27,7 +27,7 @@
 )
 
 
-class ImageNetLightningModel(pl.LightningModule):
+class ImageNetLightningModel(LightningModule):
 
     def __init__(self, hparams):
         super(ImageNetLightningModel, self).__init__()
@@ -128,7 +128,7 @@ def configure_optimizers(self):
         scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=0.1)
         return [optimizer], [scheduler]
 
-    @pl.data_loader
+    @data_loader
     def train_dataloader(self):
         normalize = transforms.Normalize(
             mean=[0.485, 0.456, 0.406],
@@ -159,7 +159,7 @@ def train_dataloader(self):
         )
         return train_loader
 
-    @pl.data_loader
+    @data_loader
     def val_dataloader(self):
         normalize = transforms.Normalize(
             mean=[0.485, 0.456, 0.406],
@@ -232,7 +232,7 @@ def main(hparams):
     random.seed(hparams.seed)
     torch.manual_seed(hparams.seed)
     cudnn.deterministic = True
-    trainer = pl.Trainer(
+    trainer = Trainer(
         default_save_path=hparams.save_path,
         gpus=hparams.gpus,
         max_epochs=hparams.epochs,
diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py
index 1e941820ab28a..5f607de8d0b81 100644
--- a/pytorch_lightning/__init__.py
+++ b/pytorch_lightning/__init__.py
@@ -1,6 +1,6 @@
 """Root package info."""
 
-__version__ = '0.6.0.dev'
+__version__ = '0.6.1.dev'
 __author__ = 'William Falcon et al.'
 __author_email__ = 'waf2107@columbia.edu'
 __license__ = 'Apache-2.0'
@@ -31,8 +31,11 @@
     from .core.lightning import LightningModule
     from .core.decorators import data_loader
 
+    import sys
+
 __all__ = [
     'Trainer',
     'LightningModule',
     'data_loader',
 ]
+__call__ = __all__
diff --git a/pytorch_lightning/core/__init__.py b/pytorch_lightning/core/__init__.py
index b338a8b4f7d9f..2a9f39dcc6436 100644
--- a/pytorch_lightning/core/__init__.py
+++ b/pytorch_lightning/core/__init__.py
@@ -96,6 +96,9 @@ def test_dataloader(self):
 for a live demo.
 
 """
+
+from .decorators import data_loader
 from .lightning import LightningModule
 
-__all__ = ['LightningModule']
+__all__ = ['LightningModule', 'data_loader']
+__call__ = __all__
\ No newline at end of file
diff --git a/pytorch_lightning/core/decorators.py b/pytorch_lightning/core/decorators.py
index aeea1a7e44256..743aac656d18c 100644
--- a/pytorch_lightning/core/decorators.py
+++ b/pytorch_lightning/core/decorators.py
@@ -3,12 +3,11 @@
 
 
 def data_loader(fn):
-    """
-    Decorator to make any fx with this use the lazy property
+    """Decorator to make any fx with this use the lazy property.
+
     :param fn:
     :return:
     """
-
     wraps(fn)
     attr_name = '_lazy_' + fn.__name__
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 66b887fa76374..837fea0b70d9f 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -18,6 +18,7 @@
 
 
 class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
+
     def __init__(self, *args, **kwargs):
         super(LightningModule, self).__init__(*args, **kwargs)
 
@@ -115,6 +116,7 @@ def training_step(self, *args, **kwargs):
         :param int batch_idx: Integer displaying which batch this is
         :return: dict with loss key and optional log, progress keys
             if implementing training_step, return whatever you need in that step:
+
             - loss -> tensor scalar [REQUIRED]
             - progress_bar -> Dict for progress bar display. Must have only tensors
             - log -> Dict of metrics to add to logger. Must have only tensors (no images, etc)
@@ -1061,30 +1063,30 @@ def load_from_checkpoint(cls, checkpoint_path, map_location=None):
         it stores the hyperparameters in the checkpoint if you initialized your LightningModule
         with an argument called `hparams` which is a Namespace or dictionary of hyperparameters
 
-            Example
-            -------
-            .. code-block:: python
+        Example
+        -------
+        .. code-block:: python
 
-                # --------------
-                # Case 1
-                # when using Namespace (output of using Argparse to parse command line arguments)
-                from argparse import Namespace
-                hparams = Namespace(**{'learning_rate': 0.1})
+            # --------------
+            # Case 1
+            # when using Namespace (output of using Argparse to parse command line arguments)
+            from argparse import Namespace
+            hparams = Namespace(**{'learning_rate': 0.1})
 
-                model = MyModel(hparams)
+            model = MyModel(hparams)
 
-                class MyModel(pl.LightningModule):
-                    def __init__(self, hparams):
-                        self.learning_rate = hparams.learning_rate
+            class MyModel(pl.LightningModule):
+                def __init__(self, hparams):
+                    self.learning_rate = hparams.learning_rate
 
-                # --------------
-                # Case 2
-                # when using a dict
-                model = MyModel({'learning_rate': 0.1})
+            # --------------
+            # Case 2
+            # when using a dict
+            model = MyModel({'learning_rate': 0.1})
 
-                class MyModel(pl.LightningModule):
-                    def __init__(self, hparams):
-                        self.learning_rate = hparams['learning_rate']
+            class MyModel(pl.LightningModule):
+                def __init__(self, hparams):
+                    self.learning_rate = hparams['learning_rate']
 
         Args:
             checkpoint_path (str): Path to checkpoint.
diff --git a/pytorch_lightning/testing/model_base.py b/pytorch_lightning/testing/model_base.py
index 9baaf17640336..949a39ef26ca2 100644
--- a/pytorch_lightning/testing/model_base.py
+++ b/pytorch_lightning/testing/model_base.py
@@ -16,7 +16,7 @@
     # TODO: this should be discussed and moved out of this package
     raise ImportError('Missing test-tube package.')
 
-from pytorch_lightning import data_loader
+from pytorch_lightning.core.decorators import data_loader
 from pytorch_lightning.core.lightning import LightningModule
 
 
diff --git a/pytorch_lightning/testing/model_mixins.py b/pytorch_lightning/testing/model_mixins.py
index b568676685c23..03da85d59d096 100644
--- a/pytorch_lightning/testing/model_mixins.py
+++ b/pytorch_lightning/testing/model_mixins.py
@@ -2,7 +2,7 @@
 
 import torch
 
-from pytorch_lightning import data_loader
+from pytorch_lightning.core.decorators import data_loader
 
 
 class LightningValidationStepMixin:
diff --git a/pytorch_lightning/trainer/__init__.py b/pytorch_lightning/trainer/__init__.py
index c18f22b821a3f..98c2b99b56357 100644
--- a/pytorch_lightning/trainer/__init__.py
+++ b/pytorch_lightning/trainer/__init__.py
@@ -26,4 +26,5 @@
 """
 
 from .trainer import Trainer
+
 __all__ = ['Trainer']
diff --git a/setup.py b/setup.py
index 44d0d199a331b..469387da3be6c 100755
--- a/setup.py
+++ b/setup.py
@@ -5,10 +5,8 @@
 # Always prefer setuptools over distutils
 from setuptools import setup, find_packages
 
-try:
-    import builtins
-except ImportError:
-    import __builtin__ as builtins
+import builtins
+# import __builtin__ as builtins
 
 # https://packaging.python.org/guides/single-sourcing-package-version/
 # http://blog.ionelmc.ro/2014/05/25/python-packaging/
@@ -58,6 +56,12 @@ def load_requirements(path_dir=PATH_ROOT, comment_char='#'):
     setup_requires=[],
     install_requires=load_requirements(PATH_ROOT),
 
+    project_urls={
+        "Bug Tracker": "https://github.com/PyTorchLightning/pytorch-lightning/issues",
+        "Documentation": "https://pytorch-lightning.rtfd.io/en/latest/",
+        "Source Code": "https://github.com/PyTorchLightning/pytorch-lightning",
+    },
+
     classifiers=[
         'Environment :: Console',
         'Natural Language :: English',

From 826418ccface5a1733f5ea809cf7b478af03d621 Mon Sep 17 00:00:00 2001
From: Jiri Borovec
Date: Thu, 30 Jan 2020 01:00:48 +0100
Subject: [PATCH 4/6] WIP

---
 pl_examples/basic_examples/cpu_template.py | 4 ++--
 pl_examples/basic_examples/gpu_template.py | 4 ++--
 .../lightning_module_template.py           | 10 +++++-----
 pl_examples/domain_templates/gan.py        |  8 ++++----
 .../imagenet/imagenet_example.py           | 10 +++++-----
 .../multi_node_ddp2_demo.py                |  4 ++--
 .../multi_node_ddp_demo.py                 |  4 ++--
 pytorch_lightning/__init__.py              | 19 ++++++++-----------
 pytorch_lightning/trainer/trainer.py       |  1 +
 9 files changed, 31 insertions(+), 33 deletions(-)

diff --git a/pl_examples/basic_examples/cpu_template.py b/pl_examples/basic_examples/cpu_template.py
index fd750bc2967fe..0714b1aac0f13 100644
--- a/pl_examples/basic_examples/cpu_template.py
+++ b/pl_examples/basic_examples/cpu_template.py
@@ -7,8 +7,8 @@
 import numpy as np
 import torch
 
+import pytorch_lightning as pl
 from pl_examples.basic_examples.lightning_module_template import LightningTemplateModel
-from pytorch_lightning import Trainer
 
 SEED = 2334
 torch.manual_seed(SEED)
@@ -28,7 +28,7 @@ def main(hparams):
     # ------------------------
     # 2 INIT TRAINER
     # ------------------------
-    trainer = Trainer()
+    trainer = pl.Trainer()
 
     # ------------------------
     # 3 START TRAINING
diff --git a/pl_examples/basic_examples/gpu_template.py b/pl_examples/basic_examples/gpu_template.py
index d95b9ab8672d0..c661eef65fc68 100644
--- a/pl_examples/basic_examples/gpu_template.py
+++ b/pl_examples/basic_examples/gpu_template.py
@@ -7,8 +7,8 @@
 import numpy as np
 import torch
 
+import pytorch_lightning as pl
 from pl_examples.basic_examples.lightning_module_template import LightningTemplateModel
-from pytorch_lightning import Trainer
 
 SEED = 2334
 torch.manual_seed(SEED)
@@ -28,7 +28,7 @@ def main(hparams):
     # ------------------------
     # 2 INIT TRAINER
     # ------------------------
-    trainer = Trainer(
+    trainer = pl.Trainer(
         gpus=hparams.gpus,
         distributed_backend=hparams.distributed_backend,
         use_amp=hparams.use_16bit
diff --git a/pl_examples/basic_examples/lightning_module_template.py b/pl_examples/basic_examples/lightning_module_template.py
index f8e564b5e2491..81cdf2acba6be 100644
--- a/pl_examples/basic_examples/lightning_module_template.py
+++ b/pl_examples/basic_examples/lightning_module_template.py
@@ -15,10 +15,10 @@
 from torch.utils.data.distributed import DistributedSampler
 from torchvision.datasets import MNIST
 
-from pytorch_lightning import data_loader, LightningModule
+import pytorch_lightning as pl
 
 
-class LightningTemplateModel(LightningModule):
+class LightningTemplateModel(pl.LightningModule):
     """
     Sample model to show how to define a template
     """
@@ -212,17 +212,17 @@ def __dataloader(self, train):
 
         return loader
 
-    @data_loader
+    @pl.data_loader
     def train_dataloader(self):
         log.info('Training data loader called.')
         return self.__dataloader(train=True)
 
-    @data_loader
+    @pl.data_loader
     def val_dataloader(self):
         log.info('Validation data loader called.')
         return self.__dataloader(train=False)
 
-    @data_loader
+    @pl.data_loader
     def test_dataloader(self):
         log.info('Test data loader called.')
         return self.__dataloader(train=False)
diff --git a/pl_examples/domain_templates/gan.py b/pl_examples/domain_templates/gan.py
index 1eba8f79d063b..78a813e82f4b4 100644
--- a/pl_examples/domain_templates/gan.py
+++ b/pl_examples/domain_templates/gan.py
@@ -19,7 +19,7 @@
 from torch.utils.data import DataLoader
 from torchvision.datasets import MNIST
 
-from pytorch_lightning import data_loader, LightningModule, Trainer
+import pytorch_lightning as pl
 
 
 class Generator(nn.Module):
@@ -69,7 +69,7 @@ def forward(self, img):
         return validity
 
 
-class GAN(LightningModule):
+class GAN(pl.LightningModule):
 
     def __init__(self, hparams):
         super(GAN, self).__init__()
@@ -165,7 +165,7 @@ def configure_optimizers(self):
         opt_d = torch.optim.Adam(self.discriminator.parameters(), lr=lr, betas=(b1, b2))
         return [opt_g, opt_d], []
 
-    @data_loader
+    @pl.data_loader
     def train_dataloader(self):
         transform = transforms.Compose([transforms.ToTensor(),
                                         transforms.Normalize([0.5], [0.5])])
@@ -193,7 +193,7 @@ def main(hparams):
     # ------------------------
     # 2 INIT TRAINER
     # ------------------------
-    trainer = Trainer()
+    trainer = pl.Trainer()
 
     # ------------------------
     # 3 START TRAINING
diff --git a/pl_examples/full_examples/imagenet/imagenet_example.py b/pl_examples/full_examples/imagenet/imagenet_example.py
index f4daf922c1e2d..ce2fbf6a12dd2 100644
--- a/pl_examples/full_examples/imagenet/imagenet_example.py
+++ b/pl_examples/full_examples/imagenet/imagenet_example.py
@@ -18,7 +18,7 @@
 import torchvision.models as models
 import torchvision.transforms as transforms
 
-from pytorch_lightning import data_loader, Trainer, LightningModule
+import pytorch_lightning as pl
 
 # pull out resnet names from torchvision models
 MODEL_NAMES = sorted(
@@ -27,7 +27,7 @@
 )
 
 
-class ImageNetLightningModel(LightningModule):
+class ImageNetLightningModel(pl.LightningModule):
 
     def __init__(self, hparams):
         super(ImageNetLightningModel, self).__init__()
@@ -128,7 +128,7 @@ def configure_optimizers(self):
         scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=0.1)
         return [optimizer], [scheduler]
 
-    @data_loader
+    @pl.data_loader
     def train_dataloader(self):
         normalize = transforms.Normalize(
             mean=[0.485, 0.456, 0.406],
@@ -159,7 +159,7 @@ def train_dataloader(self):
         )
         return train_loader
 
-    @data_loader
+    @pl.data_loader
     def val_dataloader(self):
         normalize = transforms.Normalize(
             mean=[0.485, 0.456, 0.406],
@@ -232,7 +232,7 @@ def main(hparams):
     random.seed(hparams.seed)
     torch.manual_seed(hparams.seed)
     cudnn.deterministic = True
-    trainer = Trainer(
+    trainer = pl.Trainer(
         default_save_path=hparams.save_path,
         gpus=hparams.gpus,
         max_epochs=hparams.epochs,
diff --git a/pl_examples/multi_node_examples/multi_node_ddp2_demo.py b/pl_examples/multi_node_examples/multi_node_ddp2_demo.py
index 0fcf423c0ca21..d88ee08a59a93 100644
--- a/pl_examples/multi_node_examples/multi_node_ddp2_demo.py
+++ b/pl_examples/multi_node_examples/multi_node_ddp2_demo.py
@@ -7,8 +7,8 @@
 import numpy as np
 import torch
 
+import pytorch_lightning as pl
 from pl_examples.basic_examples.lightning_module_template import LightningTemplateModel
-from pytorch_lightning import Trainer
 
 SEED = 2334
 torch.manual_seed(SEED)
@@ -29,7 +29,7 @@ def main(hparams):
     # ------------------------
     # 2 INIT TRAINER
     # ------------------------
-    trainer = Trainer(
+    trainer = pl.Trainer(
         gpus=2,
         num_nodes=2,
         distributed_backend='ddp2'
diff --git a/pl_examples/multi_node_examples/multi_node_ddp_demo.py b/pl_examples/multi_node_examples/multi_node_ddp_demo.py
index bb8dcf01278fa..18939fb96d156 100644
--- a/pl_examples/multi_node_examples/multi_node_ddp_demo.py
+++ b/pl_examples/multi_node_examples/multi_node_ddp_demo.py
@@ -7,8 +7,8 @@
 import numpy as np
 import torch
 
+import pytorch_lightning as pl
 from pl_examples.basic_examples.lightning_module_template import LightningTemplateModel
-from pytorch_lightning import Trainer
 
 SEED = 2334
 torch.manual_seed(SEED)
@@ -29,7 +29,7 @@ def main(hparams):
     # ------------------------
     # 2 INIT TRAINER
     # ------------------------
-    trainer = Trainer(
+    trainer = pt.Trainer(
         gpus=2,
         num_nodes=2,
         distributed_backend='ddp'
diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py
index 5f607de8d0b81..7d06e2b4fa15e 100644
--- a/pytorch_lightning/__init__.py
+++ b/pytorch_lightning/__init__.py
@@ -27,15 +27,12 @@
     import logging as log
     log.basicConfig(level=log.INFO)
 
-    from .trainer.trainer import Trainer
-    from .core.lightning import LightningModule
-    from .core.decorators import data_loader
+    from .core import *
+    from .trainer import *
 
-    import sys
-
-__all__ = [
-    'Trainer',
-    'LightningModule',
-    'data_loader',
-]
-__call__ = __all__
+    __all__ = [
+        'Trainer',
+        'LightningModule',
+        'data_loader',
+    ]
+    __call__ = __all__
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 8871b2eae86a7..9402d925edefe 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -49,6 +49,7 @@ class Trainer(TrainerIOMixin,
               TrainerTrainLoopMixin,
               TrainerCallbackConfigMixin,
               ):
+
     def __init__(
         self,
         logger=True,

From 6804c988957ebae54fa58c083550e153c2be0c52 Mon Sep 17 00:00:00 2001
From: Jiri Borovec
Date: Fri, 7 Feb 2020 15:31:44 +0100
Subject: [PATCH 5/6] formatting

---
 .run_local_tests.sh                            | 12 ++++++------
 .../multi_node_examples/multi_node_ddp_demo.py |  2 +-
 pytorch_lightning/__init__.py                  |  6 +++---
 pytorch_lightning/core/__init__.py             |  2 +-
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/.run_local_tests.sh b/.run_local_tests.sh
index 5e015941c2976..6e09ed1cc7364 100644
--- a/.run_local_tests.sh
+++ b/.run_local_tests.sh
@@ -2,11 +2,11 @@
 # use this to run tests
 
 rm -rf _ckpt_*
-rm -rf tests/save_dir*
-rm -rf tests/mlruns_*
-rm -rf tests/cometruns*
-rm -rf tests/wandb*
-rm -rf tests/tests/*
-rm -rf lightning_logs
+rm -rf ./tests/save_dir*
+rm -rf ./tests/mlruns_*
+rm -rf ./tests/cometruns*
+rm -rf ./tests/wandb*
+rm -rf ./tests/tests/*
+rm -rf ./lightning_logs
 coverage run --source pytorch_lightning -m py.test pytorch_lightning tests pl_examples -v --doctest-modules
 coverage report -m
diff --git a/pl_examples/multi_node_examples/multi_node_ddp_demo.py b/pl_examples/multi_node_examples/multi_node_ddp_demo.py
index 18939fb96d156..fb28c06039b10 100644
--- a/pl_examples/multi_node_examples/multi_node_ddp_demo.py
+++ b/pl_examples/multi_node_examples/multi_node_ddp_demo.py
@@ -29,7 +29,7 @@ def main(hparams):
     # ------------------------
     # 2 INIT TRAINER
     # ------------------------
-    trainer = pt.Trainer(
+    trainer = pl.Trainer(
         gpus=2,
         num_nodes=2,
         distributed_backend='ddp'
diff --git a/pytorch_lightning/__init__.py b/pytorch_lightning/__init__.py
index 7d06e2b4fa15e..aab6dd6137e1d 100644
--- a/pytorch_lightning/__init__.py
+++ b/pytorch_lightning/__init__.py
@@ -27,12 +27,12 @@
     import logging as log
     log.basicConfig(level=log.INFO)
 
-    from .core import *
-    from .trainer import *
+    from .core import data_loader, LightningModule
+    from .trainer import Trainer
 
     __all__ = [
         'Trainer',
         'LightningModule',
         'data_loader',
     ]
-    __call__ = __all__
+    # __call__ = __all__
diff --git a/pytorch_lightning/core/__init__.py b/pytorch_lightning/core/__init__.py
index 2a9f39dcc6436..a2fb7110ec8cd 100644
--- a/pytorch_lightning/core/__init__.py
+++ b/pytorch_lightning/core/__init__.py
@@ -101,4 +101,4 @@ def test_dataloader(self):
 from .lightning import LightningModule
 
 __all__ = ['LightningModule', 'data_loader']
-__call__ = __all__
\ No newline at end of file
+# __call__ = __all__

From 21dc4ad9f9faafa9e63e29292a374b6601b3e3be Mon Sep 17 00:00:00 2001
From: Jiri Borovec
Date: Fri, 7 Feb 2020 23:02:00 +0100
Subject: [PATCH 6/6] fix setup

---
 setup.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index 469387da3be6c..1195ada907f85 100755
--- a/setup.py
+++ b/setup.py
@@ -5,8 +5,10 @@
 # Always prefer setuptools over distutils
 from setuptools import setup, find_packages
 
-import builtins
-# import __builtin__ as builtins
+try:
+    import builtins
+except ImportError:
+    import __builtin__ as builtins
 
 # https://packaging.python.org/guides/single-sourcing-package-version/
 # http://blog.ionelmc.ro/2014/05/25/python-packaging/
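
A note on the guard this series keeps touching (the try-block context in PATCH 1, the plain import in PATCH 3, and the Python-2-safe form restored in PATCH 6): setup.py injects a flag into `builtins` before importing the package, so that `pytorch_lightning/__init__.py` can skip its heavy `Trainer`/`LightningModule` imports while the package is being built. The sketch below is illustrative only, not part of the series; the flag name `__LIGHTNING_SETUP__` is an assumption, since the hunks above mention the injected variable without naming it.

    # --- setup.py side (sketch) ---
    try:
        import builtins                  # Python 3
    except ImportError:
        import __builtin__ as builtins   # Python 2 fallback, as restored in PATCH 6

    # Assumed flag name; the comment in PATCH 1 only says the variable
    # "is injected in the __builtins__ by the build process".
    builtins.__LIGHTNING_SETUP__ = True

    # --- pytorch_lightning/__init__.py side (sketch) ---
    try:
        __LIGHTNING_SETUP__              # resolves via builtins during the build
    except NameError:
        __LIGHTNING_SETUP__ = False      # normal runtime import

    if not __LIGHTNING_SETUP__:
        # Heavy imports run only at normal import time, never during the build;
        # these are the exact imports PATCH 5 settles on.
        from .core import data_loader, LightningModule
        from .trainer import Trainer

Because the flag lives in `builtins`, the bare name lookup in `__init__.py` succeeds only while setup.py is running; any ordinary `import pytorch_lightning` falls through to the `NameError` branch and performs the real imports.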