Merged
Changes from all commits
9 changes: 9 additions & 0 deletions MANIFEST.in
@@ -0,0 +1,9 @@
graft docs

include COPYING
include AUTHORS

recursive-include src/einsteinpy/tests *.py *.html

prune docs/source/examples/.ipynb_checkpoints
global-exclude *.py[cod] __pycache__ *.so *.dylib
Empty file removed: demo/__init__.py
6 changes: 3 additions & 3 deletions docs/source/examples/basic_trainer.py
@@ -2,9 +2,9 @@
import sys

from test_tube import HyperOptArgumentParser, Experiment
-from pytorch_lightning.models.trainer import Trainer
-from pytorch_lightning.utils.arg_parse import add_default_args
-from pytorch_lightning.utils.pt_callbacks import EarlyStopping, ModelCheckpoint
+from pytorch-lightning.models.trainer import Trainer
+from pytorch-lightning.utils.arg_parse import add_default_args
+from pytorch-lightning.utils.pt_callbacks import EarlyStopping, ModelCheckpoint
from demo.example_model import ExampleModel


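A note on the import paths above: a hyphen cannot appear in a Python import statement, so the hyphenated pytorch-lightning.* form of these imports is not importable as written; only the underscored package name works. A minimal illustration (the module path comes from the lines above and is shown purely to contrast the two spellings):

from pytorch_lightning.models.trainer import Trainer    # valid: package names use underscores in import statements
# from pytorch-lightning.models.trainer import Trainer  # SyntaxError: a hyphen is not allowed in an import path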
2 changes: 1 addition & 1 deletion docs/source/examples/example_model.py
@@ -1,6 +1,6 @@
import torch.nn as nn
import numpy as np
-from pytorch_lightning.root_module.root_module import RootModule
+from pytorch-lightning.root_module.root_module import RootModule
from test_tube import HyperOptArgumentParser
from torchvision.datasets import MNIST
import torchvision.transforms as transforms
9 changes: 4 additions & 5 deletions docs/source/examples/fully_featured_trainer.py
@@ -5,10 +5,10 @@
import torch

from test_tube import HyperOptArgumentParser, Experiment, SlurmCluster
-from pytorch_lightning.models.trainer import Trainer
-from pytorch_lightning.utils.arg_parse import add_default_args
+from pytorch-lightning.models.trainer import Trainer
+from pytorch-lightning.utils.arg_parse import add_default_args

-from pytorch_lightning.utils.pt_callbacks import EarlyStopping, ModelCheckpoint
+from pytorch-lightning.utils.pt_callbacks import EarlyStopping, ModelCheckpoint

SEED = 2334
torch.manual_seed(SEED)
@@ -27,7 +27,7 @@

"""
Allows training by using command line arguments
-Run by: 
+Run by:
# TYPE YOUR RUN COMMAND HERE
"""

@@ -199,4 +199,3 @@ def optimize_on_cluster(hyperparams):
nb_trials=hyperparams.nb_hopt_trials,
nb_workers=len(gpu_ids)
)
-
5 changes: 5 additions & 0 deletions pyproject.toml
@@ -0,0 +1,5 @@
[build-system]
requires = [
"setuptools",
"wheel",
]
21 changes: 21 additions & 0 deletions setup.cfg
@@ -0,0 +1,21 @@
[tool:pytest]
norecursedirs =
    .git
    dist
    build
python_files =
    test_*.py
doctest_plus = disabled
addopts = --strict
markers =
    slow
    remote_data
    filterwarnings

[pycodestyle]
ignore = E731,W504
max-line-length = 120

[flake8]
ignore = E731,W504,F401,F841
max-line-length = 120
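Because the [tool:pytest] section above sets addopts = --strict, pytest will reject any marker that is not declared in its markers list. A short sketch of how the declared slow and remote_data markers would be used in a test module (the test names and bodies are illustrative only, not part of this PR):

import time

import pytest


@pytest.mark.slow  # declared under [tool:pytest] markers, so accepted under --strict
def test_full_training_run():
    time.sleep(0.1)  # stand-in for a long-running check
    assert True


@pytest.mark.remote_data  # also declared; would tag tests that need network access
def test_download_dataset():
    assert True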
1 change: 0 additions & 1 deletion setup.py
@@ -40,7 +40,6 @@
},
packages=find_packages("src"),
package_dir={"": "src"},
-entry_points={"console_scripts": ["pytorch-lightning=pytorch-lightning.cli:main"]},
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Education",
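For context on the removed entry_points line: in a console_scripts entry, the name left of the equals sign is the installed command (it may contain hyphens), while the target on the right is a dotted module path plus a callable. A hedged sketch of the general setuptools shape, using a hypothetical cli module under the underscored package name (an assumption for illustration, not part of this diff):

from setuptools import find_packages, setup

setup(
    name="pytorch-lightning",  # distribution names may contain hyphens
    packages=find_packages("src"),
    package_dir={"": "src"},
    entry_points={
        "console_scripts": [
            # "<command> = <module.path>:<callable>"
            "pytorch-lightning = pytorch_lightning.cli:main",  # hypothetical: assumes pytorch_lightning/cli.py exposes main()
        ],
    },
)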