Commit f656882 (1 parent: b9364f9)
16 files changed: +56 -56 lines

.github/workflows/docs-check.yml (+1 -1)

@@ -11,7 +11,7 @@ jobs:
       - uses: actions/checkout@v2
       - uses: ammaraskar/sphinx-action@master
         with:
-          # git is requried to clone the docs theme
+          # git is required to clone the docs theme
           pre-build-command: "apt-get update -y && apt-get install -y git"
           docs-folder: "docs/"
           repo-token: "${{ secrets.GITHUB_TOKEN }}"

docs/source/lr_finder.rst (+1 -1)

@@ -15,7 +15,7 @@ To reduce the amount of guesswork concerning choosing a good initial learning
 rate, a `learning rate finder` can be used. As described in this `paper <https://arxiv.org/abs/1506.01186>`_
 a learning rate finder does a small run where the learning rate is increased
 after each processed batch and the corresponding loss is logged. The result of
-this is a `lr` vs. `loss` plot that can be used as guidence for choosing a optimal
+this is a `lr` vs. `loss` plot that can be used as guidance for choosing a optimal
 initial lr.

 .. warning:: For the moment, this feature only works with models having a single optimizer.
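
For context, the workflow this doc describes looks roughly like the following against the 0.7-era API; `MyModel` and its `hparams.lr` field are placeholders, not part of this diff:

import pytorch_lightning as pl

model = MyModel(hparams)   # placeholder LightningModule
trainer = pl.Trainer()

# short run: lr is increased after each batch, loss logged per step
lr_finder = trainer.lr_find(model)

# pick an lr from the lr-vs-loss curve, then train for real
model.hparams.lr = lr_finder.suggestion()
trainer.fit(model)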

pl_examples/domain_templates/reinforce_learn_Qnet.py (+1 -1)

@@ -257,7 +257,7 @@ def dqn_mse_loss(self, batch: Tuple[torch.Tensor, torch.Tensor]) -> torch.Tensor
     def training_step(self, batch: Tuple[torch.Tensor, torch.Tensor], nb_batch) -> OrderedDict:
         """
         Carries out a single step through the environment to update the replay buffer.
-        Then calculates loss based on the minibatch recieved
+        Then calculates loss based on the minibatch received

         Args:
             batch: current mini batch of replay data
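
The `dqn_mse_loss` named in the hunk header is the standard DQN regression loss; a self-contained sketch of that idea (the `net`, `target_net` and `gamma` names are assumptions, not taken from this file):

import torch
import torch.nn as nn

def dqn_mse_loss(batch, net, target_net, gamma=0.99):
    states, actions, rewards, dones, next_states = batch
    # Q-values of the actions actually taken in the replayed transitions
    state_action_values = net(states).gather(1, actions.unsqueeze(-1)).squeeze(-1)
    with torch.no_grad():
        # bootstrap from the target network; terminal states contribute no future value
        next_state_values = target_net(next_states).max(1)[0]
        next_state_values[dones] = 0.0
    expected_values = rewards + gamma * next_state_values
    return nn.MSELoss()(state_action_values, expected_values)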

pytorch_lightning/core/lightning.py (+1 -1)

@@ -875,7 +875,7 @@ def configure_ddp(self, model, device_ids):

     def _init_slurm_connection(self) -> None:
         """
-        Sets up environemnt variables necessary for pytorch distributed communications
+        Sets up environment variables necessary for pytorch distributed communications
         based on slurm environment.
         """
         # use slurm job id for the port number
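
A rough illustration of what that comment implies, not the verbatim Lightning code: every process of a SLURM job can derive the same rendezvous port from the job id alone:

import os

# last 4 digits of the job id, offset out of the privileged range (offset is illustrative)
default_port = int(os.environ.get('SLURM_JOB_ID', '0')[-4:]) + 15000
os.environ['MASTER_PORT'] = str(default_port)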

pytorch_lightning/loggers/neptune.py (+1 -1)

@@ -31,7 +31,7 @@ class NeptuneLogger(LightningLoggerBase):
         pip install neptune-client

     The Neptune logger can be used in the online mode or offline (silent) mode.
-    To log experiment data in online mode, :class:`NeptuneLogger` requries an API key.
+    To log experiment data in online mode, :class:`NeptuneLogger` requires an API key.
     In offline mode, Neptune will log to a local directory.

     **ONLINE MODE**
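
A hedged sketch of the two modes the docstring contrasts, with argument names as in the NeptuneLogger docs of this release; the project name is hypothetical:

import os
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import NeptuneLogger

# online mode: an API key is required (read from the environment here)
logger = NeptuneLogger(
    api_key=os.environ.get('NEPTUNE_API_TOKEN'),
    project_name='my_workspace/my_project',  # hypothetical project
)
# offline mode: no key needed, runs are written to a local directory
# logger = NeptuneLogger(offline_mode=True)

trainer = Trainer(logger=logger)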

pytorch_lightning/trainer/callback_config.py (+1 -1)

@@ -61,7 +61,7 @@ def configure_checkpoint_callback(self):
             ckpt_path = os.path.join(self.default_root_dir, "checkpoints")

         # when no val step is defined, use 'loss' otherwise 'val_loss'
-        train_step_only = not self.is_overriden('validation_step')
+        train_step_only = not self.is_overridden('validation_step')
         monitor_key = 'loss' if train_step_only else 'val_loss'

         if self.checkpoint_callback is True:
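
So the inferred monitor key is 'val_loss' when a validation step exists and 'loss' otherwise. To checkpoint on something else, an explicit callback can be passed instead of relying on this default; a sketch against the 0.7-era arguments:

from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import ModelCheckpoint

# 'val_acc' is a hypothetical metric that the validation step would have to log
checkpoint_callback = ModelCheckpoint(monitor='val_acc', mode='max', save_top_k=1)
trainer = Trainer(checkpoint_callback=checkpoint_callback)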

pytorch_lightning/trainer/data_loading.py (+3 -3)

@@ -78,7 +78,7 @@ class TrainerDataLoadingMixin(ABC):
     replace_sampler_ddp: bool

     @abstractmethod
-    def is_overriden(self, *args):
+    def is_overridden(self, *args):
         """Warning: this is just empty shell for code implemented in other class."""

     def _percent_range_check(self, name: str) -> None:
@@ -251,7 +251,7 @@ def reset_val_dataloader(self, model: LightningModule) -> None:
         Args:
             model: The current `LightningModule`
         """
-        if self.is_overriden('validation_step'):
+        if self.is_overridden('validation_step'):
             self.num_val_batches, self.val_dataloaders = \
                 self._reset_eval_dataloader(model, 'val')

@@ -261,7 +261,7 @@ def reset_test_dataloader(self, model) -> None:
         Args:
             model: The current `LightningModule`
         """
-        if self.is_overriden('test_step'):
+        if self.is_overridden('test_step'):
             self.num_test_batches, self.test_dataloaders =\
                 self._reset_eval_dataloader(model, 'test')

pytorch_lightning/trainer/evaluation_loop.py (+8 -8)

@@ -195,7 +195,7 @@ def get_model(self):
         """Warning: this is just empty shell for code implemented in other class."""

     @abstractmethod
-    def is_overriden(self, *args):
+    def is_overridden(self, *args):
         """Warning: this is just empty shell for code implemented in other class."""

     @abstractmethod
@@ -279,13 +279,13 @@ def _evaluate(self, model: LightningModule, dataloaders, max_batches: int, test_

             # on dp / ddp2 might still want to do something with the batch parts
             if test_mode:
-                if self.is_overriden('test_step_end'):
+                if self.is_overridden('test_step_end'):
                     model_ref = self.get_model()
                     with self.profiler.profile('test_step_end'):
                         output = model_ref.test_step_end(output)
                     self.on_test_batch_end()
             else:
-                if self.is_overriden('validation_step_end'):
+                if self.is_overridden('validation_step_end'):
                     model_ref = self.get_model()
                     with self.profiler.profile('validation_step_end'):
                         output = model_ref.validation_step_end(output)
@@ -307,23 +307,23 @@ def _evaluate(self, model: LightningModule, dataloaders, max_batches: int, test_
             model = model.module

         if test_mode:
-            if self.is_overriden('test_end', model=model):
+            if self.is_overridden('test_end', model=model):
                 # TODO: remove in v1.0.0
                 eval_results = model.test_end(outputs)
                 rank_zero_warn('Method `test_end` was deprecated in v0.7 and will be removed v1.0.'
                                ' Use `test_epoch_end` instead.', DeprecationWarning)

-            elif self.is_overriden('test_epoch_end', model=model):
+            elif self.is_overridden('test_epoch_end', model=model):
                 eval_results = model.test_epoch_end(outputs)

         else:
-            if self.is_overriden('validation_end', model=model):
+            if self.is_overridden('validation_end', model=model):
                 # TODO: remove in v1.0.0
                 eval_results = model.validation_end(outputs)
                 rank_zero_warn('Method `validation_end` was deprecated in v0.7 and will be removed v1.0.'
                                ' Use `validation_epoch_end` instead.', DeprecationWarning)

-            elif self.is_overriden('validation_epoch_end', model=model):
+            elif self.is_overridden('validation_epoch_end', model=model):
                 eval_results = model.validation_epoch_end(outputs)

         # enable train mode again
@@ -336,7 +336,7 @@ def _evaluate(self, model: LightningModule, dataloaders, max_batches: int, test_

     def run_evaluation(self, test_mode: bool = False):
         # when testing make sure user defined a test step
-        if test_mode and not self.is_overriden('test_step'):
+        if test_mode and not self.is_overridden('test_step'):
             raise MisconfigurationException(
                 "You called `.test()` without defining model's `.test_step()`."
                 " Please define and try again")
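
The deprecation warnings in this hunk describe a pure rename; a minimal before/after sketch on a LightningModule (the `val_loss` key is illustrative):

import torch

# before (deprecated in v0.7, removed in v1.0):
def validation_end(self, outputs):
    avg_loss = torch.stack([x['val_loss'] for x in outputs]).mean()
    return {'val_loss': avg_loss}

# after: identical signature and body, new hook name
def validation_epoch_end(self, outputs):
    avg_loss = torch.stack([x['val_loss'] for x in outputs]).mean()
    return {'val_loss': avg_loss}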

pytorch_lightning/trainer/lr_finder.py (+1 -1)

@@ -214,7 +214,7 @@ class _LRFinder(object):

         lr_min: lr to start search from

-        lr_max: lr to stop seach
+        lr_max: lr to stop search

         num_training: number of steps to take between lr_min and lr_max


pytorch_lightning/trainer/model_hooks.py (+4 -4)

@@ -11,7 +11,7 @@ def is_function_implemented(self, f_name):
         f_op = getattr(model, f_name, None)
         return callable(f_op)

-    def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
+    def is_overridden(self, method_name: str, model: LightningModule = None) -> bool:
         if model is None:
             model = self.get_model()
         super_object = LightningModule
@@ -30,10 +30,10 @@ def is_overriden(self, method_name: str, model: LightningModule = None) -> bool:
             # cannot pickle __code__ so cannot verify if PatchDataloader
             # exists which shows dataloader methods have been overwritten.
             # so, we hack it by using the string representation
-            is_overriden = instance_attr.patch_loader_code != str(super_attr.__code__)
+            is_overridden = instance_attr.patch_loader_code != str(super_attr.__code__)
         else:
-            is_overriden = instance_attr.__code__ is not super_attr.__code__
-        return is_overriden
+            is_overridden = instance_attr.__code__ is not super_attr.__code__
+        return is_overridden

     def has_arg(self, f_name, arg_name):
         model = self.get_model()
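
Standalone illustration of the override test in the `else:` branch above: a method counts as overridden exactly when the subclass's function object differs from the base class's.

class Base:
    def step(self):
        pass

class Child(Base):
    def step(self):
        return 1

def is_overridden(obj, name, base=Base):
    # bound methods expose the underlying function's __code__ object
    return getattr(obj, name).__code__ is not getattr(base, name).__code__

assert is_overridden(Child(), 'step')
assert not is_overridden(Base(), 'step')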

pytorch_lightning/trainer/optimizers.py (+1 -1)

@@ -81,7 +81,7 @@ def init_optimizers(
             ' * multiple outputs, dictionaries as described with an optional `frequency` key (int)')

     def configure_schedulers(self, schedulers: list):
-        # Convert each scheduler into dict sturcture with relevant information
+        # Convert each scheduler into dict structure with relevant information
         lr_schedulers = []
         default_config = {'interval': 'epoch',  # default every epoch
                           'frequency': 1,  # default every epoch/batch
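
The dict structure being normalized here is the same one a user can return from `configure_optimizers`; a hedged sketch (written as a LightningModule method, values illustrative):

import torch

def configure_optimizers(self):
    optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
    scheduler = {
        'scheduler': torch.optim.lr_scheduler.StepLR(optimizer, step_size=10),
        'interval': 'epoch',  # step the scheduler every epoch (the default above)
        'frequency': 1,       # once per interval (the default above)
    }
    return [optimizer], [scheduler]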

pytorch_lightning/trainer/trainer.py (+14 -14)

@@ -193,7 +193,7 @@ def __init__(
         show_progress_bar:
             .. warning:: .. deprecated:: 0.7.2

-                Set `progress_bar_refresh_rate` to postive integer to enable. Will remove 0.9.0.
+                Set `progress_bar_refresh_rate` to positive integer to enable. Will remove 0.9.0.

         progress_bar_refresh_rate: How often to refresh progress bar (in steps). Value ``0`` disables progress bar.
             Ignored when a custom callback is passed to :paramref:`~Trainer.callbacks`.
@@ -893,7 +893,7 @@ def run_pretrain_routine(self, model: LightningModule):
             return

         # check if we should run validation during training
-        self.disable_validation = not (self.is_overriden('validation_step') and self.val_percent_check > 0) \
+        self.disable_validation = not (self.is_overridden('validation_step') and self.val_percent_check > 0) \
             and not self.fast_dev_run

         # run tiny validation (if validation defined)
@@ -994,54 +994,54 @@ def check_model_configuration(self, model: LightningModule):

        """
        # Check training_step, train_dataloader, configure_optimizer methods
-        if not self.is_overriden('training_step', model):
+        if not self.is_overridden('training_step', model):
            raise MisconfigurationException(
                'No `training_step()` method defined. Lightning `Trainer` expects as minimum a'
                ' `training_step()`, `training_dataloader()` and `configure_optimizers()` to be defined.')

-        if not self.is_overriden('train_dataloader', model):
+        if not self.is_overridden('train_dataloader', model):
            raise MisconfigurationException(
                'No `train_dataloader()` method defined. Lightning `Trainer` expects as minimum a'
                ' `training_step()`, `training_dataloader()` and `configure_optimizers()` to be defined.')

-        if not self.is_overriden('configure_optimizers', model):
+        if not self.is_overridden('configure_optimizers', model):
            raise MisconfigurationException(
                'No `configure_optimizers()` method defined. Lightning `Trainer` expects as minimum a'
                ' `training_step()`, `training_dataloader()` and `configure_optimizers()` to be defined.')

        # Check val_dataloader, validation_step and validation_epoch_end
-        if self.is_overriden('val_dataloader', model):
-            if not self.is_overriden('validation_step', model):
+        if self.is_overridden('val_dataloader', model):
+            if not self.is_overridden('validation_step', model):
                raise MisconfigurationException('You have passed in a `val_dataloader()`'
                                                ' but have not defined `validation_step()`.')
            else:
-                if not self.is_overriden('validation_epoch_end', model):
+                if not self.is_overridden('validation_epoch_end', model):
                    rank_zero_warn(
                        'You have defined a `val_dataloader()` and have defined a `validation_step()`,'
                        ' you may also want to define `validation_epoch_end()` for accumulating stats.',
                        RuntimeWarning
                    )
        else:
-            if self.is_overriden('validation_step', model):
+            if self.is_overridden('validation_step', model):
                raise MisconfigurationException('You have defined `validation_step()`,'
                                                ' but have not passed in a val_dataloader().')

        # Check test_dataloader, test_step and test_epoch_end
-        if self.is_overriden('test_dataloader', model):
-            if not self.is_overriden('test_step', model):
+        if self.is_overridden('test_dataloader', model):
+            if not self.is_overridden('test_step', model):
                raise MisconfigurationException('You have passed in a `test_dataloader()`'
                                                ' but have not defined `test_step()`.')
            else:
-                if not self.is_overriden('test_epoch_end', model):
+                if not self.is_overridden('test_epoch_end', model):
                    rank_zero_warn(
                        'You have defined a `test_dataloader()` and have defined a `test_step()`, you may also want to'
                        ' define `test_epoch_end()` for accumulating stats.', RuntimeWarning
                    )

    def check_testing_model_configuration(self, model: LightningModule):

-        has_test_step = self.is_overriden('test_step', model)
-        has_test_epoch_end = self.is_overriden('test_epoch_end', model)
+        has_test_step = self.is_overridden('test_step', model)
+        has_test_epoch_end = self.is_overridden('test_epoch_end', model)
        gave_test_loader = hasattr(model, 'test_dataloader') and model.test_dataloader()

        if gave_test_loader and not has_test_step:
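
The three checks at the top of `check_model_configuration` define the minimum LightningModule interface; a compact module satisfying them (data and layer sizes are placeholders):

import torch
from torch.utils.data import DataLoader, TensorDataset
import pytorch_lightning as pl

class MinimalModule(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return {'loss': torch.nn.functional.mse_loss(self.layer(x), y)}

    def train_dataloader(self):
        dataset = TensorDataset(torch.randn(64, 4), torch.randn(64, 1))
        return DataLoader(dataset, batch_size=8)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)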

pytorch_lightning/trainer/training_loop.py (+9 -9)

@@ -271,7 +271,7 @@ def detect_nan_tensors(self, *args):
         """Warning: this is just empty shell for code implemented in other class."""

     @abstractmethod
-    def is_overriden(self, *args):
+    def is_overridden(self, *args):
         """Warning: this is just empty shell for code implemented in other class."""

     @abstractmethod
@@ -419,9 +419,9 @@ def run_training_epoch(self):
             _outputs = self.run_training_batch(batch, batch_idx)
             batch_result, grad_norm_dic, batch_step_metrics, batch_output = _outputs

-            # only track outputs when user implementes training_epoch_end
-            # otherwise we will build up unecessary memory
-            if self.is_overriden('training_epoch_end', model=self.get_model()):
+            # only track outputs when user implements training_epoch_end
+            # otherwise we will build up unnecessary memory
+            if self.is_overridden('training_epoch_end', model=self.get_model()):
                 outputs.append(batch_output)

             # when returning -1 from train_step, we end epoch early
@@ -484,7 +484,7 @@ def run_training_epoch(self):

         # process epoch outputs
         model = self.get_model()
-        if self.is_overriden('training_epoch_end', model=model):
+        if self.is_overridden('training_epoch_end', model=model):
             epoch_output = model.training_epoch_end(outputs)
             _processed_outputs = self.process_output(epoch_output)
             log_epoch_metrics = _processed_outputs[2]
@@ -493,7 +493,7 @@ def run_training_epoch(self):
             self.callback_metrics.update(callback_epoch_metrics)

         # when no val loop is present or fast-dev-run still need to call checkpoints
-        if not self.is_overriden('validation_step') and not (self.fast_dev_run or should_check_val):
+        if not self.is_overridden('validation_step') and not (self.fast_dev_run or should_check_val):
             self.call_checkpoint_callback()
             self.call_early_stop_callback()

@@ -539,7 +539,7 @@ def run_training_batch(self, batch, batch_idx):
             self.split_idx = split_idx

             for opt_idx, optimizer in self._get_optimizers_iterable():
-                # make sure only the gradients of the current optimizer's paramaters are calculated
+                # make sure only the gradients of the current optimizer's parameters are calculated
                 # in the training step to prevent dangling gradients in multiple-optimizer setup.
                 if len(self.optimizers) > 1:
                     for param in self.get_model().parameters():
@@ -737,14 +737,14 @@ def training_forward(self, batch, batch_idx, opt_idx, hiddens):

         # allow any mode to define training_step_end
         # do something will all the dp outputs (like softmax)
-        if self.is_overriden('training_step_end'):
+        if self.is_overridden('training_step_end'):
             model_ref = self.get_model()
             with self.profiler.profile('training_step_end'):
                 output = model_ref.training_step_end(output)

         # allow any mode to define training_end
         # TODO: remove in 1.0.0
-        if self.is_overriden('training_end'):
+        if self.is_overridden('training_end'):
             model_ref = self.get_model()
             with self.profiler.profile('training_end'):
                 output = model_ref.training_end(output)
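
What the corrected comment about the current optimizer's parameters refers to, sketched outside Lightning: before each optimizer's step, freeze everything, then re-enable only the parameters that optimizer owns, so no stale gradients accumulate on the others. `model` and `optimizer` stand in for the trainer's references.

# freeze all parameters, then unfreeze only those in the active optimizer
for param in model.parameters():
    param.requires_grad = False
for group in optimizer.param_groups:
    for param in group['params']:
        param.requires_grad = True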

setup.py (+2 -2)

@@ -32,7 +32,7 @@ def load_requirements(path_dir=PATH_ROOT, comment_char='#'):
     return reqs


-def load_long_describtion():
+def load_long_description():
     # https://github.com/PyTorchLightning/pytorch-lightning/raw/master/docs/source/_images/lightning_module/pt_to_pl.png
     url = os.path.join(pytorch_lightning.__homepage__, 'raw', pytorch_lightning.__version__, 'docs')
     text = open('README.md', encoding='utf-8').read()
@@ -59,7 +59,7 @@ def load_long_describtion():
     license=pytorch_lightning.__license__,
     packages=find_packages(exclude=['tests', 'tests/*', 'benchmarks']),

-    long_description=load_long_describtion(),
+    long_description=load_long_description(),
     long_description_content_type='text/markdown',
     include_package_data=True,
     zip_safe=False,

tests/models/test_gpu.py (+2 -2)

@@ -273,13 +273,13 @@ def test_parse_gpu_fail_on_unsupported_inputs(mocked_device_count, gpus):

 @pytest.mark.gpus_param_tests
 @pytest.mark.parametrize("gpus", [[1, 2, 19], -1, '-1'])
-def test_parse_gpu_fail_on_non_existant_id(mocked_device_count_0, gpus):
+def test_parse_gpu_fail_on_non_existent_id(mocked_device_count_0, gpus):
     with pytest.raises(MisconfigurationException):
         parse_gpu_ids(gpus)


 @pytest.mark.gpus_param_tests
-def test_parse_gpu_fail_on_non_existant_id_2(mocked_device_count):
+def test_parse_gpu_fail_on_non_existent_id_2(mocked_device_count):
     with pytest.raises(MisconfigurationException):
         parse_gpu_ids([1, 2, 19])