diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml
index 7116e9c6..e3a8536e 100644
--- a/.github/workflows/codecov.yml
+++ b/.github/workflows/codecov.yml
@@ -31,7 +31,7 @@ jobs:
         run: |
           # Limit to coverage of source files in current directory
           coverage run -m examples.examples --source=.
-          coverage xml
+          coverage xml --omit "*_remote_module_non_scriptable.py"
       - name: Upload coverage to Codecov
         uses: codecov/codecov-action@v1.2.0
         with:
diff --git a/setup.py b/setup.py
index cbc2a12a..5a2686c6 100644
--- a/setup.py
+++ b/setup.py
@@ -41,7 +41,7 @@
         "numpy",
         "scipy",
         "torch>=1.8",
-        "pytorch-lightning",
+        "pytorch-lightning>=1.4",
         # pypi release (only master) doesn't support OrderedDict typing
         # "typing-extensions",
     ],
diff --git a/torchsynth/profile.py b/torchsynth/profile.py
index fb60642f..55c2cbd3 100644
--- a/torchsynth/profile.py
+++ b/torchsynth/profile.py
@@ -110,7 +110,7 @@ def run_lightning_module(
         # Run module with profiling
         pr = cProfile.Profile()
         pr.enable()
-        trainer.test(module, test_dataloaders=dataloader)
+        trainer.test(module, dataloaders=dataloader)
         pr.disable()
 
         s = io.StringIO()
@@ -132,7 +132,7 @@ def run_lightning_module(
         print(s.getvalue())
 
     else:
-        trainer.test(module, test_dataloaders=dataloader)
+        trainer.test(module, dataloaders=dataloader)
 
 
 def main():
diff --git a/torchsynth/synth.py b/torchsynth/synth.py
index ae737e7d..dd32bffe 100644
--- a/torchsynth/synth.py
+++ b/torchsynth/synth.py
@@ -239,9 +239,9 @@ def _batch_idx_to_is_train(
         Determine which samples are training examples if batch_idx is provided
         """
         if batch_idx is not None:
-            idxs = torch.range(
+            idxs = torch.arange(
                 self.batch_size * batch_idx,
-                self.batch_size * (batch_idx + 1) - 1,
+                self.batch_size * (batch_idx + 1),
                 device=self.device,
             )
             assert len(idxs) == self.batch_size
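
Note on the `torchsynth/synth.py` hunk: `torch.range` is deprecated and, unlike `torch.arange`, includes its endpoint, so the `- 1` on the upper bound is dropped when switching to `arange`. Below is a minimal standalone sketch (the `batch_size` and `batch_idx` values are made up for illustration) showing that the new bounds still yield exactly `batch_size` indices. The `trainer.test(..., dataloaders=...)` rename in `torchsynth/profile.py` likewise lines up with the `pytorch-lightning>=1.4` bump in `setup.py`, where `test_dataloaders` is deprecated in favor of `dataloaders`.

```python
import torch

# Made-up values, for illustration only.
batch_size, batch_idx = 4, 2

# torch.arange excludes its upper bound, so [start, stop) yields exactly
# batch_size indices; the deprecated torch.range included the endpoint,
# which is why the old call subtracted 1 from the stop value.
# (arange also returns int64 for integer arguments, whereas range
# returned the default float dtype.)
idxs = torch.arange(batch_size * batch_idx, batch_size * (batch_idx + 1))
print(idxs)  # tensor([ 8,  9, 10, 11])
assert len(idxs) == batch_size
```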