This repository has been archived by the owner on Apr 19, 2023. It is now read-only.

Commit 3106760

Disable multi-processing in all tests and enable torch 1.0 in travis
constantinpape committed Dec 20, 2018
1 parent 8e3eaff commit 3106760
Showing 2 changed files with 3 additions and 4 deletions.
.travis.yml: 3 changes (1 addition, 2 deletions)
@@ -7,8 +7,7 @@ python:
 - 3.7
 
 env:
-# FIXME pytorch 1.0 multi-processing is broken ...
-# - PYTORCH_CONDA="pytorch" TORCHVISION_CONDA="torchvision" TORCHVISION_CHANNEL=pytorch
+- PYTORCH_CONDA="pytorch" TORCHVISION_CONDA="torchvision" TORCHVISION_CHANNEL=pytorch
 - PYTORCH_CONDA="pytorch=0.4.1" TORCHVISION_CONDA="torchvision" TORCHVISION_CHANNEL=pytorch
 
 install:
Second changed file: 4 changes (2 additions, 2 deletions)
@@ -51,9 +51,9 @@ def get_random_dataloaders(self, input_channels=3):
 
         # Build dataloaders from dataset
         train_loader = DataLoader(train_dataset, batch_size=1,
-                                  shuffle=True, num_workers=1, pin_memory=False)
+                                  shuffle=True, num_workers=0, pin_memory=False)
         test_loader = DataLoader(test_dataset, batch_size=1,
-                                 shuffle=True, num_workers=1, pin_memory=False)
+                                 shuffle=True, num_workers=0, pin_memory=False)
         return train_loader, test_loader
 
     def get_trainer(self, input_channels):
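
For context: with num_workers=0 the PyTorch DataLoader assembles every batch in the calling process and never spawns worker subprocesses, which is what sidesteps the pytorch 1.0 multi-processing breakage noted in the old .travis.yml comment. A minimal sketch of the setting, using a hypothetical TensorDataset in place of the repository's test datasets (not code from this commit):

import torch
from torch.utils.data import DataLoader, TensorDataset

# Hypothetical stand-in for the train/test datasets built in the tests
dataset = TensorDataset(torch.randn(8, 3, 32, 32), torch.randint(0, 2, (8,)))

# num_workers=0 disables multi-process loading: batches are produced in the
# main process, so no worker processes are forked or spawned on CI
loader = DataLoader(dataset, batch_size=1,
                    shuffle=True, num_workers=0, pin_memory=False)

for inputs, targets in loader:
    pass  # iteration happens entirely in-process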

0 comments on commit 3106760
