Revert D24524219: Remove balance and devices parameter from Pipe.
Test Plan: revert-hammer

Differential Revision: D24524219 (8da7576)

Original commit changeset: 9973172c2bb7

fbshipit-source-id: b187c80270adb2a412e3882863a2d7de2a52ed56
ngimel authored and facebook-github-bot committed Nov 13, 2020
1 parent 4f538a2 commit eb8331e
Showing 9 changed files with 209 additions and 155 deletions.
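The diffs below all follow one pattern: calls of the form Pipe(model, chunks=...) are rolled back to the older signature that takes an explicit balance (number of layers per partition) and, optionally, a devices list. A minimal sketch of the two calling conventions, assuming the torchgpipe-derived signature that this revert restores (the exact keyword defaults are an assumption):

    import torch.nn as nn
    from torch.distributed._pipeline.sync import Pipe

    model = nn.Sequential(nn.Linear(8, 8), nn.ReLU(), nn.Linear(8, 8))

    # Signature being reverted (D24524219): no balance/devices; the
    # partitioning is derived from how the user placed the submodules.
    # pipe = Pipe(model, chunks=2)

    # Signature restored by this revert: balance sums to the number of
    # layers, and devices (one entry per partition) says where each goes.
    pipe = Pipe(model, balance=[2, 1], devices=["cpu", "cpu"], chunks=2)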
6 changes: 2 additions & 4 deletions test/distributed/_pipeline/sync/skip/test_gpipe.py
@@ -11,7 +11,6 @@
from torch.distributed._pipeline.sync import Pipe
from torch.distributed._pipeline.sync.skip import pop, skippable, stash
from torch.distributed._pipeline.sync.skip.portal import PortalBlue, PortalCopy, PortalOrange
- from torch.testing._internal.distributed.pipeline.utils import convert_to_balance


@pytest.mark.skipif(not torch.cuda.is_available(), reason="cuda required")
@@ -53,8 +52,7 @@ def forward(self, input):
return output

model = nn.Sequential(Layer1(), Layer2(), Layer3())
- model = convert_to_balance(model, balance)
- model = Pipe(model, chunks=3, checkpoint=checkpoint)
+ model = Pipe(model, balance, chunks=3, checkpoint=checkpoint)

in_device = model.devices[0]
out_device = model.devices[-1]
@@ -83,7 +81,7 @@ def forward(self, input):
return input

model = nn.Sequential(Stash(), Pop())
- model = Pipe(model, chunks=5)
+ model = Pipe(model, [1, 1], devices=["cpu", "cpu"], chunks=5)

input = torch.rand(10, requires_grad=True)
output = model(input)
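The removed import, convert_to_balance (from torch.testing._internal.distributed.pipeline.utils), was the helper the balance-free API relied on to split and place a flat nn.Sequential before handing it to Pipe. A rough sketch of what such a helper plausibly does, inferred from how the tests call it; the name, device policy, and body here are assumptions, not the actual implementation:

    import torch.nn as nn

    def convert_to_balance_sketch(model: nn.Sequential, balance):
        # Hypothetical stand-in: group balance[i] consecutive layers
        # into partition i and move that partition to CUDA device i,
        # so Pipe can infer placement from each partition's parameters.
        partitions, idx = [], 0
        for device, num_layers in enumerate(balance):
            layers = [model[idx + i] for i in range(num_layers)]
            idx += num_layers
            partitions.append(nn.Sequential(*layers).to("cuda:{}".format(device)))
        return nn.Sequential(*partitions)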
2 changes: 1 addition & 1 deletion test/distributed/_pipeline/sync/skip/test_leak.py
@@ -91,7 +91,7 @@ def forward(self, input):
return self.F.apply(input)

model = nn.Sequential(NoPortalTensorAtBackward(), stash_, pop_)
- model = Pipe(model, chunks=2, checkpoint=checkpoint)
+ model = Pipe(model, balance=[2, 1], devices=["cpu", "cpu"], chunks=2, checkpoint=checkpoint)

input = torch.rand(10, requires_grad=True)

20 changes: 5 additions & 15 deletions test/distributed/_pipeline/sync/test_bugs.py
@@ -37,7 +37,7 @@ def forward(self, input):
return Identity.apply(input)

model = nn.Sequential(M(), M())
- model = Pipe(model, checkpoint="always")
+ model = Pipe(model, [1, 1], devices=["cpu", "cpu"], checkpoint="always")

x = torch.rand(42)
y = model(x)
@@ -62,7 +62,7 @@ def forward(self, x):
raise ExpectedException()

model = nn.Sequential(Pass(), Pass(), Raise())
- model = Pipe(model, chunks=3)
+ model = Pipe(model, [1, 1, 1], devices=["cpu", "cpu", "cpu"], chunks=3)

with pytest.raises(ExpectedException):
model(torch.rand(3))
@@ -86,28 +86,18 @@ def backward(ctx, grad):
return grad

class Layer1(nn.Module):
- def __init__(self):
- super().__init__()
- self.ones = nn.Parameter(torch.ones(32, 3, 32, 32, requires_grad=True))
-
def forward(self, pair):
a, b = pair
- a = a * self.ones
return a * 1, b * 2, b * 3

class Layer2(nn.Module):
- def __init__(self):
- super().__init__()
- self.ones = nn.Parameter(torch.ones(32, 3, 32, 32, requires_grad=True))
-
def forward(self, triple):
a, b, c = triple
- a = a * self.ones
b = Sleep.apply(b)
return a + b + c

- model = nn.Sequential(Layer1().cuda(0), Layer2().cuda(1))
- model = Pipe(model, chunks=32, checkpoint="never")
+ model = nn.Sequential(Layer1(), Layer2())
+ model = Pipe(model, [1, 1], devices=[0, 1], chunks=32, checkpoint="never")

a = torch.rand(1024, 3, 32, 32, device=0, requires_grad=True)
b = torch.rand(1024, 3, 32, 32, device=0, requires_grad=True)
@@ -131,7 +121,7 @@ def forward(self, x):
model = nn.Sequential(Dropouts(), Dropouts())

x = torch.rand(10, 10, requires_grad=True)
- model = Pipe(model, chunks=10, checkpoint="always")
+ model = Pipe(model, [1, 1], devices=["cpu", "cpu"], chunks=10, checkpoint="always")
y = model(x)
y.norm().backward()

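The test_bugs.py hunk above also shows why the balance-free API gave each stage a throwaway parameter: with no devices argument, Pipe had to deduce each partition's device from that partition's parameters, so every stage needed at least one (self.ones) and had to be moved with .cuda(i) by hand. With the restored signature the stages are built unplaced and Pipe does the moving. A minimal sketch of the restored style, assuming two visible CUDA devices and that the input must live on the first partition's device:

    import torch
    import torch.nn as nn
    from torch.distributed._pipeline.sync import Pipe

    model = nn.Sequential(nn.Linear(32, 32), nn.Linear(32, 32))
    if torch.cuda.device_count() >= 2:
        # Pipe moves stage i to devices[i]; balance [1, 1] puts one
        # Linear layer on each GPU.
        pipe = Pipe(model, [1, 1], devices=[0, 1], chunks=4, checkpoint="never")
        out = pipe(torch.rand(64, 32, device=0))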
2 changes: 1 addition & 1 deletion test/distributed/_pipeline/sync/test_inplace.py
@@ -13,7 +13,7 @@

def test_inplace_on_requires_grad():
model = nn.Sequential(nn.Linear(1, 1), nn.ReLU(inplace=True))
- model = Pipe(model, checkpoint="always")
+ model = Pipe(model, [1, 1], devices=["cpu", "cpu"], checkpoint="always")

x = torch.rand(1)
y = model(x)
