[functorch] vmap-dynamo: run vmap_impl under fake_mode #107462

38 changes: 38 additions & 0 deletions test/dynamo/test_higher_order_ops.py

@@ -2651,6 +2651,44 @@ def wrapper_fn(x, out_dims):
             {"torch.func.vmap: out_dims is not an int or tuple variable.": 2},
         )

+    def test_vmap_new_tensor_in_body(self):
+        def fn(x):
+            return x + torch.ones(3)
+
+        def wrapper_fn(x):
+            return torch.func.vmap(fn)(x)
+
+        x = torch.randn(
+            3,
+        )
+        opt = torch.compile(wrapper_fn, backend="eager", fullgraph=True)
+        expected = wrapper_fn(x)
+        actual = opt(x)
+        self.assertEqual(expected, actual)
+
+    def test_vmap_new_tensor_unused_in_body(self):
+        def fn(x):
+            return torch.tensor(0.5)
+
+        def wrapper_fn(x):
+            return torch.func.vmap(fn)(x)
+
+        x = torch.randn(3)
+        opt = torch.compile(wrapper_fn, backend="eager", fullgraph=True)
+        expected = wrapper_fn(x)
+        actual = opt(x)
+        self.assertEqual(expected, actual)
+
+    def test_vmap_new_tensor_implicit_via_op(self):
+        def wrapper_fn(x):
+            return torch.func.vmap(lambda t: torch.add(t, 0.5))(x)
+
+        x = torch.randn(3)
+        opt = torch.compile(wrapper_fn, backend="eager", fullgraph=True)
+        expected = wrapper_fn(x)
+        actual = opt(x)
+        self.assertEqual(expected, actual)
+
+
 class ActivationCheckpointingTests(torch._dynamo.test_case.TestCase):
     def _validate(self, fn, backend, *args, skip_check=False, fullgraph=True):
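Taken together, the three new tests cover the ways a fresh tensor can appear inside a `vmap`ed body: an explicit factory call (`torch.ones`) whose result is used, a constant (`torch.tensor(0.5)`) that ignores the batched input entirely, and a tensor created implicitly when `torch.add(t, 0.5)` wraps its scalar argument. Assuming the standard PyTorch test harness, the new cases can be run in isolation with something like:

    python test/dynamo/test_higher_order_ops.py -k test_vmap_new_tensor
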
20 changes: 12 additions & 8 deletions torch/_dynamo/variables/higher_order_ops.py

@@ -886,14 +886,18 @@ def call_function(
             pytree.tree_map(lambda x: x.value, updated_in_dims.items)
         )

-        example_value = torch._functorch.vmap.vmap_impl(
-            torch.fx.GraphModule(tx.output.nn_modules, body_graph),
-            actual_in_dims,
-            out_dims.as_python_constant(),
-            randomness.value,
-            chunk_size.value,
-            *fake_batched_fn_args,
-        )
+        # NOTE: `body_graph` may contain operators that create new
+        # tensors; run `vmap_impl` under FakeMode so that any such
+        # tensors are created as fake tensors as well.
+        with tx.fake_mode:
+            example_value = torch._functorch.vmap.vmap_impl(
+                torch.fx.GraphModule(tx.output.nn_modules, body_graph),
+                actual_in_dims,
+                out_dims.as_python_constant(),
+                randomness.value,
+                chunk_size.value,
+                *fake_batched_fn_args,
+            )

         # proxy corresponds to `call = vmap_proxy(*batched_fn_args, **batched_fn_kwargs)`
         proxy = vmap_proxy(*proxy_batched_fn_args)
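For context, `tx.fake_mode` is the `FakeTensorMode` Dynamo already uses for the enclosing compilation. Below is a minimal sketch (not part of the PR) of the property the NOTE relies on: inside a `FakeTensorMode`, tensor factories produce `FakeTensor`s instead of allocating real storage, so running the traced body under that mode keeps its outputs consistent with the fake batched inputs.

    import torch
    from torch._subclasses.fake_tensor import FakeTensor, FakeTensorMode

    # Inside the mode, factory ops like torch.ones create FakeTensors:
    with FakeTensorMode():
        t = torch.ones(3)   # no real memory is allocated
        out = t + t         # ops on fake tensors stay fake

    assert isinstance(t, FakeTensor)
    assert isinstance(out, FakeTensor)

Without the `with tx.fake_mode:` block, a factory op in `body_graph` would create a real tensor while the batched inputs are fake, and `vmap_impl` would fail when the two are mixed; the new tests above exercise exactly that path.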