forked from wjakob/nanobind_example
add minimal example that reproduces the nanobind leak error
Showing 3 changed files with 42 additions and 1 deletion.
nanobind_example/__init__.py
@@ -1 +1 @@
-from .nanobind_example_ext import add
+from .nanobind_example_ext import add, Foo, make_foo
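The commit's remaining changes (presumably the C++ extension source defining Foo and make_foo, which would account for the other 17 added lines) are not shown in this excerpt. Judging from the usage in test_autograd.py below, the assumed Python-side API is roughly:

    from nanobind_example import Foo, make_foo

    foo = make_foo(1, 0.5)       # assumption: make_foo is a factory that
    assert isinstance(foo, Foo)  # returns a bound Foo instance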
test_autograd.py (new file)
@@ -0,0 +1,24 @@
+import torch
+import math
+from nanobind_example import *
+
+# example adapted from https://pytorch.org/tutorials/beginner/examples_autograd/two_layer_net_custom_function.html
+class LegendrePolynomial3(torch.autograd.Function):
+    @staticmethod
+    def forward(ctx, input, foo):
+        ctx.foo = foo
+        ctx.save_for_backward(input)
+        return 0.5 * (5 * input ** 3 - 3 * input)
+
+    @staticmethod
+    def backward(ctx, grad_output):
+        input, = ctx.saved_tensors
+        return grad_output * 1.5 * (5 * input ** 2 - 1), None
+
+input_tensor = torch.rand(3,3)
+input_tensor.requires_grad_()
+foo = make_foo(1, 0.5)
+
+output = LegendrePolynomial3.apply(input_tensor, foo)
+grad_output = torch.rand_like(output)
+output.backward(grad_output)
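For context beyond the diff: nanobind runs a leak checker at interpreter shutdown and warns about bound instances that are still alive at that point. A minimal diagnostic sketch, assuming the problem is a reference cycle through ctx that only Python's cycle collector can break; the del targets and the explicit gc.collect() are illustrative, not part of the test:

    import gc

    # After backward(), output.grad_fn still holds ctx, and ctx.foo holds
    # the bound Foo instance, so dropping our direct references alone may
    # not free it.
    del output, grad_output, foo
    gc.collect()  # a cycle through ctx would be freed only here
    # Anything still alive when the interpreter exits is what nanobind
    # reports as leaked.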
Comment on commit 8629419:
test_autograd.py is a minimal reproducible example for the nanobind leak error. My guess is that storing a reference on ctx messed up nanobind's reference counts, as the error went away when removing the line ctx.foo = foo.
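A sketch of the fix the comment describes, i.e. simply not storing the bound object on ctx, so the nanobind instance never becomes reachable from the autograd graph. In this example backward() never reads ctx.foo, so nothing else has to change (the Fixed suffix is just for illustration):

    class LegendrePolynomial3Fixed(torch.autograd.Function):
        @staticmethod
        def forward(ctx, input, foo):
            # note: no ctx.foo = foo here
            ctx.save_for_backward(input)
            return 0.5 * (5 * input ** 3 - 3 * input)

        @staticmethod
        def backward(ctx, grad_output):
            input, = ctx.saved_tensors
            return grad_output * 1.5 * (5 * input ** 2 - 1), None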