Skip to content

Commit

Permalink
Update on "Remove incorrect THP{Cpp,}Function_traverse PyObject traversals"
Browse files Browse the repository at this point in the history


Fixes #102174


[ghstack-poisoned]
  • Loading branch information
soulitzer committed Jun 2, 2023
1 parent 897ee67 commit 84a5842
Showing 1 changed file with 23 additions and 24 deletions.
47 changes: 23 additions & 24 deletions torch/csrc/autograd/python_cpp_function.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -76,38 +76,37 @@ PyObject* THPCppFunction_call(
}

int THPCppFunction_traverse(PyObject* self, visitproc visit, void* arg) {
auto& fn = *((THPCppFunction*)self)->cdata;
if ((((THPCppFunction*)self)->cdata).use_count() > 1) {
if ((((THPCppFunction*)self)->cdata).use_count() == 1) {
// The fields traversed below are owned by the cpp grad_fn, which we own a
// reference to. We should only traverse them, however, if we are the only
// owner of the grad_fn, otherwise we risk prematurely gc'ing the grad_fn.
//
// See: https://github.com/pytorch/pytorch/issues/102174
return 0;
}
for (const auto& hook : fn.tensor_pre_hooks()) {
if (auto pyhook = dynamic_cast<PyFunctionTensorPreHook*>(hook.get())) {
Py_VISIT(pyhook->dict);
auto& fn = *((THPCppFunction*)self)->cdata;
for (const auto& hook : fn.tensor_pre_hooks()) {
if (auto pyhook = dynamic_cast<PyFunctionTensorPreHook*>(hook.get())) {
Py_VISIT(pyhook->dict);
}
}
}
// NOTE [retains_grad_hook PyObject traversal]
// In theory this shouldn't be necessary, because retains_grad_hooks should
// not contain any PyFunctionTensorPreHooks. The alternative is to have a
// check that actually guarantees this.
for (const auto& pair : fn.retains_grad_hooks()) {
if (auto pyhook =
dynamic_cast<PyFunctionTensorPreHook*>(pair.second.get())) {
Py_VISIT(pyhook->dict);
// NOTE [retains_grad_hook PyObject traversal]
// In theory this shouldn't be necessary, because retains_grad_hooks should
// not contain any PyFunctionTensorPreHooks. The alternative is to have a
// check that actually guarantees this.
for (const auto& pair : fn.retains_grad_hooks()) {
if (auto pyhook =
dynamic_cast<PyFunctionTensorPreHook*>(pair.second.get())) {
Py_VISIT(pyhook->dict);
}
}
}
for (const auto& hook : fn.pre_hooks()) {
if (auto pyhook = dynamic_cast<PyFunctionPreHook*>(hook.get())) {
Py_VISIT(pyhook->dict);
for (const auto& hook : fn.pre_hooks()) {
if (auto pyhook = dynamic_cast<PyFunctionPreHook*>(hook.get())) {
Py_VISIT(pyhook->dict);
}
}
}
for (const auto& hook : fn.post_hooks()) {
if (auto pyhook = dynamic_cast<PyFunctionPostHook*>(hook.get())) {
Py_VISIT(pyhook->dict);
for (const auto& hook : fn.post_hooks()) {
if (auto pyhook = dynamic_cast<PyFunctionPostHook*>(hook.get())) {
Py_VISIT(pyhook->dict);
}
}
}
return 0;
Expand Down

0 comments on commit 84a5842

Please sign in to comment.