Remove unused forward AD flag (#57058)
Summary: Pull Request resolved: #57058

Test Plan: Imported from OSS

Reviewed By: soulitzer

Differential Revision: D28071504

Pulled By: albanD

fbshipit-source-id: df694ac6b9fbb4aed269d61cd9522f8602fdae0c
albanD authored and facebook-github-bot committed Apr 30, 2021
1 parent 83f1867 commit 95dc2b6
Showing 4 changed files with 0 additions and 40 deletions.
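For context on the removal: forward-mode AD is gated only by the dual-level context manager, so the global enable flag deleted below is redundant. A minimal sketch of the user-facing API this commit leaves in place (the `torch.autograd.forward_ad` calls reflect the API of that era and are an illustration, not part of this diff):

```python
import torch
import torch.autograd.forward_ad as fwAD

primal = torch.randn(3)
tangent = torch.randn(3)

# Forward AD runs only inside a dual level; no global enable flag is flipped.
with fwAD.dual_level():
    dual = fwAD.make_dual(primal, tangent)
    out = dual.sin()
    # unpack_dual yields the primal output and its directional derivative (JVP).
    out_primal, out_tangent = fwAD.unpack_dual(out)
    # For y = sin(x), the JVP is cos(x) * tangent.
    assert torch.allclose(out_tangent, primal.cos() * tangent)
```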
3 changes: 0 additions & 3 deletions test/test_autograd.py
@@ -21,9 +21,6 @@
 # Autograd tests use double as the default dtype
 torch.set_default_dtype(torch.double)

-# TODO(alband) Remove this when this flag is not needed anymore
-torch._C._set_forward_AD_enabled(True)
-
 from torch import nn
 from torch._six import inf, nan
 from torch.autograd.function import once_differentiable
10 changes: 0 additions & 10 deletions torch/csrc/autograd/forward_grad.cpp
@@ -77,14 +77,4 @@ const at::Tensor& ForwardGrad::undef_grad() {
   return singleton_undefined_tensor;
 }

-// Temporary functions to disable forward AD
-// TODO(alband) remove these when perf issues are solved
-bool isForwardADEnabled() {
-  return is_forward_grad_enabled;
-}
-
-void setForwardADEnabled(bool value) {
-  is_forward_grad_enabled = value;
-}
-
 }} // namespace torch::autograd
5 changes: 0 additions & 5 deletions torch/csrc/autograd/forward_grad.h
@@ -186,9 +186,4 @@ struct TORCH_API ForwardGrad : std::enable_shared_from_this<ForwardGrad> {

 };

-// Temporary functions to disable forward AD
-// TODO(alband) remove these when perf issues are solved
-bool TORCH_API isForwardADEnabled();
-void TORCH_API setForwardADEnabled(bool value);
-
 }} // namespace torch::autograd
22 changes: 0 additions & 22 deletions torch/csrc/autograd/init.cpp
@@ -262,26 +262,6 @@ static PyObject * autocast_decrement_nesting(PyObject* _unused, PyObject *arg) {
   END_HANDLE_TH_ERRORS
 }

-static PyObject * set_forward_AD_enabled(PyObject* _unused, PyObject *arg) {
-  HANDLE_TH_ERRORS
-  if (!PyBool_Check(arg)) {
-    throw TypeError("enabled must be a bool (got %s)", Py_TYPE(arg)->tp_name);
-  }
-  setForwardADEnabled(arg == Py_True);
-  Py_RETURN_NONE;
-  END_HANDLE_TH_ERRORS
-}
-
-static PyObject * is_forward_AD_enabled(PyObject* _unused, PyObject *arg) {
-  HANDLE_TH_ERRORS
-  if (isForwardADEnabled()) {
-    Py_RETURN_TRUE;
-  } else {
-    Py_RETURN_FALSE;
-  }
-  END_HANDLE_TH_ERRORS
-}
-
 static PyObject * set_grad_enabled(PyObject* _unused, PyObject *arg) {
   HANDLE_TH_ERRORS
   if (!PyBool_Check(arg)) {
@@ -348,8 +328,6 @@ static PyObject * python_exit_dual_level(PyObject* _unused, PyObject* args, PyObject* kwargs) {
 static PyMethodDef methods[] = { // NOLINT
   {"_set_grad_enabled", set_grad_enabled, METH_O, nullptr},
   {"is_grad_enabled", is_grad_enabled, METH_NOARGS, nullptr},
-  {"_set_forward_AD_enabled", set_forward_AD_enabled, METH_O, nullptr},
-  {"_is_forward_AD_enabled", is_forward_AD_enabled, METH_NOARGS, nullptr},
   {"set_autocast_enabled", set_autocast_enabled, METH_O, nullptr},
   {"is_autocast_enabled", is_autocast_enabled, METH_NOARGS, nullptr},
   {"clear_autocast_cache", clear_autocast_cache, METH_NOARGS, nullptr},
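With the two `PyMethodDef` entries gone, the private flag bindings disappear from `torch._C`. A quick sanity check one could run against a build containing this commit (an illustrative snippet, not part of the commit's test plan):

```python
import torch

# The private flag bindings removed by this commit should no longer exist.
assert not hasattr(torch._C, "_set_forward_AD_enabled")
assert not hasattr(torch._C, "_is_forward_AD_enabled")

# Gradient-mode toggling, shown as context in the same method table, is untouched.
assert hasattr(torch._C, "_set_grad_enabled")
```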
