Skip to content

Commit

Permalink
Make torch._dynamo lazy-importable (#104368)
Browse files Browse the repository at this point in the history
Use [PEP-562](https://peps.python.org/pep-0562) to import `_dynamo` and `_inductor` only when needed.

- Remove redundant imports from tests
- Add `test_lazy_imports_are_lazy` to make sure they will not get imported by accident

<!--
copilot:poem
-->
### <samp>🤖 Generated by Copilot at bae8e90</samp>

> _Sing, O Muse, of the daring deeds of PyTorch, the swift and fiery_
> _framework of deep learning, that with skill and cunning wrought_
> _many wonders of dynamic compilation, using the hidden powers_
> _of `_dynamo` and `_inductor`, the secret modules of LLVM and MLIR._

Pull Request resolved: #104368
Approved by: https://github.com/msaroufim, https://github.com/albanD
  • Loading branch information
malfet authored and pytorchmergebot committed Jun 29, 2023
1 parent d0a72ec commit fea6834
Show file tree
Hide file tree
Showing 11 changed files with 32 additions and 20 deletions.
1 change: 0 additions & 1 deletion test/dynamo/test_compile.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import unittest

import torch
import torch._dynamo
from torch._dynamo.testing import CompileCounter


Expand Down
2 changes: 1 addition & 1 deletion test/inductor/test_cpp_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import unittest
from typing import NamedTuple

import torch._dynamo
import torch
from torch._inductor import config
from torch.testing._internal.common_utils import (
IS_MACOS,
Expand Down
1 change: 0 additions & 1 deletion test/inductor/test_cpu_repro.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
import numpy as np
import sympy
import torch
import torch._dynamo
from torch._C import FileCheck
from torch._dynamo.testing import rand_strided
from torch._dynamo.utils import same
Expand Down
1 change: 0 additions & 1 deletion test/inductor/test_cuda_repro.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import unittest

import torch
import torch._dynamo
import torch._dynamo.config as dynamo_config
from torch import nn
from torch._dynamo.debug_utils import same_two_models
Expand Down
1 change: 0 additions & 1 deletion test/inductor/test_cudagraph_trees.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@

import torch

import torch._dynamo
import torch._dynamo.config as dynamo_config
import torch.nn as nn
from torch._inductor import config
Expand Down
1 change: 0 additions & 1 deletion test/inductor/test_minifier.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from unittest.mock import patch

import torch
import torch._dynamo
import torch._dynamo.config as dynamo_config
import torch._inductor.config as inductor_config
import torch._inductor.utils
Expand Down
2 changes: 1 addition & 1 deletion test/inductor/test_perf.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import functorch

import torch._dynamo
import torch
import torch._inductor.config as config
from torch._dynamo.backends.registry import register_backend
from torch._inductor import metrics
Expand Down
1 change: 0 additions & 1 deletion test/inductor/test_torchinductor.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@

import torch

import torch._dynamo
import torch._dynamo.config as dynamo_config
import torch.nn as nn
from torch._dispatch.python import enable_python_dispatcher
Expand Down
1 change: 0 additions & 1 deletion test/inductor/test_torchinductor_opinfo.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@

import torch

import torch._dynamo
from torch._dynamo.test_case import run_tests
from torch.testing._internal.common_device_type import (
instantiate_device_type_tests,
Expand Down
26 changes: 16 additions & 10 deletions test/test_testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -2139,6 +2139,15 @@ def test_two_things_subtest_expected_failure(self, device, x, y):


class TestImports(TestCase):
@classmethod
def _check_python_output(cls, program) -> str:
    """Execute *program* in a fresh Python interpreter and return its output.

    Runs with ``-W all`` so every warning is surfaced, and folds stderr into
    the returned text so warnings are captured alongside stdout.
    """
    # On Windows, opening the subprocess with the default CWD makes
    # `import torch` fail, so run from this script's directory instead.
    script_dir = os.path.dirname(os.path.realpath(__file__))
    raw = subprocess.check_output(
        [sys.executable, "-W", "all", "-c", program],
        stderr=subprocess.STDOUT,
        cwd=script_dir,
    )
    return raw.decode("utf-8")

def test_circular_dependencies(self) -> None:
""" Checks that all modules inside torch can be imported
Prevents regression reported in https://github.com/pytorch/pytorch/issues/77441 """
Expand Down Expand Up @@ -2187,14 +2196,14 @@ def test_circular_dependencies(self) -> None:
raise RuntimeError(f"Failed to import {mod_name}: {e}") from e
self.assertTrue(inspect.ismodule(mod))

@unittest.skipIf(IS_WINDOWS, "TODO enable on Windows")
def test_lazy_imports_are_lazy(self) -> None:
    """Verify no module in ``torch._lazy_modules`` is loaded by `import torch`."""
    program = (
        "import sys;import torch;"
        "print(all(x not in sys.modules for x in torch._lazy_modules))"
    )
    result = self._check_python_output(program)
    self.assertEqual(result.strip(), "True")

@unittest.skipIf(IS_WINDOWS, "importing torch+CUDA on CPU results in warning")
def test_no_warning_on_import(self) -> None:
    """`import torch` must produce no output at all under ``-W all``.

    Fix: the scraped diff left both the pre-refactor inline
    ``subprocess.check_output`` call and the new helper call in place,
    assigning ``out`` twice and launching a redundant interpreter; only the
    surviving ``_check_python_output`` version is kept.
    """
    out = self._check_python_output("import torch")
    self.assertEqual(out, "")

@unittest.skipIf(IS_WINDOWS, "importing torch+CUDA on CPU results in warning")
Expand All @@ -2212,10 +2221,7 @@ def test_no_mutate_global_logging_on_import(self, path) -> None:
'logging.root.setLevel(logging.INFO)',
f'_logger.info("{expected}")'
]
out = subprocess.check_output(
[sys.executable, "-W", "all", "-c", "; ".join(commands)],
stderr=subprocess.STDOUT,
).decode("utf-8")
out = self._check_python_output("; ".join(commands))
self.assertEqual(out.strip(), expected)

class TestOpInfos(TestCase):
Expand Down
15 changes: 14 additions & 1 deletion torch/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1652,7 +1652,6 @@ def fn(model: Callable):
disable=disable)
return fn

import torch._dynamo
if mode is not None and options is not None:
raise RuntimeError("Either mode or options can be specified, but both can't be specified at the same time.")
if mode is None and options is None:
Expand Down Expand Up @@ -1743,12 +1742,26 @@ def registerOp(cls, op_key, full_schema, op_impl, dispatch_key):
"has_cudnn": torch.backends.cudnn.is_available,
"has_mkldnn": torch.backends.mkldnn.is_available,
}

# Submodules deliberately NOT imported by `import torch`; they are loaded on
# first attribute access via the module-level __getattr__ below (PEP 562).
_lazy_modules = {
    "_dynamo",
    "_inductor",
}

def __getattr__(name):
    """Module-level attribute hook (PEP 562).

    Resolves, in order:
      1. deprecated attributes — warn, then return the replacement's value;
      2. lazy submodules — imported only on first access;
    otherwise raises AttributeError like a normal missing attribute.
    """
    # Deprecated attrs map to zero-arg callables producing the new value.
    replacement = _deprecated_attrs.get(name)
    if replacement is not None:
        import warnings
        warnings.warn(
            f"'{name}' is deprecated, please use "
            f"'{replacement.__module__}.{replacement.__name__}()'",
            stacklevel=2,
        )
        return replacement()

    # Lazy submodules: import on demand instead of at `import torch` time.
    if name in _lazy_modules:
        import importlib
        return importlib.import_module(f".{name}", __name__)

    raise AttributeError(f"module '{__name__}' has no attribute '{name}'")

from . import _logging
_logging._init_logs()

0 comments on commit fea6834

Please sign in to comment.