1 change: 1 addition & 0 deletions tests/test_modules.py
@@ -196,6 +196,7 @@ class NNModuleTests(torchdynamo.testing.TestCase):
     test_iseval2 = make_test(IsEvalCheck())
     test_viamodulecall = make_test(ViaModuleCall())
     test_isnonelayer = make_test(IsNoneLayer())
+    test_intarg = make_test(IntArg())
 
     # not yet implemented
     # test_layerlist = make_test(LayerList())
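For context, the new test exercises a module whose forward takes a plain Python number in addition to tensors. The IntArg definition itself is not part of this hunk; a minimal sketch of what such a module could look like (the layer shape and the name of the extra parameter are assumptions, not the actual test code):

import torch


class IntArg(torch.nn.Module):
    """Hypothetical module whose forward accepts a non-tensor (int) argument."""

    def __init__(self):
        super().__init__()
        self.layer1 = torch.nn.Linear(10, 10)

    def forward(self, x, offset=1):
        # `offset` is a plain int; with this change the tracer wraps it as a
        # graph input guarded by TYPE_MATCH instead of leaving it unhandled.
        return self.layer1(x) + offset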
9 changes: 9 additions & 0 deletions torchdynamo/symbolic_convert.py
@@ -4,6 +4,7 @@
 import functools
 import inspect
 import itertools
+from numbers import Real
 import operator
 import types
 import typing
@@ -23,6 +24,7 @@
 from .guards import GuardSource
 from .variable_tracker import AllowedFunctionOrModuleVariable, PythonModuleVariable
 from .variable_tracker import BaseListVariable
+from .variable_tracker import BasicTypeVariable
 from .variable_tracker import BuiltinVariable
 from .variable_tracker import ConstDictVariable
 from .variable_tracker import ConstantVariable
@@ -184,6 +186,13 @@ def wrap_local(self, name, value):
                 value=value,
                 guards={Guard(name, GuardSource.LOCAL, GuardBuilder.VALUE_MATCH)},
             )
+        elif isinstance(value, Real):
+            self.graphargs.append(LocalArg(name))
+            return BasicTypeVariable(
+                proxy=self.create_graph_input(name),
+                state=TracingSupported.UNKNOWN,
+                guards={Guard(name, GuardSource.LOCAL, GuardBuilder.TYPE_MATCH)},
+            )
         elif type(value) in (tuple, list) and all(
             isinstance(x, torch.Tensor) for x in value
         ):
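With this branch, wrap_local now treats any numbers.Real local (an int or float argument) as a graph input guarded only by its type, so the same compiled graph can be reused across different numeric values. A minimal sketch of the kind of function this enables, assuming the torchdynamo.optimize context-manager API from the project README (the compiler callback is purely illustrative):

import torch
import torchdynamo


def my_compiler(gm: torch.fx.GraphModule, example_inputs):
    # Illustrative backend: just run the captured FX graph as-is.
    return gm.forward


def scale_and_shift(x, scale):
    # `scale` is a plain float; it becomes a graph input with a TYPE_MATCH
    # guard rather than a value the tracer cannot handle.
    return x * scale + 1


with torchdynamo.optimize(my_compiler):
    scale_and_shift(torch.randn(8), 0.5)
    scale_and_shift(torch.randn(8), 2.0)  # same type, so the TYPE_MATCH guard should still pass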
9 changes: 9 additions & 0 deletions torchdynamo/variable_tracker.py
@@ -97,6 +97,15 @@ def as_proxy(self):
         return self.proxy
 
 
+class BasicTypeVariable(TensorVariable):
+    """
+    Points to a simple type, e.g. int, float, str. So far, we treat this
+    the same as TensorVariable
+    """
+
+    pass
+
+
 class NNModuleVariable(VariableTracker):
     def __init__(self, module_key: str, **kwargs):
         super(NNModuleVariable, self).__init__(**kwargs)
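The docstring mentions str as well, but for now the wrap_local branch above only routes numbers.Real values to BasicTypeVariable. numbers.Real covers ints, floats, and bools, while complex and str fall outside it; a quick standalone check (purely illustrative, not part of the change):

from numbers import Real

for v in (3, 2.5, True, 1 + 2j, "hi"):
    print(f"{type(v).__name__}: {isinstance(v, Real)}")
# prints: int: True, float: True, bool: True, complex: False, str: False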