Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 32 additions & 4 deletions torchrec/distributed/tests/test_pt2.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,10 +179,37 @@ def get(self) -> int:
def set(self, val):
self.counter_ = val

@torch._library.register_fake_class("fbgemm::TensorQueue")
class FakeTensorQueue:
    """Pure-Python stand-in for the ``fbgemm::TensorQueue`` torchbind class.

    Registered with ``torch._library`` so tracing/export can simulate the
    queue without the real C++ object. Backed by a plain Python list;
    ``init_tensor`` is handed back whenever the queue is empty.
    """

    def __init__(self, queue, init_tensor):
        # queue: list of tensors; init_tensor: fallback for empty reads.
        self.queue = queue
        self.init_tensor = init_tensor

    @classmethod
    def __obj_unflatten__(cls, flattened_ctx):
        # Rebuild the fake object from the (name, value) pairs torch
        # produces when flattening the real torchbind object.
        return cls(**dict(flattened_ctx))

    def push(self, x):
        self.queue.append(x)

    def pop(self):
        # FIFO removal; empty queue yields the sentinel init_tensor.
        return self.queue.pop(0) if self.queue else self.init_tensor

    def top(self):
        # Peek without removing; empty queue yields init_tensor.
        return self.queue[0] if self.queue else self.init_tensor

    def size(self):
        return len(self.queue)

def tearDown(self):
    # Remove the fake-class registrations installed for this test so the
    # global torch._library fake-class registry is left clean for other
    # tests (re-registering an already-registered name would fail).
    torch._library.fake_class_registry.deregister_fake_class(
        "fbgemm::AtomicCounter"
    )
    torch._library.fake_class_registry.deregister_fake_class("fbgemm::TensorQueue")
    # Delegate to the base TestCase teardown last.
    super().tearDown()

def _test_kjt_input_module(
Expand Down Expand Up @@ -517,7 +544,7 @@ def test_sharded_quant_ebc_non_strict_export(self) -> None:
{},
strict=False,
pre_dispatch=True,
).run_decompositions()
)

ep.module()(kjt.values(), kjt.lengths())

Expand Down Expand Up @@ -556,16 +583,17 @@ def test_sharded_quant_fpebc_non_strict_export(self) -> None:
{},
strict=False,
pre_dispatch=True,
).run_decompositions()
)
ep.module()(kjt.values(), kjt.lengths())

# PT2 IR autofunctionalizes mutation funcs (bounds_check_indices)
# ensure such node isn't present, as it causes issues with IR
for n in ep.graph_module.graph.nodes:
self.assertFalse("auto_functionalized" in str(n.name))

# TODO: Fix Unflatten
# torch.export.unflatten(ep)
torch.export.unflatten(ep)

ep(kjt.values(), kjt.lengths())

def test_maybe_compute_kjt_to_jt_dict(self) -> None:
kjt: KeyedJaggedTensor = make_kjt([2, 3, 4, 5, 6], [1, 2, 1, 1])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from torchrec.sparse.jagged_tensor import KeyedJaggedTensor


class TestUtils(unittest.TestCase):
class TestTrainPipelineUtils(unittest.TestCase):
def test_get_node_args_helper_call_module_kjt(self) -> None:
graph = torch.fx.Graph()
kjt_args = []
Expand Down