Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[oss][ci] fix tests and cmake C++ linking #2834

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
fix tests and Cmake
  • Loading branch information
iamzainhuda committed Mar 19, 2025
commit 93e1a848f654498b7175f69e858ea3611b951dcf
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -17,7 +17,7 @@ include(FetchContent)

option(BUILD_TEST "Build C++ test binaries (need gtest and gbenchmark)" OFF)

add_definitions("-D_GLIBCXX_USE_CXX11_ABI=0")
add_definitions("-D_GLIBCXX_USE_CXX11_ABI=1")

add_subdirectory(torchrec/csrc)

14 changes: 12 additions & 2 deletions torchrec/ops/tests/faster_hash_bench.py
Original file line number Diff line number Diff line change
@@ -11,15 +11,22 @@
import contextlib
import logging
import random
import sys
import time
from typing import Any, Generator

import torch

logger: logging.Logger = logging.getLogger(__name__)

torch.ops.load_library("//caffe2/torch/fb/retrieval:faster_hash_cpu")
torch.ops.load_library("//caffe2/torch/fb/retrieval:faster_hash_cuda")
def load_required_libraries() -> bool:
try:
torch.ops.load_library("//torchrec/ops:faster_hash_cpu")
torch.ops.load_library("//torchrec/ops:faster_hash_cuda")
return True
except Exception as e:
logger.error(f"Failed to load faster_hash libraries, skipping test: {e}")
return False


@contextlib.contextmanager
@@ -347,6 +354,9 @@ def _run_benchmark_with_eviction(


if __name__ == "__main__":
if not load_required_libraries():
print("Skipping test because libraries were not loaded")
sys.exit(0)
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
19 changes: 16 additions & 3 deletions torchrec/ops/tests/faster_hash_test.py
Original file line number Diff line number Diff line change
@@ -13,16 +13,29 @@
import torch
from hypothesis import settings

torch.ops.load_library("//torchrec/ops:faster_hash_cpu")
torch.ops.load_library("//torchrec/ops:faster_hash_cuda")

def load_required_libraries() -> bool:
try:
torch.ops.load_library("//torchrec/ops:faster_hash_cpu")
torch.ops.load_library("//torchrec/ops:faster_hash_cuda")
return True
except Exception as e:
print(f"Skipping tests because libraries were not loaded: {e}")
return False

class HashZchKernelEvictionPolicy(IntEnum):
THRESHOLD_EVICTION = 0
LRU_EVICTION = 1


class FasterHashTest(unittest.TestCase):

@classmethod
def setUpClass(cls):
if not load_required_libraries():
raise unittest.SkipTest(
"Libraries not loaded, skipping all tests in MyTestCase"
)

@unittest.skipIf(not torch.cuda.is_available(), "Skip when CUDA is not available")
@settings(deadline=None)
def test_simple_zch_no_evict(self) -> None:
Loading
Oops, something went wrong.