From 070d4075b58b425ab4d8eba44cc42acef8407fde Mon Sep 17 00:00:00 2001
From: vmoens
Date: Sun, 19 Oct 2025 17:57:05 -0700
Subject: [PATCH] Update

[ghstack-poisoned]
---
 .github/workflows/benchmarks.yml    | 2 +-
 .github/workflows/benchmarks_pr.yml | 2 +-
 benchmarks/test_llm.py              | 5 +++++
 3 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml
index 3e69d24e943..9ee940d8fd1 100644
--- a/.github/workflows/benchmarks.yml
+++ b/.github/workflows/benchmarks.yml
@@ -84,7 +84,7 @@ jobs:
         python3.10 -m pip install ninja pytest pytest-benchmark mujoco dm_control "gym[accept-rom-license,atari]" transformers
         python -m pip install "pybind11[global]"
         python3.10 -m pip install git+https://github.com/pytorch/tensordict
-        python3.10 -m pip install safetensors tqdm pandas numpy matplotlib
+        python3.10 -m pip install safetensors tqdm pandas numpy matplotlib ray
         python3.10 setup.py develop
 
         # test import
diff --git a/.github/workflows/benchmarks_pr.yml b/.github/workflows/benchmarks_pr.yml
index 03f34892002..69c506bbafe 100644
--- a/.github/workflows/benchmarks_pr.yml
+++ b/.github/workflows/benchmarks_pr.yml
@@ -79,7 +79,7 @@ jobs:
         ${{ matrix.device == 'CPU' && 'export CUDA_VISIBLE_DEVICES=' || '' }}
 
         python3.10 -m pip install --pre torch torchvision --index-url https://download.pytorch.org/whl/nightly/cu128 -U
-        python3.10 -m pip install ninja pytest pytest-benchmark mujoco dm_control "gym[accept-rom-license,atari]" transformers
+        python3.10 -m pip install ninja pytest pytest-benchmark mujoco dm_control "gym[accept-rom-license,atari]" transformers ray
         python3.10 -m pip install "pybind11[global]"
         python3.10 -m pip install git+https://github.com/pytorch/tensordict
         python3.10 -m pip install safetensors tqdm pandas numpy matplotlib
diff --git a/benchmarks/test_llm.py b/benchmarks/test_llm.py
index 030b3c45f90..aeebbd62491 100644
--- a/benchmarks/test_llm.py
+++ b/benchmarks/test_llm.py
@@ -16,6 +16,11 @@
 _has_transformers = importlib.import_module("transformers") is not None
 
+# Skip all these tests if gpu is not available
+pytestmark = pytest.mark.skipif(
+    not torch.cuda.is_available(), reason="GPU not available"
+)
+
 
 @pytest.fixture(scope="module")
 def transformers_wrapper():
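
Note: a minimal standalone sketch of the module-level pytestmark pattern this patch adds to benchmarks/test_llm.py, assuming only that pytest and torch are importable; the example test below is hypothetical and not part of the patch.

    # Sketch: assigning a skipif marker to the module-level `pytestmark`
    # variable applies it to every test collected from this module.
    import pytest
    import torch

    pytestmark = pytest.mark.skipif(
        not torch.cuda.is_available(), reason="GPU not available"
    )


    def test_cuda_matmul():  # hypothetical test, shown for illustration only
        x = torch.randn(8, 8, device="cuda")
        assert (x @ x).shape == (8, 8)

When no GPU is present, pytest reports every test in the module as skipped with the given reason instead of failing at the first CUDA call.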