Merged
49 commits
e6172a7
fix: enable parallel test execution with pytest-xdist in CI workflow
deependujha Jun 11, 2025
1ee614e
temporary fix to handle parallelly running tests in ci
deependujha Jun 12, 2025
cf5fb70
update
deependujha Jun 12, 2025
71eafa9
update
deependujha Jun 12, 2025
17eed41
update
deependujha Jun 12, 2025
311beae
update
deependujha Jun 12, 2025
46b5843
7 pm
deependujha Jun 12, 2025
93305ba
pytest-xdist ==3.4.0
Borda Jun 12, 2025
cbf1ca5
fix tmp path on windows
deependujha Jun 12, 2025
ecad3d8
add fixture for unique HF URL to support parallel test runs
deependujha Jun 12, 2025
5bad28f
Merge branch 'main' into feat/run-tests-parallely
deependujha Jun 12, 2025
4d730a5
update
deependujha Jun 12, 2025
c6ad3f9
Merge branch 'main' into feat/run-tests-parallely
Borda Jun 18, 2025
70f7501
update
deependujha Jun 13, 2025
f3bdcf8
increase timeout of 60s to 90s
deependujha Jun 18, 2025
c35ac31
bump pytest & pytest-xdist
deependujha Jun 19, 2025
1956b7f
rerun failing tests twice
deependujha Jun 19, 2025
f890dd6
refactor: update pytest command and adjust fixture scopes for better …
deependujha Jun 20, 2025
7b09b9f
update
deependujha Jun 20, 2025
43450e3
update
deependujha Jun 20, 2025
6f47a5b
update
deependujha Jun 20, 2025
6c47e98
update
deependujha Jun 20, 2025
e52dd98
update
deependujha Jun 20, 2025
51aea13
update
deependujha Jun 20, 2025
14c0e79
update
deependujha Jun 20, 2025
140350a
update
deependujha Jun 20, 2025
613309c
update
deependujha Jun 24, 2025
59056f0
let's just wait
deependujha Jun 24, 2025
f5cb8f6
Merge branch 'main' into feat/run-tests-parallely
deependujha Aug 4, 2025
9a8028f
Update tests/streaming/test_dataloader.py
deependujha Aug 4, 2025
2203738
update
deependujha Aug 4, 2025
c03d154
update
deependujha Aug 4, 2025
f5718d7
Merge branch 'main' into feat/run-tests-parallely
deependujha Aug 6, 2025
ca211c8
Apply suggestions from code review
Borda Aug 6, 2025
38c9671
Update src/litdata/streaming/resolver.py
deependujha Aug 7, 2025
3d9a504
update
deependujha Aug 7, 2025
204c3e8
Merge branch 'main' into feat/run-tests-parallely
deependujha Aug 7, 2025
24224e7
update
deependujha Aug 7, 2025
00d5a38
Update .github/workflows/ci-testing.yml
deependujha Aug 7, 2025
41c1f1e
Merge branch 'main' into feat/run-tests-parallely
deependujha Aug 7, 2025
8bf6699
update
deependujha Aug 7, 2025
02c475b
let's try running all tests in parallel
deependujha Aug 7, 2025
1ba1412
update
deependujha Aug 7, 2025
51db91a
update
deependujha Aug 7, 2025
fe292ef
let's run tests in groups
deependujha Aug 7, 2025
62b4cfb
update
deependujha Aug 7, 2025
b61b7d7
tests pass
deependujha Aug 7, 2025
1918a9d
update
deependujha Aug 7, 2025
f59b9ed
Merge branch 'main' into feat/run-tests-parallely
deependujha Aug 7, 2025
13 changes: 10 additions & 3 deletions .github/workflows/ci-testing.yml
@@ -45,9 +45,16 @@ jobs:
uv pip install -e ".[extras]" -r requirements/test.txt -U -q
uv pip list

-  - name: Tests
-    working-directory: tests
-    run: pytest . -v --cov=litdata --durations=100
+  - name: Run fast tests in parallel
+    run: |
+      pytest \
+        tests/streaming tests/utilities \
+        tests/test_cli.py tests/test_debugger.py \
+        -n 2 --cov=litdata --cov-append --cov-report= --durations=120
+
+  - name: Run processing tests sequentially
+    run: |
+      pytest tests/processing tests/raw --cov=litdata --cov-append --cov-report= --durations=90

- name: Statistics
continue-on-error: true
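Note: the fast suites (streaming, utilities, CLI, debugger) are distributed across two pytest-xdist workers, while the processing and raw tests still run in a single process; --cov-append lets both invocations write into the same coverage data file that the Statistics step reads. A rough local reproduction of this split (a sketch only — it assumes pytest, pytest-xdist, and pytest-cov are installed and that it is run from the repository root):

# Sketch of running the same two groups locally; the test paths and coverage
# flags mirror the workflow above, the subprocess wrapper is purely illustrative.
import subprocess
import sys

fast = [
    sys.executable, "-m", "pytest",
    "tests/streaming", "tests/utilities",
    "tests/test_cli.py", "tests/test_debugger.py",
    "-n", "2", "--cov=litdata", "--cov-append", "--cov-report=",
]
sequential = [
    sys.executable, "-m", "pytest",
    "tests/processing", "tests/raw",
    "--cov=litdata", "--cov-append", "--cov-report=",
]

for cmd in (fast, sequential):
    subprocess.run(cmd, check=True)  # stop at the first failing group, like separate CI steps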
1 change: 1 addition & 0 deletions requirements/test.txt
@@ -9,6 +9,7 @@ pytest-cov ==6.2.1
pytest-timeout ==2.4.0
pytest-rerunfailures ==15.1
pytest-random-order ==1.1.1
+pytest-xdist >=3.8.0
pandas
pyarrow >=20.0.0
polars >1.0.0
10 changes: 5 additions & 5 deletions tests/conftest.py
@@ -1,5 +1,6 @@
import os
import shutil
+import signal
import sys
import threading
from collections import OrderedDict
@@ -16,7 +17,7 @@
from litdata.utilities.dataset_utilities import get_default_cache_dir


-@pytest.fixture(autouse=True)
+@pytest.fixture(autouse=True, scope="session")
def teardown_process_group():
"""Ensures distributed process group gets closed before the next test runs."""
yield
@@ -25,9 +26,8 @@ def teardown_process_group():


@pytest.fixture(autouse=True)
-def set_env():
-    # Set environment variable before each test to configure BaseWorker's maximum wait time
-    os.environ["DATA_OPTIMIZER_TIMEOUT"] = "20"
+def disable_signals(monkeypatch):
+    monkeypatch.setattr(signal, "signal", lambda *args, **kwargs: None)


@pytest.fixture
@@ -132,7 +132,7 @@ def lightning_sdk_mock(monkeypatch):
return lightning_sdk


-@pytest.fixture(autouse=True)
+@pytest.fixture(autouse=True, scope="session")
def _thread_police():
"""Attempts stopping left-over threads to avoid test interactions.

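Note: moving the teardown and thread-police fixtures to session scope and stubbing out signal.signal keeps per-test overhead low and avoids signal-handler clashes once tests share a runner. Under pytest-xdist, fixtures that create shared resources typically also need per-worker isolation; a minimal sketch of that pattern (illustrative only, not the fixture added in this PR) uses the worker_id fixture that pytest-xdist provides:

import pytest


@pytest.fixture(scope="session")
def per_worker_cache_dir(tmp_path_factory, worker_id):
    # worker_id is "gw0", "gw1", ... under pytest-xdist and "master" in a
    # single-process run, so each worker gets its own cache directory.
    return tmp_path_factory.mktemp(f"cache_{worker_id}")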
2 changes: 1 addition & 1 deletion tests/streaming/test_dataloader.py
@@ -319,7 +319,7 @@ def test_dataloader_states_with_persistent_workers(tmpdir):
assert count >= 25, "There should be at least 25 batches in the third epoch"


-@pytest.mark.timeout(60)
+@pytest.mark.timeout(90)
def test_resume_dataloader_with_new_dataset(tmpdir):
dataset_1_path = tmpdir.join("dataset_1")
dataset_2_path = tmpdir.join("dataset_2")
12 changes: 6 additions & 6 deletions tests/streaming/test_dataset.py
@@ -311,7 +311,7 @@ def test_streaming_dataset_distributed_no_shuffle(drop_last, tmpdir, compression
pytest.param("zstd", marks=pytest.mark.skipif(condition=not _ZSTD_AVAILABLE, reason="Requires: ['zstd']")),
],
)
-@pytest.mark.timeout(60)
+@pytest.mark.timeout(90)
def test_streaming_dataset_distributed_full_shuffle_odd(drop_last, tmpdir, compression):
seed_everything(42)

@@ -364,7 +364,7 @@ def test_streaming_dataset_distributed_full_shuffle_odd(drop_last, tmpdir, compr
),
],
)
-@pytest.mark.timeout(60)
+@pytest.mark.timeout(90)
def test_streaming_dataset_distributed_full_shuffle_even(drop_last, tmpdir, compression):
seed_everything(42)

@@ -412,7 +412,7 @@ def test_streaming_dataset_distributed_full_shuffle_even(drop_last, tmpdir, comp
pytest.param("zstd", marks=pytest.mark.skipif(condition=not _ZSTD_AVAILABLE, reason="Requires: ['zstd']")),
],
)
-@pytest.mark.timeout(60)
+@pytest.mark.timeout(90)
def test_streaming_dataset_distributed_full_shuffle_even_multi_nodes(drop_last, tmpdir, compression):
seed_everything(42)

@@ -685,7 +685,7 @@ def test_dataset_for_text_tokens_multiple_workers(tmpdir):
assert result == expected


-@pytest.mark.timeout(60)
+@pytest.mark.timeout(90)
def test_dataset_for_text_tokens_with_large_block_size_multiple_workers(tmpdir):
# test to reproduce ERROR: Unexpected segmentation fault encountered in worker
seed_everything(42)
@@ -1077,7 +1077,7 @@ def test_dataset_resume_on_future_chunks(shuffle, tmpdir, monkeypatch):
assert torch.equal(next(iter(train_dataloader)), batch_to_resume_from)


-@pytest.mark.timeout(60)
+@pytest.mark.timeout(90)
@pytest.mark.skipif(sys.platform == "win32", reason="Not tested on windows and MacOs")
def test_dataset_valid_state(tmpdir, monkeypatch):
seed_everything(42)
@@ -1213,7 +1213,7 @@ def fn(remote_chunkpath: str, local_chunkpath: str):
dataset._validate_state_dict()


-@pytest.mark.timeout(60)
+@pytest.mark.timeout(90)
@pytest.mark.skipif(sys.platform == "win32", reason="Not tested on windows and MacOs")
def test_dataset_valid_state_override(tmpdir, monkeypatch):
seed_everything(42)
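Note: the timeout bumps from 60 s to 90 s leave headroom for the extra wall-clock time each test can take when two xdist workers share the runner's CPUs. If a whole module needed the same budget, pytest's marker machinery also allows setting it once instead of per test — a small sketch (hypothetical module, not part of this diff):

import pytest

# pytest-timeout honors markers applied via module-level pytestmark, so every
# test in this file gets the same 90-second budget without repeating the decorator.
pytestmark = pytest.mark.timeout(90)


def test_example():
    assert sum(range(1_000_000)) >= 0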
6 changes: 3 additions & 3 deletions tests/utilities/test_env.py
@@ -2,9 +2,9 @@


def test_distributed_env_from_env(monkeypatch):
monkeypatch.setenv("WORLD_SIZE", 2)
monkeypatch.setenv("GLOBAL_RANK", 1)
monkeypatch.setenv("NNODES", 2)
monkeypatch.setenv("WORLD_SIZE", "2")
monkeypatch.setenv("GLOBAL_RANK", "1")
monkeypatch.setenv("NNODES", "2")

dist_env = _DistributedEnv.detect()
assert dist_env.world_size == 2
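Note: the switch from integers to strings matters because environment variables are always stored as strings — os.environ only accepts str values, and newer pytest versions complain when monkeypatch.setenv receives anything else. A minimal sketch of the pattern (hypothetical test, not from this diff):

import os


def test_env_values_are_strings(monkeypatch):
    # Environment variables are plain strings; set them as strings and parse
    # explicitly wherever an integer is needed.
    monkeypatch.setenv("WORLD_SIZE", "2")
    assert os.environ["WORLD_SIZE"] == "2"
    assert int(os.environ["WORLD_SIZE"]) == 2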