diff --git a/.github/workflows/python_wheel_build.yml b/.github/workflows/python_wheel_build.yml
index b7c03e1cb9261..169baafd28811 100644
--- a/.github/workflows/python_wheel_build.yml
+++ b/.github/workflows/python_wheel_build.yml
@@ -11,7 +11,7 @@ on:
   schedule:
     - cron: '01 1 * * *'
   pull_request:
-    types: [labeled]
+    types: [opened, synchronize, reopened, labeled]
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
@@ -25,6 +25,7 @@ jobs:
       contains(github.event.pull_request.labels.*.name, 'build-python-wheels')
     runs-on: ubuntu-latest
     strategy:
+      fail-fast: false
       matrix:
         target: [cp38-manylinux_x86_64, cp39-manylinux_x86_64, cp310-manylinux_x86_64, cp311-manylinux_x86_64, cp312-manylinux_x86_64, cp313-manylinux_x86_64]
     name: ${{ matrix.target }}
@@ -34,6 +35,48 @@
     with:
       build-tag: ${{ matrix.target }}
 
+  test-tutorials:
+    needs: build-wheels
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install required system packages
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y libfftw3-dev
+
+      - name: Download produced wheels
+        uses: actions/download-artifact@v4
+        with:
+          path: wheels
+          merge-multiple: true
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install produced wheel
+        run: |
+          ls -R wheels
+          PY_VER=$(python -c "import sys; print(f'cp{sys.version_info.major}{sys.version_info.minor}')")
+          WHEEL=$(ls wheels/*${PY_VER}*.whl | head -n 1)
+          echo "Python version: ${PY_VER}, installing wheel: ${WHEEL}"
+          pip install "$WHEEL"
+
+      - name: Install tutorials dependencies
+        run: |
+          python -m pip install -r test_tutorials/requirements.txt
+
+      - name: Run tutorials
+        run: |
+          pytest -vv -rF --show-capture=all test_tutorials
+
   create-and-upload-wheel-registry:
     if: github.event_name != 'pull_request' # The secrets are not available in PR
     needs: build-wheels
diff --git a/test_tutorials/requirements.txt b/test_tutorials/requirements.txt
new file mode 100644
index 0000000000000..fbb8aa7738c18
--- /dev/null
+++ b/test_tutorials/requirements.txt
@@ -0,0 +1,50 @@
+# ROOT requirements for third-party Python packages
+
+# PyROOT: Interoperability with numpy arrays
+numpy
+pandas
+
+# TMVA: SOFIE
+# dm-sonnet # used for GNNs
+# graph_nets
+# onnx
+
+# TMVA: PyMVA interfaces
+# scikit-learn
+# tensorflow ; python_version < "3.13" # TensorFlow doesn't support Python 3.13 yet
+# torch
+# xgboost
+
+# PyROOT: ROOT.Numba.Declare decorator
+numba>=0.48
+cffi>=1.9.1
+
+# Notebooks: ROOT C++ kernel
+# IPython
+# jupyter
+# metakernel>=0.20.0
+# notebook>=4.4.1
+
+# Distributed RDataFrame
+# pyspark>=2.4 # Spark backend
+# dask>=2022.08.1 # Dask backend
+# distributed>=2022.08.1 # Dask backend
+
+# JsMVA: Jupyter notebook magic for TMVA
+# ipywidgets
+
+# Unified Histogram Interface (UHI)
+uhi
+matplotlib
+mplhep
+
+# For testing
+# nbconvert>=7.4.0
+pytest
+# setuptools
+
+scikit-learn
+xgboost
+
+# Look for CPU-only versions of PyTorch to avoid pulling CUDA in the CI docker images.
+# -f https://download.pytorch.org/whl/cpu/torch_stable.html
diff --git a/test_tutorials/test_tutorials.py b/test_tutorials/test_tutorials.py
new file mode 100644
index 0000000000000..8e45150c170ec
--- /dev/null
+++ b/test_tutorials/test_tutorials.py
@@ -0,0 +1,89 @@
+import subprocess
+import sys
+import pathlib
+import ROOT
+import os
+import pytest
+import signal
+
+ROOT.gROOT.SetBatch(True)
+
+tutorial_dir = pathlib.Path(str(ROOT.gROOT.GetTutorialDir()))
+
+subdirs = [
+    "analysis/dataframe",
+    "analysis/tree",
+    "hist",
+    "io/ntuple",
+    "roofit/roofit"
+]
+
+# ----------------------
+# Python tutorials tests
+# ----------------------
+py_tutorials = []
+for sub in subdirs:
+    sub_path = tutorial_dir / sub
+    # py_tutorials.extend(sub_path.rglob("*.py"))
+    for f in sub_path.rglob("*.py"):
+        # skip distrdf tutorials for now
+        if "distrdf" in f.name:
+            continue
+        py_tutorials.append(f)
+
+def test_tutorials_are_detected():
+    assert len(py_tutorials) > 0
+
+@pytest.mark.parametrize("tutorial", py_tutorials, ids=lambda p: p.name)
+def test_tutorial(tutorial):
+    env = dict(**os.environ)
+    # force matplotlib to use a non-GUI backend
+    env["MPLBACKEND"] = "Agg"
+    print("Test env:", env)
+    try:
+        result = subprocess.run(
+            [sys.executable, str(tutorial)],
+            check=True,
+            env=env,
+            timeout=60,
+            capture_output=True,
+            text=True,
+        )
+        print("Test stderr:", result.stderr)
+    except subprocess.TimeoutExpired:
+        pytest.skip(f"Tutorial {tutorial} timed out")
+    except subprocess.CalledProcessError as e:
+        # read stderr to see if EOFError occurred
+        if "EOFError" in e.stderr:
+            pytest.skip(f"Skipping {tutorial.name} (requires user input)")
+        raise
+
+# ----------------------
+# C++ tutorials tests
+# ----------------------
+cpp_tutorials = []
+for sub in subdirs:
+    sub_path = tutorial_dir / sub
+    cpp_tutorials.extend(sub_path.rglob("*.C"))
+
+def test_cpp_tutorials_are_detected():
+    assert len(cpp_tutorials) > 0
+
+@pytest.mark.parametrize("tutorial", cpp_tutorials, ids=lambda p: p.name)
+def test_cpp_tutorial(tutorial):
+    try:
+        result = subprocess.run(
+            [sys.executable, "-c", f'import ROOT; ROOT.gROOT.ProcessLine(".x {tutorial}")'],
+            check=True,
+            timeout=60,
+            capture_output=True,
+            text=True
+        )
+    except subprocess.TimeoutExpired:
+        pytest.skip(f"Tutorial {tutorial} timed out")
+    except subprocess.CalledProcessError as e:
+        if e.returncode == -signal.SIGILL or e.returncode == 132:
+            pytest.fail(f"Failing {tutorial.name} (illegal instruction on this platform)")
+        elif "EOFError" in e.stderr:
+            pytest.skip(f"Skipping {tutorial.name} (requires user input)")
+        raise
\ No newline at end of file