diff --git a/.github/workflows/cleanup_pypi.yml b/.github/workflows/cleanup_pypi.yml index c4300be3..e290faae 100644 --- a/.github/workflows/cleanup_pypi.yml +++ b/.github/workflows/cleanup_pypi.yml @@ -52,7 +52,7 @@ jobs: - name: Install Astral UV uses: astral-sh/setup-uv@v6 with: - version: "0.7.14" + version: "0.8.16" - name: Run Cleanup env: diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index fdd2a838..ab696897 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -70,7 +70,7 @@ jobs: - name: Install Astral UV and enable the cache uses: astral-sh/setup-uv@v6 with: - version: "0.7.14" + version: "0.8.16" python-version: 3.9 enable-cache: true cache-suffix: -${{ github.workflow }} diff --git a/.github/workflows/packaging_sdist.yml b/.github/workflows/packaging_sdist.yml index 2723b437..87923f4c 100644 --- a/.github/workflows/packaging_sdist.yml +++ b/.github/workflows/packaging_sdist.yml @@ -58,7 +58,7 @@ jobs: - name: Install Astral UV uses: astral-sh/setup-uv@v6 with: - version: "0.7.14" + version: "0.8.16" python-version: 3.11 - name: Build sdist diff --git a/.github/workflows/packaging_wheels.yml b/.github/workflows/packaging_wheels.yml index b1a393a1..ea13b674 100644 --- a/.github/workflows/packaging_wheels.yml +++ b/.github/workflows/packaging_wheels.yml @@ -30,7 +30,7 @@ jobs: strategy: fail-fast: false matrix: - python: [ cp39, cp310, cp311, cp312, cp313 ] + python: [ cp39, cp310, cp311, cp312, cp313, cp314, cp314t ] platform: - { os: windows-2025, arch: amd64, cibw_system: win } - { os: ubuntu-24.04, arch: x86_64, cibw_system: manylinux } @@ -45,6 +45,10 @@ jobs: - { minimal: true, python: cp311 } - { minimal: true, python: cp312 } - { minimal: true, platform: { arch: universal2 } } + # Windows+cp314t disabled due to test failures in CI.
+ # TODO: Diagnose why tests fail (access violations) in some configurations + - { python: cp314t, platform: { os: windows-2025 } } + runs-on: ${{ matrix.platform.os }} env: CIBW_TEST_SKIP: ${{ inputs.testsuite == 'none' && '*' || '*-macosx_universal2' }} @@ -80,16 +84,19 @@ # Install Astral UV, which will be used as build-frontend for cibuildwheel - uses: astral-sh/setup-uv@v6 with: - version: "0.7.14" + version: "0.8.16" enable-cache: false cache-suffix: -${{ matrix.python }}-${{ matrix.platform.cibw_system }}_${{ matrix.platform.arch }} + python-version: ${{ matrix.python }} - name: Build${{ inputs.testsuite != 'none' && ' and test ' || ' ' }}wheels - uses: pypa/cibuildwheel@v3.0 + uses: pypa/cibuildwheel@v3.1 env: CIBW_ARCHS: ${{ matrix.platform.arch == 'amd64' && 'AMD64' || matrix.platform.arch }} CIBW_BUILD: ${{ matrix.python }}-${{ matrix.platform.cibw_system }}_${{ matrix.platform.arch }} - + # PYTHON_GIL=1: Suppresses the RuntimeWarning that the GIL is enabled on free-threaded builds. + # TODO: Remove PYTHON_GIL=1 when free-threaded is supported.
+ CIBW_ENVIRONMENT: PYTHON_GIL=1 - name: Upload wheel uses: actions/upload-artifact@v4 with: diff --git a/pyproject.toml b/pyproject.toml index 6291b811..bcbb24f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,9 +46,9 @@ all = [ # users can install duckdb with 'duckdb[all]', which will install this l "ipython", # used in duckdb.query_graph "fsspec", # used in duckdb.filesystem "numpy", # used in duckdb.experimental.spark and in duckdb.fetchnumpy() - "pandas", # used for pandas dataframes all over the place - "pyarrow", # used for pyarrow support - "adbc_driver_manager", # for the adbc driver (TODO: this should live under the duckdb package) + "pandas; python_version < '3.14'", # used for pandas dataframes all over the place + "pyarrow; python_version < '3.14'", # used for pyarrow support + "adbc_driver_manager; python_version < '3.14'", # for the adbc driver (TODO: this should live under the duckdb package) ] ###################################################################################################### @@ -123,7 +123,6 @@ if.env.COVERAGE = false inherit.cmake.define = "append" cmake.define.DISABLE_UNITY = "1" - [tool.scikit-build.sdist] include = [ "README.md", @@ -204,6 +203,7 @@ required-environments = [ # ... but do always resolve for all of them "python_version >= '3.9' and sys_platform == 'linux' and platform_machine == 'x86_64'", "python_version >= '3.9' and sys_platform == 'linux' and platform_machine == 'aarch64'", ] +prerelease = "if-necessary-or-explicit" # for 3.14 # We just need pytorch for tests, wihtout GPU acceleration. 
PyPI doesn't host a cpu-only version for Linux, so we have # to configure the index url for cpu-only pytorch manually @@ -219,9 +219,9 @@ torchvision = [ { index = "pytorch-cpu" } ] [dependency-groups] # used for development only, requires pip >=25.1.0 stubdeps = [ # dependencies used for typehints in the stubs "fsspec", - "pandas", - "polars", - "pyarrow", + "pandas; python_version < '3.14'", + "polars; python_version < '3.14'", + "pyarrow; python_version < '3.14'", ] test = [ # dependencies used for running tests "pytest", @@ -229,21 +229,21 @@ test = [ # dependencies used for running tests "pytest-timeout", "mypy", "coverage", - "gcovr", + "gcovr; python_version < '3.14'", "gcsfs", "packaging", - "polars", + "polars; python_version < '3.14'", "psutil", "py4j", "pyotp", - "pyspark", + "pyspark; python_version < '3.14'", "pytz", "requests", "urllib3", "fsspec>=2022.11.0", - "pandas>=2.0.0", - "pyarrow>=18.0.0", - "torch>=2.2.2; sys_platform != 'darwin' or platform_machine != 'x86_64' or python_version < '3.13'", + "pandas>=2.0.0; python_version < '3.14'", + "pyarrow>=18.0.0; python_version < '3.14'", + "torch>=2.2.2; python_version < '3.14' and (sys_platform != 'darwin' or platform_machine != 'x86_64' or python_version < '3.13')", "tensorflow==2.14.0; sys_platform == 'darwin' and python_version < '3.12'", "tensorflow-cpu>=2.14.0; sys_platform == 'linux' and platform_machine != 'aarch64' and python_version < '3.12'", "tensorflow-cpu>=2.14.0; sys_platform == 'win32' and python_version < '3.12'", @@ -256,10 +256,10 @@ scripts = [ # dependencies used for running scripts "ipython", "ipywidgets", "numpy", - "pandas", + "pandas; python_version < '3.14'", "pcpp", - "polars", - "pyarrow", + "polars; python_version < '3.14'", + "pyarrow; python_version < '3.14'", "pytz" ] pypi = [ # dependencies used by the pypi cleanup script @@ -379,6 +379,7 @@ manylinux-x86_64-image = "manylinux_2_28" manylinux-pypy_x86_64-image = "manylinux_2_28" manylinux-aarch64-image = 
"manylinux_2_28" manylinux-pypy_aarch64-image = "manylinux_2_28" +enable = ["cpython-freethreading", "cpython-prerelease"] [tool.cibuildwheel.linux] before-build = ["yum install -y ccache"] diff --git a/tests/conftest.py b/tests/conftest.py index ce2d0e68..5e297aee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import os +import sys import pytest import shutil from os.path import abspath, join, dirname, normpath @@ -6,24 +7,36 @@ import duckdb import warnings from importlib import import_module try: # need to ignore warnings that might be thrown deep inside pandas's import tree (from dateutil in this case) - warnings.simplefilter(action='ignore', category=DeprecationWarning) - pandas = import_module('pandas') + warnings.simplefilter(action="ignore", category=DeprecationWarning) + pandas = import_module("pandas") warnings.resetwarnings() - pyarrow_dtype = getattr(pandas, 'ArrowDtype', None) + pyarrow_dtype = getattr(pandas, "ArrowDtype", None) except ImportError: pandas = None pyarrow_dtype = None + # Only install mock after we've failed to import pandas for conftest.py + class MockPandas: + def __getattr__(self, name): + pytest.skip("pandas not available", allow_module_level=True) + + sys.modules["pandas"] = MockPandas() + sys.modules["pandas.testing"] = MockPandas() + sys.modules["pandas._testing"] = MockPandas() + # Check if pandas has arrow dtypes enabled -try: - from pandas.compat import pa_version_under7p0 +if pandas is not None: + try: + from pandas.compat import pa_version_under7p0 - pyarrow_dtypes_enabled = not pa_version_under7p0 -except ImportError: + pyarrow_dtypes_enabled = not pa_version_under7p0 + except (ImportError, AttributeError): + pyarrow_dtypes_enabled = False +else: pyarrow_dtypes_enabled = False @@ -31,7 +44,7 @@ def import_pandas(): if pandas: return pandas else: - pytest.skip("Couldn't import pandas") + pytest.skip("Couldn't import pandas", allow_module_level=True) # 
https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option @@ -40,6 +53,23 @@ def pytest_addoption(parser): parser.addoption("--skiplist", action="append", nargs="+", type=str, help="skip listed tests") +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_call(item): + """Convert pandas requirement exceptions to skips""" + + outcome = yield + + # TODO: Remove skip when Pandas releases for 3.14. After, consider bumping to 3.15 + if sys.version_info[:2] == (3, 14): + try: + outcome.get_result() + except duckdb.InvalidInputException as e: + if "'pandas' is required for this operation but it was not installed" in str(e): + pytest.skip("pandas not available - test requires pandas functionality") + else: + raise e + + def pytest_collection_modifyitems(config, items): tests_to_skip = config.getoption("--skiplist") if not tests_to_skip: diff --git a/tests/fast/numpy/test_numpy_new_path.py b/tests/fast/numpy/test_numpy_new_path.py index 4267085c..3735ff6e 100644 --- a/tests/fast/numpy/test_numpy_new_path.py +++ b/tests/fast/numpy/test_numpy_new_path.py @@ -2,6 +2,7 @@ Therefore, we only test the new codes and exec paths. """ +import sys import numpy as np import duckdb from datetime import timedelta