Merged
2 changes: 1 addition & 1 deletion .github/workflows/cleanup_pypi.yml
@@ -52,7 +52,7 @@ jobs:
- name: Install Astral UV
uses: astral-sh/setup-uv@v6
with:
version: "0.7.14"
version: "0.8.16"

- name: Run Cleanup
env:
2 changes: 1 addition & 1 deletion .github/workflows/coverage.yml
@@ -70,7 +70,7 @@ jobs:
- name: Install Astral UV and enable the cache
uses: astral-sh/setup-uv@v6
with:
version: "0.7.14"
version: "0.8.16"
python-version: 3.9
enable-cache: true
cache-suffix: -${{ github.workflow }}
2 changes: 1 addition & 1 deletion .github/workflows/packaging_sdist.yml
@@ -58,7 +58,7 @@ jobs:
- name: Install Astral UV
uses: astral-sh/setup-uv@v6
with:
version: "0.7.14"
version: "0.8.16"
python-version: 3.11

- name: Build sdist
15 changes: 11 additions & 4 deletions .github/workflows/packaging_wheels.yml
@@ -30,7 +30,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python: [ cp39, cp310, cp311, cp312, cp313 ]
python: [ cp39, cp310, cp311, cp312, cp313, cp314, cp314t ]
platform:
- { os: windows-2025, arch: amd64, cibw_system: win }
- { os: ubuntu-24.04, arch: x86_64, cibw_system: manylinux }
@@ -45,6 +45,10 @@
- { minimal: true, python: cp311 }
- { minimal: true, python: cp312 }
- { minimal: true, platform: { arch: universal2 } }
# Windows+cp314t disabled due to test failures in CI.
# TODO: Diagnose why tests fail (access violations) in some configurations
- { python: cp314t, platform: { os: windows-2025 } }

runs-on: ${{ matrix.platform.os }}
env:
CIBW_TEST_SKIP: ${{ inputs.testsuite == 'none' && '*' || '*-macosx_universal2' }}
@@ -80,16 +84,19 @@
# Install Astral UV, which will be used as build-frontend for cibuildwheel
- uses: astral-sh/setup-uv@v6
with:
version: "0.7.14"
version: "0.8.16"
enable-cache: false
cache-suffix: -${{ matrix.python }}-${{ matrix.platform.cibw_system }}_${{ matrix.platform.arch }}
python-version: ${{ matrix.python }}

- name: Build${{ inputs.testsuite != 'none' && ' and test ' || ' ' }}wheels
uses: pypa/cibuildwheel@v3.0
uses: pypa/cibuildwheel@v3.1
env:
CIBW_ARCHS: ${{ matrix.platform.arch == 'amd64' && 'AMD64' || matrix.platform.arch }}
CIBW_BUILD: ${{ matrix.python }}-${{ matrix.platform.cibw_system }}_${{ matrix.platform.arch }}

# PYTHON_GIL=1: Suppresses the RuntimeWarning that the GIL is enabled on free-threaded builds.
# TODO: Remove PYTHON_GIL=1 when free-threaded is supported.
CIBW_ENVIRONMENT: PYTHON_GIL=1
Collaborator: Looks like this enables the GIL on free-threaded python? Maybe add a comment here as well?

Contributor Author: Added comment and TODO
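For context, a minimal sketch (not part of this PR, assuming a CPython 3.13+ interpreter) of how a free-threaded build can be detected and how the GIL state forced by `PYTHON_GIL=1` shows up at runtime:

```python
import sys
import sysconfig

# Py_GIL_DISABLED is 1 only on free-threaded builds (e.g. cp314t).
free_threaded_build = bool(sysconfig.get_config_var("Py_GIL_DISABLED"))

if free_threaded_build:
    # With PYTHON_GIL=1 in the environment the GIL stays enabled even on a
    # free-threaded build, so this prints True and the import-time
    # RuntimeWarning about the GIL being enabled is not emitted.
    print("GIL enabled at runtime:", sys._is_gil_enabled())
else:
    print("Regular GIL build; PYTHON_GIL has no effect here.")
```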

- name: Upload wheel
uses: actions/upload-artifact@v4
with:
33 changes: 17 additions & 16 deletions pyproject.toml
@@ -46,9 +46,9 @@ all = [ # users can install duckdb with 'duckdb[all]', which will install this l
"ipython", # used in duckdb.query_graph
"fsspec", # used in duckdb.filesystem
"numpy", # used in duckdb.experimental.spark and in duckdb.fetchnumpy()
"pandas", # used for pandas dataframes all over the place
"pyarrow", # used for pyarrow support
"adbc_driver_manager", # for the adbc driver (TODO: this should live under the duckdb package)
"pandas; python_version < '3.14'", # used for pandas dataframes all over the place
"pyarrow; python_version < '3.14'", # used for pyarrow support
"adbc_driver_manager; python_version < '3.14'", # for the adbc driver (TODO: this should live under the duckdb package)
]

######################################################################################################
@@ -123,7 +123,6 @@ if.env.COVERAGE = false
inherit.cmake.define = "append"
cmake.define.DISABLE_UNITY = "1"


[tool.scikit-build.sdist]
include = [
"README.md",
@@ -204,6 +203,7 @@ required-environments = [ # ... but do always resolve for all of them
"python_version >= '3.9' and sys_platform == 'linux' and platform_machine == 'x86_64'",
"python_version >= '3.9' and sys_platform == 'linux' and platform_machine == 'aarch64'",
]
prerelease = "if-necessary-or-explicit" # for 3.14

# We just need pytorch for tests, without GPU acceleration. PyPI doesn't host a cpu-only version for Linux, so we have
# to configure the index url for cpu-only pytorch manually
@@ -219,31 +219,31 @@ torchvision = [ { index = "pytorch-cpu" } ]
[dependency-groups] # used for development only, requires pip >=25.1.0
stubdeps = [ # dependencies used for typehints in the stubs
"fsspec",
"pandas",
"polars",
"pyarrow",
"pandas; python_version < '3.14'",
"polars; python_version < '3.14'",
"pyarrow; python_version < '3.14'",
]
test = [ # dependencies used for running tests
"pytest",
"pytest-reraise",
"pytest-timeout",
"mypy",
"coverage",
"gcovr",
"gcovr; python_version < '3.14'",
"gcsfs",
"packaging",
"polars",
"polars; python_version < '3.14'",
"psutil",
"py4j",
"pyotp",
"pyspark",
"pyspark; python_version < '3.14'",
"pytz",
"requests",
"urllib3",
"fsspec>=2022.11.0",
"pandas>=2.0.0",
"pyarrow>=18.0.0",
"torch>=2.2.2; sys_platform != 'darwin' or platform_machine != 'x86_64' or python_version < '3.13'",
"pandas>=2.0.0; python_version < '3.14'",
"pyarrow>=18.0.0; python_version < '3.14'",
"torch>=2.2.2; python_version < '3.14' and (sys_platform != 'darwin' or platform_machine != 'x86_64' or python_version < '3.13')",
"tensorflow==2.14.0; sys_platform == 'darwin' and python_version < '3.12'",
"tensorflow-cpu>=2.14.0; sys_platform == 'linux' and platform_machine != 'aarch64' and python_version < '3.12'",
"tensorflow-cpu>=2.14.0; sys_platform == 'win32' and python_version < '3.12'",
@@ -256,10 +256,10 @@ scripts = [ # dependencies used for running scripts
"ipython",
"ipywidgets",
"numpy",
"pandas",
"pandas; python_version < '3.14'",
"pcpp",
"polars",
"pyarrow",
"polars; python_version < '3.14'",
"pyarrow; python_version < '3.14'",
"pytz"
]
pypi = [ # dependencies used by the pypi cleanup script
@@ -379,6 +379,7 @@ manylinux-x86_64-image = "manylinux_2_28"
manylinux-pypy_x86_64-image = "manylinux_2_28"
manylinux-aarch64-image = "manylinux_2_28"
manylinux-pypy_aarch64-image = "manylinux_2_28"
enable = ["cpython-freethreading", "cpython-prerelease"]

[tool.cibuildwheel.linux]
before-build = ["yum install -y ccache"]
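The recurring `; python_version < '3.14'` suffixes in the pyproject.toml changes above are standard PEP 508 environment markers, which installers evaluate against the target interpreter. A minimal, purely illustrative sketch of how such a marker evaluates, using the `packaging` library (already listed in the test dependencies):

```python
from packaging.markers import Marker

marker = Marker("python_version < '3.14'")

# Evaluated against the running interpreter: True on 3.9-3.13, False on 3.14,
# so installers simply omit the guarded dependency there.
print(marker.evaluate())

# An explicit environment can be passed to check a different interpreter.
print(marker.evaluate({"python_version": "3.14"}))  # False
```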
47 changes: 39 additions & 8 deletions tests/conftest.py
@@ -1,37 +1,51 @@
import os
import sys
import pytest
import shutil
from os.path import abspath, join, dirname, normpath
import glob
import duckdb
import warnings
from importlib import import_module
import sys

try:
# need to ignore warnings that might be thrown deep inside pandas's import tree (from dateutil in this case)
warnings.simplefilter(action='ignore', category=DeprecationWarning)
pandas = import_module('pandas')
warnings.simplefilter(action="ignore", category=DeprecationWarning)
pandas = import_module("pandas")
warnings.resetwarnings()

pyarrow_dtype = getattr(pandas, 'ArrowDtype', None)
pyarrow_dtype = getattr(pandas, "ArrowDtype", None)
except ImportError:
pandas = None
pyarrow_dtype = None

# Only install mock after we've failed to import pandas for conftest.py
class MockPandas:
def __getattr__(self, name):
pytest.skip("pandas not available", allow_module_level=True)

sys.modules["pandas"] = MockPandas()
sys.modules["pandas.testing"] = MockPandas()
sys.modules["pandas._testing"] = MockPandas()

# Check if pandas has arrow dtypes enabled
try:
from pandas.compat import pa_version_under7p0
if pandas is not None:
try:
from pandas.compat import pa_version_under7p0

pyarrow_dtypes_enabled = not pa_version_under7p0
except ImportError:
pyarrow_dtypes_enabled = not pa_version_under7p0
except (ImportError, AttributeError):
pyarrow_dtypes_enabled = False
else:
pyarrow_dtypes_enabled = False


def import_pandas():
if pandas:
return pandas
else:
pytest.skip("Couldn't import pandas")
pytest.skip("Couldn't import pandas", allow_module_level=True)


# https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
@@ -40,6 +54,23 @@ def pytest_addoption(parser):
parser.addoption("--skiplist", action="append", nargs="+", type=str, help="skip listed tests")


@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_call(item):
"""Convert pandas requirement exceptions to skips"""

outcome = yield

# TODO: Remove this skip once pandas publishes a release for 3.14; afterwards, consider bumping to 3.15
if sys.version_info[:2] == (3, 14):
try:
outcome.get_result()
except duckdb.InvalidInputException as e:
if "'pandas' is required for this operation but it was not installed" in str(e):
pytest.skip("pandas not available - test requires pandas functionality")
else:
raise e


def pytest_collection_modifyitems(config, items):
tests_to_skip = config.getoption("--skiplist")
if not tests_to_skip:
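A hypothetical test module (not part of this PR) illustrating the MockPandas stub added to conftest.py above: when real pandas is absent, the import still resolves to the stub, and the first attribute access turns the test into a skip instead of an error:

```python
# Purely illustrative; relies on the MockPandas stub installed by conftest.py.
import pandas as pd  # resolves to the MockPandas instance when pandas is missing


def test_needs_pandas():
    # The first attribute access on the stub calls pytest.skip(...), so this
    # test is reported as skipped rather than failing with an ImportError or
    # AttributeError on interpreters without pandas (e.g. Python 3.14).
    df = pd.DataFrame({"a": [1, 2, 3]})
    assert len(df) == 3
```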
1 change: 1 addition & 0 deletions tests/fast/numpy/test_numpy_new_path.py
@@ -2,6 +2,7 @@
Therefore, we only test the new codes and exec paths.
"""

import sys
import numpy as np
import duckdb
from datetime import timedelta