diff --git a/.github/workflows/validate.yaml b/.github/workflows/validate.yaml
index 838a26ef3..98bce1274 100644
--- a/.github/workflows/validate.yaml
+++ b/.github/workflows/validate.yaml
@@ -12,7 +12,6 @@ env:
POETRY_CACHE_DIR: ${{ github.workspace }}/.var/cache/pypoetry
PIP_CACHE_DIR: ${{ github.workspace }}/.var/cache/pip
-
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
@@ -52,7 +51,7 @@ jobs:
PREPARATION: "sudo apt-get install -y firejail"
extensive-tests: true
TOX_TEST_HARNESS: "firejail --net=none --"
- TOX_PYTEST_EXTRA_ARGS: "-m 'not webtest'"
+ TOX_PYTEST_EXTRA_ARGS: "-m 'not (testcontainer or webtest)'"
steps:
- uses: actions/checkout@v4
- name: Cache XDG_CACHE_HOME
@@ -84,6 +83,13 @@ jobs:
shell: bash
run: |
${{ matrix.PREPARATION }}
+ - name: Set testcontainer exclusion for non-Linux
+        if: ${{ runner.os != 'Linux' }}
+ shell: bash
+ run: |
+ if [ -z "${{ matrix.TOX_PYTEST_EXTRA_ARGS }}" ]; then
+            echo "TOX_PYTEST_EXTRA_ARGS=-m 'not testcontainer'" >> "$GITHUB_ENV"
+ fi
- name: Run validation
shell: bash
run: |
@@ -97,7 +103,7 @@ jobs:
gha:validate
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- TOX_PYTEST_EXTRA_ARGS: ${{ matrix.TOX_PYTEST_EXTRA_ARGS }}
+ TOX_PYTEST_EXTRA_ARGS: ${{ matrix.TOX_PYTEST_EXTRA_ARGS || env.TOX_PYTEST_EXTRA_ARGS }}
TOX_TEST_HARNESS: ${{ matrix.TOX_TEST_HARNESS }}
TOX_EXTRA_COMMAND: ${{ matrix.TOX_EXTRA_COMMAND }}
- uses: actions/upload-artifact@v4
diff --git a/Taskfile.yml b/Taskfile.yml
index 735b634f7..0c913c83c 100644
--- a/Taskfile.yml
+++ b/Taskfile.yml
@@ -378,3 +378,11 @@ tasks:
sys.stderr.write(f"removing {path}\n")
shutil.rmtree(path, ignore_errors=True)
' {{.RIMRAF_TARGET}}
+
+ test:rdf4j:
+ desc: Run fast tests against rdflib.contrib.rdf4j package
+ cmd: '{{.TEST_HARNESS}}{{.RUN_PREFIX}} pytest -m "not (testcontainer or webtest)" test/test_rdf4j'
+
+ test:rdf4j:all:
+ desc: Run all tests against rdflib.contrib.rdf4j package
+ cmd: '{{.TEST_HARNESS}}{{.RUN_PREFIX}} pytest test/test_rdf4j'
diff --git a/mkdocs.yml b/mkdocs.yml
index 2aa212c2c..da4f8e63f 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -43,6 +43,7 @@ nav:
- Container: apidocs/rdflib.container.md
- Collection: apidocs/rdflib.collection.md
- Paths: apidocs/rdflib.paths.md
+ - RDF4J: apidocs/rdflib.contrib.rdf4j.md
- Util: apidocs/rdflib.util.md
- Plugins:
- Parsers: apidocs/rdflib.plugins.parsers.md
diff --git a/poetry.lock b/poetry.lock
index e0c5133d9..c138e1b9a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,27 @@
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
+[[package]]
+name = "anyio"
+version = "4.5.2"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"},
+ {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"},
+]
+
+[package.dependencies]
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"]
+trio = ["trio (>=0.26.1)"]
+
[[package]]
name = "babel"
version = "2.17.0"
@@ -372,6 +394,28 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
[package.extras]
toml = ["tomli"]
+[[package]]
+name = "docker"
+version = "7.1.0"
+description = "A Python library for the Docker Engine API."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"},
+ {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"},
+]
+
+[package.dependencies]
+pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""}
+requests = ">=2.26.0"
+urllib3 = ">=1.26.0"
+
+[package.extras]
+dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"]
+docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"]
+ssh = ["paramiko (>=2.4.3)"]
+websockets = ["websocket-client (>=1.3.0)"]
+
[[package]]
name = "exceptiongroup"
version = "1.3.0"
@@ -420,6 +464,17 @@ files = [
[package.dependencies]
colorama = ">=0.4"
+[[package]]
+name = "h11"
+version = "0.16.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
+ {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
+]
+
[[package]]
name = "html5rdf"
version = "1.2.1"
@@ -431,6 +486,51 @@ files = [
{file = "html5rdf-1.2.1.tar.gz", hash = "sha256:ace9b420ce52995bb4f05e7425eedf19e433c981dfe7a831ab391e2fa2e1a195"},
]
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+description = "A minimal low-level HTTP client."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
+ {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.16"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+description = "The next generation HTTP client."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
+ {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
+]
+
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+httpcore = "==1.*"
+idna = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
+
[[package]]
name = "idna"
version = "3.11"
@@ -1336,6 +1436,20 @@ files = [
[package.dependencies]
six = ">=1.5"
+[[package]]
+name = "python-dotenv"
+version = "1.2.1"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"},
+ {file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
[[package]]
name = "pytz"
version = "2025.2"
@@ -1347,6 +1461,35 @@ files = [
{file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"},
]
+[[package]]
+name = "pywin32"
+version = "311"
+description = "Python for Window Extensions"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"},
+ {file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"},
+ {file = "pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b"},
+ {file = "pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151"},
+ {file = "pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503"},
+ {file = "pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2"},
+ {file = "pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31"},
+ {file = "pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067"},
+ {file = "pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852"},
+ {file = "pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d"},
+ {file = "pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d"},
+ {file = "pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a"},
+ {file = "pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee"},
+ {file = "pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87"},
+ {file = "pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42"},
+ {file = "pywin32-311-cp38-cp38-win32.whl", hash = "sha256:6c6f2969607b5023b0d9ce2541f8d2cbb01c4f46bc87456017cf63b73f1e2d8c"},
+ {file = "pywin32-311-cp38-cp38-win_amd64.whl", hash = "sha256:c8015b09fb9a5e188f83b7b04de91ddca4658cee2ae6f3bc483f0b21a77ef6cd"},
+ {file = "pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b"},
+ {file = "pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91"},
+ {file = "pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d"},
+]
+
[[package]]
name = "pyyaml"
version = "6.0.3"
@@ -1519,6 +1662,70 @@ files = [
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
]
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
+[[package]]
+name = "testcontainers"
+version = "4.13.3"
+description = "Python library for throwaway instances of anything that can run in a Docker container"
+optional = false
+python-versions = ">=3.9.2"
+files = [
+ {file = "testcontainers-4.13.3-py3-none-any.whl", hash = "sha256:063278c4805ffa6dd85e56648a9da3036939e6c0ac1001e851c9276b19b05970"},
+ {file = "testcontainers-4.13.3.tar.gz", hash = "sha256:9d82a7052c9a53c58b69e1dc31da8e7a715e8b3ec1c4df5027561b47e2efe646"},
+]
+
+[package.dependencies]
+docker = "*"
+python-dotenv = "*"
+typing-extensions = "*"
+urllib3 = "*"
+wrapt = "*"
+
+[package.extras]
+arangodb = ["python-arango (>=7.8,<8.0)"]
+aws = ["boto3", "httpx"]
+azurite = ["azure-storage-blob (>=12.19,<13.0)"]
+chroma = ["chromadb-client (>=1.0.0,<2.0.0)"]
+cosmosdb = ["azure-cosmos"]
+db2 = ["ibm_db_sa", "sqlalchemy"]
+generic = ["httpx", "redis"]
+google = ["google-cloud-datastore (>=2)", "google-cloud-pubsub (>=2)"]
+influxdb = ["influxdb", "influxdb-client"]
+k3s = ["kubernetes", "pyyaml (>=6.0.3)"]
+keycloak = ["python-keycloak"]
+localstack = ["boto3"]
+mailpit = ["cryptography"]
+minio = ["minio"]
+mongodb = ["pymongo"]
+mssql = ["pymssql (>=2.3.9)", "sqlalchemy"]
+mysql = ["pymysql[rsa]", "sqlalchemy"]
+nats = ["nats-py"]
+neo4j = ["neo4j"]
+openfga = ["openfga-sdk"]
+opensearch = ["opensearch-py"]
+oracle = ["oracledb (>=3.4.1)", "sqlalchemy"]
+oracle-free = ["oracledb (>=3.4.1)", "sqlalchemy"]
+qdrant = ["qdrant-client"]
+rabbitmq = ["pika"]
+redis = ["redis"]
+registry = ["bcrypt"]
+scylla = ["cassandra-driver (==3.29.1)"]
+selenium = ["selenium"]
+sftp = ["cryptography"]
+test-module-import = ["httpx"]
+trino = ["trino"]
+weaviate = ["weaviate-client (>=4.5.4,<5.0.0)"]
+
[[package]]
name = "tomli"
version = "2.3.0"
@@ -1684,6 +1891,125 @@ files = [
[package.extras]
test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
+[[package]]
+name = "wrapt"
+version = "2.0.1"
+description = "Module for decorators, wrappers and monkey patching."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "wrapt-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:64b103acdaa53b7caf409e8d45d39a8442fe6dcfec6ba3f3d141e0cc2b5b4dbd"},
+ {file = "wrapt-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91bcc576260a274b169c3098e9a3519fb01f2989f6d3d386ef9cbf8653de1374"},
+ {file = "wrapt-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab594f346517010050126fcd822697b25a7031d815bb4fbc238ccbe568216489"},
+ {file = "wrapt-2.0.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:36982b26f190f4d737f04a492a68accbfc6fa042c3f42326fdfbb6c5b7a20a31"},
+ {file = "wrapt-2.0.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23097ed8bc4c93b7bf36fa2113c6c733c976316ce0ee2c816f64ca06102034ef"},
+ {file = "wrapt-2.0.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bacfe6e001749a3b64db47bcf0341da757c95959f592823a93931a422395013"},
+ {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8ec3303e8a81932171f455f792f8df500fc1a09f20069e5c16bd7049ab4e8e38"},
+ {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:3f373a4ab5dbc528a94334f9fe444395b23c2f5332adab9ff4ea82f5a9e33bc1"},
+ {file = "wrapt-2.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f49027b0b9503bf6c8cdc297ca55006b80c2f5dd36cecc72c6835ab6e10e8a25"},
+ {file = "wrapt-2.0.1-cp310-cp310-win32.whl", hash = "sha256:8330b42d769965e96e01fa14034b28a2a7600fbf7e8f0cc90ebb36d492c993e4"},
+ {file = "wrapt-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:1218573502a8235bb8a7ecaed12736213b22dcde9feab115fa2989d42b5ded45"},
+ {file = "wrapt-2.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:eda8e4ecd662d48c28bb86be9e837c13e45c58b8300e43ba3c9b4fa9900302f7"},
+ {file = "wrapt-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0e17283f533a0d24d6e5429a7d11f250a58d28b4ae5186f8f47853e3e70d2590"},
+ {file = "wrapt-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85df8d92158cb8f3965aecc27cf821461bb5f40b450b03facc5d9f0d4d6ddec6"},
+ {file = "wrapt-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1be685ac7700c966b8610ccc63c3187a72e33cab53526a27b2a285a662cd4f7"},
+ {file = "wrapt-2.0.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:df0b6d3b95932809c5b3fecc18fda0f1e07452d05e2662a0b35548985f256e28"},
+ {file = "wrapt-2.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da7384b0e5d4cae05c97cd6f94faaf78cc8b0f791fc63af43436d98c4ab37bb"},
+ {file = "wrapt-2.0.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ec65a78fbd9d6f083a15d7613b2800d5663dbb6bb96003899c834beaa68b242c"},
+ {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7de3cc939be0e1174969f943f3b44e0d79b6f9a82198133a5b7fc6cc92882f16"},
+ {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fb1a5b72cbd751813adc02ef01ada0b0d05d3dcbc32976ce189a1279d80ad4a2"},
+ {file = "wrapt-2.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3fa272ca34332581e00bf7773e993d4f632594eb2d1b0b162a9038df0fd971dd"},
+ {file = "wrapt-2.0.1-cp311-cp311-win32.whl", hash = "sha256:fc007fdf480c77301ab1afdbb6ab22a5deee8885f3b1ed7afcb7e5e84a0e27be"},
+ {file = "wrapt-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:47434236c396d04875180171ee1f3815ca1eada05e24a1ee99546320d54d1d1b"},
+ {file = "wrapt-2.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:837e31620e06b16030b1d126ed78e9383815cbac914693f54926d816d35d8edf"},
+ {file = "wrapt-2.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1fdbb34da15450f2b1d735a0e969c24bdb8d8924892380126e2a293d9902078c"},
+ {file = "wrapt-2.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3d32794fe940b7000f0519904e247f902f0149edbe6316c710a8562fb6738841"},
+ {file = "wrapt-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:386fb54d9cd903ee0012c09291336469eb7b244f7183d40dc3e86a16a4bace62"},
+ {file = "wrapt-2.0.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7b219cb2182f230676308cdcacd428fa837987b89e4b7c5c9025088b8a6c9faf"},
+ {file = "wrapt-2.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:641e94e789b5f6b4822bb8d8ebbdfc10f4e4eae7756d648b717d980f657a9eb9"},
+ {file = "wrapt-2.0.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe21b118b9f58859b5ebaa4b130dee18669df4bd111daad082b7beb8799ad16b"},
+ {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:17fb85fa4abc26a5184d93b3efd2dcc14deb4b09edcdb3535a536ad34f0b4dba"},
+ {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:b89ef9223d665ab255ae42cc282d27d69704d94be0deffc8b9d919179a609684"},
+ {file = "wrapt-2.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a453257f19c31b31ba593c30d997d6e5be39e3b5ad9148c2af5a7314061c63eb"},
+ {file = "wrapt-2.0.1-cp312-cp312-win32.whl", hash = "sha256:3e271346f01e9c8b1130a6a3b0e11908049fe5be2d365a5f402778049147e7e9"},
+ {file = "wrapt-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:2da620b31a90cdefa9cd0c2b661882329e2e19d1d7b9b920189956b76c564d75"},
+ {file = "wrapt-2.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:aea9c7224c302bc8bfc892b908537f56c430802560e827b75ecbde81b604598b"},
+ {file = "wrapt-2.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:47b0f8bafe90f7736151f61482c583c86b0693d80f075a58701dd1549b0010a9"},
+ {file = "wrapt-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cbeb0971e13b4bd81d34169ed57a6dda017328d1a22b62fda45e1d21dd06148f"},
+ {file = "wrapt-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb7cffe572ad0a141a7886a1d2efa5bef0bf7fe021deeea76b3ab334d2c38218"},
+ {file = "wrapt-2.0.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8d60527d1ecfc131426b10d93ab5d53e08a09c5fa0175f6b21b3252080c70a9"},
+ {file = "wrapt-2.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c654eafb01afac55246053d67a4b9a984a3567c3808bb7df2f8de1c1caba2e1c"},
+ {file = "wrapt-2.0.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:98d873ed6c8b4ee2418f7afce666751854d6d03e3c0ec2a399bb039cd2ae89db"},
+ {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9e850f5b7fc67af856ff054c71690d54fa940c3ef74209ad9f935b4f66a0233"},
+ {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e505629359cb5f751e16e30cf3f91a1d3ddb4552480c205947da415d597f7ac2"},
+ {file = "wrapt-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2879af909312d0baf35f08edeea918ee3af7ab57c37fe47cb6a373c9f2749c7b"},
+ {file = "wrapt-2.0.1-cp313-cp313-win32.whl", hash = "sha256:d67956c676be5a24102c7407a71f4126d30de2a569a1c7871c9f3cabc94225d7"},
+ {file = "wrapt-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9ca66b38dd642bf90c59b6738af8070747b610115a39af2498535f62b5cdc1c3"},
+ {file = "wrapt-2.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:5a4939eae35db6b6cec8e7aa0e833dcca0acad8231672c26c2a9ab7a0f8ac9c8"},
+ {file = "wrapt-2.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a52f93d95c8d38fed0669da2ebdb0b0376e895d84596a976c15a9eb45e3eccb3"},
+ {file = "wrapt-2.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e54bbf554ee29fcceee24fa41c4d091398b911da6e7f5d7bffda963c9aed2e1"},
+ {file = "wrapt-2.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:908f8c6c71557f4deaa280f55d0728c3bca0960e8c3dd5ceeeafb3c19942719d"},
+ {file = "wrapt-2.0.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e2f84e9af2060e3904a32cea9bb6db23ce3f91cfd90c6b426757cf7cc01c45c7"},
+ {file = "wrapt-2.0.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3612dc06b436968dfb9142c62e5dfa9eb5924f91120b3c8ff501ad878f90eb3"},
+ {file = "wrapt-2.0.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d2d947d266d99a1477cd005b23cbd09465276e302515e122df56bb9511aca1b"},
+ {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7d539241e87b650cbc4c3ac9f32c8d1ac8a54e510f6dca3f6ab60dcfd48c9b10"},
+ {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4811e15d88ee62dbf5c77f2c3ff3932b1e3ac92323ba3912f51fc4016ce81ecf"},
+ {file = "wrapt-2.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c1c91405fcf1d501fa5d55df21e58ea49e6b879ae829f1039faaf7e5e509b41e"},
+ {file = "wrapt-2.0.1-cp313-cp313t-win32.whl", hash = "sha256:e76e3f91f864e89db8b8d2a8311d57df93f01ad6bb1e9b9976d1f2e83e18315c"},
+ {file = "wrapt-2.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:83ce30937f0ba0d28818807b303a412440c4b63e39d3d8fc036a94764b728c92"},
+ {file = "wrapt-2.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:4b55cacc57e1dc2d0991dbe74c6419ffd415fb66474a02335cb10efd1aa3f84f"},
+ {file = "wrapt-2.0.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5e53b428f65ece6d9dad23cb87e64506392b720a0b45076c05354d27a13351a1"},
+ {file = "wrapt-2.0.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ad3ee9d0f254851c71780966eb417ef8e72117155cff04821ab9b60549694a55"},
+ {file = "wrapt-2.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d7b822c61ed04ee6ad64bc90d13368ad6eb094db54883b5dde2182f67a7f22c0"},
+ {file = "wrapt-2.0.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7164a55f5e83a9a0b031d3ffab4d4e36bbec42e7025db560f225489fa929e509"},
+ {file = "wrapt-2.0.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e60690ba71a57424c8d9ff28f8d006b7ad7772c22a4af432188572cd7fa004a1"},
+ {file = "wrapt-2.0.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3cd1a4bd9a7a619922a8557e1318232e7269b5fb69d4ba97b04d20450a6bf970"},
+ {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b4c2e3d777e38e913b8ce3a6257af72fb608f86a1df471cb1d4339755d0a807c"},
+ {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3d366aa598d69416b5afedf1faa539fac40c1d80a42f6b236c88c73a3c8f2d41"},
+ {file = "wrapt-2.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c235095d6d090aa903f1db61f892fffb779c1eaeb2a50e566b52001f7a0f66ed"},
+ {file = "wrapt-2.0.1-cp314-cp314-win32.whl", hash = "sha256:bfb5539005259f8127ea9c885bdc231978c06b7a980e63a8a61c8c4c979719d0"},
+ {file = "wrapt-2.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:4ae879acc449caa9ed43fc36ba08392b9412ee67941748d31d94e3cedb36628c"},
+ {file = "wrapt-2.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:8639b843c9efd84675f1e100ed9e99538ebea7297b62c4b45a7042edb84db03e"},
+ {file = "wrapt-2.0.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:9219a1d946a9b32bb23ccae66bdb61e35c62773ce7ca6509ceea70f344656b7b"},
+ {file = "wrapt-2.0.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fa4184e74197af3adad3c889a1af95b53bb0466bced92ea99a0c014e48323eec"},
+ {file = "wrapt-2.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c5ef2f2b8a53b7caee2f797ef166a390fef73979b15778a4a153e4b5fedce8fa"},
+ {file = "wrapt-2.0.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e042d653a4745be832d5aa190ff80ee4f02c34b21f4b785745eceacd0907b815"},
+ {file = "wrapt-2.0.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2afa23318136709c4b23d87d543b425c399887b4057936cd20386d5b1422b6fa"},
+ {file = "wrapt-2.0.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6c72328f668cf4c503ffcf9434c2b71fdd624345ced7941bc6693e61bbe36bef"},
+ {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3793ac154afb0e5b45d1233cb94d354ef7a983708cc3bb12563853b1d8d53747"},
+ {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fec0d993ecba3991645b4857837277469c8cc4c554a7e24d064d1ca291cfb81f"},
+ {file = "wrapt-2.0.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:949520bccc1fa227274da7d03bf238be15389cd94e32e4297b92337df9b7a349"},
+ {file = "wrapt-2.0.1-cp314-cp314t-win32.whl", hash = "sha256:be9e84e91d6497ba62594158d3d31ec0486c60055c49179edc51ee43d095f79c"},
+ {file = "wrapt-2.0.1-cp314-cp314t-win_amd64.whl", hash = "sha256:61c4956171c7434634401db448371277d07032a81cc21c599c22953374781395"},
+ {file = "wrapt-2.0.1-cp314-cp314t-win_arm64.whl", hash = "sha256:35cdbd478607036fee40273be8ed54a451f5f23121bd9d4be515158f9498f7ad"},
+ {file = "wrapt-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:90897ea1cf0679763b62e79657958cd54eae5659f6360fc7d2ccc6f906342183"},
+ {file = "wrapt-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50844efc8cdf63b2d90cd3d62d4947a28311e6266ce5235a219d21b195b4ec2c"},
+ {file = "wrapt-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49989061a9977a8cbd6d20f2efa813f24bf657c6990a42967019ce779a878dbf"},
+ {file = "wrapt-2.0.1-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:09c7476ab884b74dce081ad9bfd07fe5822d8600abade571cb1f66d5fc915af6"},
+ {file = "wrapt-2.0.1-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1a8a09a004ef100e614beec82862d11fc17d601092c3599afd22b1f36e4137e"},
+ {file = "wrapt-2.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:89a82053b193837bf93c0f8a57ded6e4b6d88033a499dadff5067e912c2a41e9"},
+ {file = "wrapt-2.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f26f8e2ca19564e2e1fdbb6a0e47f36e0efbab1acc31e15471fad88f828c75f6"},
+ {file = "wrapt-2.0.1-cp38-cp38-win32.whl", hash = "sha256:115cae4beed3542e37866469a8a1f2b9ec549b4463572b000611e9946b86e6f6"},
+ {file = "wrapt-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c4012a2bd37059d04f8209916aa771dfb564cccb86079072bdcd48a308b6a5c5"},
+ {file = "wrapt-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:68424221a2dc00d634b54f92441914929c5ffb1c30b3b837343978343a3512a3"},
+ {file = "wrapt-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6bd1a18f5a797fe740cb3d7a0e853a8ce6461cc62023b630caec80171a6b8097"},
+ {file = "wrapt-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb3a86e703868561c5cad155a15c36c716e1ab513b7065bd2ac8ed353c503333"},
+ {file = "wrapt-2.0.1-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5dc1b852337c6792aa111ca8becff5bacf576bf4a0255b0f05eb749da6a1643e"},
+ {file = "wrapt-2.0.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c046781d422f0830de6329fa4b16796096f28a92c8aef3850674442cdcb87b7f"},
+ {file = "wrapt-2.0.1-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f73f9f7a0ebd0db139253d27e5fc8d2866ceaeef19c30ab5d69dcbe35e1a6981"},
+ {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b667189cf8efe008f55bbda321890bef628a67ab4147ebf90d182f2dadc78790"},
+ {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:a9a83618c4f0757557c077ef71d708ddd9847ed66b7cc63416632af70d3e2308"},
+ {file = "wrapt-2.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e9b121e9aeb15df416c2c960b8255a49d44b4038016ee17af03975992d03931"},
+ {file = "wrapt-2.0.1-cp39-cp39-win32.whl", hash = "sha256:1f186e26ea0a55f809f232e92cc8556a0977e00183c3ebda039a807a42be1494"},
+ {file = "wrapt-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:bf4cb76f36be5de950ce13e22e7fdf462b35b04665a12b64f3ac5c1bbbcf3728"},
+ {file = "wrapt-2.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:d6cc985b9c8b235bd933990cdbf0f891f8e010b65a3911f7a55179cd7b0fc57b"},
+ {file = "wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca"},
+ {file = "wrapt-2.0.1.tar.gz", hash = "sha256:9c9c635e78497cacb81e84f8b11b23e0aacac7a136e73b8e5b2109a1d9fc468f"},
+]
+
+[package.extras]
+dev = ["pytest", "setuptools"]
+
[[package]]
name = "zipp"
version = "3.20.2"
@@ -1709,8 +2035,9 @@ html = ["html5rdf"]
lxml = ["lxml"]
networkx = ["networkx"]
orjson = ["orjson"]
+rdf4j = ["httpx"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8.1"
-content-hash = "5113fb643b174ebedd081933dc427c4cad3fe80266c3d81c6be6291ce2ea2620"
+content-hash = "a9f7643c46c8fa5f969aa0554e147cc9d1ebc19284aabf9ed19991fa96f105c7"
diff --git a/pyproject.toml b/pyproject.toml
index 9ccea14b3..4924a5af5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -49,6 +49,7 @@ networkx = {version = ">=2,<4", optional = true}
html5rdf = {version = ">=1.2,<2", optional = true}
lxml = {version = ">=4.3,<6.0", optional = true}
orjson = {version = ">=3.9.14,<4", optional = true}
+httpx = {version = "^0.28.1", optional = true}
[tool.poetry.group.dev.dependencies]
black = "24.8.0"
@@ -63,6 +64,7 @@ coverage = {version = "^7.0.1", extras = ["toml"]}
types-setuptools = ">=68.0.0.3,<72.0.0.0"
setuptools = ">=68,<72"
wheel = ">=0.42,<0.46"
+testcontainers = {version = "^4.13.2", python = ">=3.9.2"}
[tool.poetry.group.docs.dependencies]
typing-extensions = "^4.11.0"
@@ -85,6 +87,7 @@ html = ["html5rdf"]
# lxml support is optional, it is used only for parsing XML-formatted SPARQL results
lxml = ["lxml"]
orjson = ["orjson"]
+rdf4j = ["httpx"]
[build-system]
requires = ["poetry-core>=1.4.0"]
@@ -207,6 +210,8 @@ addopts = [
"--ignore=admin",
"--ignore=devtools",
"--ignore=rdflib/extras/external_graph_libs.py",
+ "--ignore=rdflib/contrib/graphdb/client.py",
+ "--ignore=rdflib/contrib/rdf4j/client.py",
"--ignore-glob=docs/*.py",
"--ignore-glob=site/*",
"--strict-markers",
@@ -218,6 +223,7 @@ filterwarnings = [
"ignore:Code. _pytestfixturefunction is not defined in namespace .*:UserWarning",
]
markers = [
+    "testcontainer: mark a test that uses testcontainers (requires a running Docker daemon)",
"webtest: mark a test as using the internet",
]
# log_cli = true
diff --git a/rdflib/contrib/__init__.py b/rdflib/contrib/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/rdflib/contrib/graphdb/__init__.py b/rdflib/contrib/graphdb/__init__.py
new file mode 100644
index 000000000..2c112ef55
--- /dev/null
+++ b/rdflib/contrib/graphdb/__init__.py
@@ -0,0 +1,6 @@
+from rdflib.contrib.rdf4j import has_httpx
+
+if has_httpx:
+ from .client import GraphDBClient
+
+ __all__ = ["GraphDBClient"]
diff --git a/rdflib/contrib/graphdb/client.py b/rdflib/contrib/graphdb/client.py
new file mode 100644
index 000000000..10c396020
--- /dev/null
+++ b/rdflib/contrib/graphdb/client.py
@@ -0,0 +1,61 @@
+import httpx
+
+import rdflib.contrib.rdf4j
+from rdflib.contrib.rdf4j import RDF4JClient
+from rdflib.contrib.rdf4j.exceptions import (
+ RepositoryNotFoundError,
+ RepositoryNotHealthyError,
+)
+
+
+class Repository(rdflib.contrib.rdf4j.client.Repository):
+ """GraphDB Repository"""
+
+ def health(self, timeout: int = 5) -> bool:
+ """Repository health check.
+
+ Parameters:
+ timeout: A timeout parameter in seconds. If provided, the endpoint attempts
+ to retrieve the repository within this timeout. If not, the passive
+ check is performed.
+
+ Returns:
+ bool: True if the repository is healthy, otherwise an error is raised.
+
+ Raises:
+ RepositoryNotFoundError: If the repository is not found.
+ RepositoryNotHealthyError: If the repository is not healthy.
+ httpx.RequestError: On network/connection issues.
+ httpx.HTTPStatusError: Unhandled status code error.
+ """
+ try:
+ params = {"passive": str(timeout)}
+ response = self.http_client.get(
+ f"/repositories/{self.identifier}/health", params=params
+ )
+ response.raise_for_status()
+ return True
+ except httpx.HTTPStatusError as err:
+ if err.response.status_code == 404:
+ raise RepositoryNotFoundError(
+ f"Repository {self._identifier} not found."
+ )
+ raise RepositoryNotHealthyError(
+ f"Repository {self._identifier} is not healthy. {err.response.status_code} - {err.response.text}"
+ )
+ except httpx.RequestError:
+ raise
+
+
+class RepositoryManager(rdflib.contrib.rdf4j.client.RepositoryManager):
+ """GraphDB Repository Manager"""
+
+ def get(self, repository_id: str) -> Repository:
+ _repo = super().get(repository_id)
+ return Repository(_repo.identifier, _repo.http_client)
+
+
+class GraphDBClient(RDF4JClient):
+ """GraphDB Client"""
+
+ # TODO: GraphDB specific API methods.
diff --git a/rdflib/contrib/rdf4j/__init__.py b/rdflib/contrib/rdf4j/__init__.py
new file mode 100644
index 000000000..f0a2e8cf9
--- /dev/null
+++ b/rdflib/contrib/rdf4j/__init__.py
@@ -0,0 +1,10 @@
+from importlib.util import find_spec
+
+has_httpx = find_spec("httpx") is not None
+
+if has_httpx:
+ from .client import RDF4JClient
+
+ __all__ = ["RDF4JClient", "has_httpx"]
+else:
+ __all__ = ["has_httpx"]
diff --git a/rdflib/contrib/rdf4j/client.py b/rdflib/contrib/rdf4j/client.py
new file mode 100644
index 000000000..41b97b4e9
--- /dev/null
+++ b/rdflib/contrib/rdf4j/client.py
@@ -0,0 +1,1218 @@
+"""RDF4J client module."""
+
+from __future__ import annotations
+
+import contextlib
+import io
+import typing as t
+from dataclasses import dataclass
+from typing import Any, BinaryIO, Iterable
+
+import httpx
+
+from rdflib import BNode
+from rdflib.contrib.rdf4j.exceptions import (
+ RDF4JUnsupportedProtocolError,
+ RDFLibParserError,
+ RepositoryAlreadyExistsError,
+ RepositoryError,
+ RepositoryFormatError,
+ RepositoryNotFoundError,
+ RepositoryNotHealthyError,
+ TransactionClosedError,
+ TransactionCommitError,
+ TransactionPingError,
+ TransactionRollbackError,
+)
+from rdflib.contrib.rdf4j.util import (
+ build_context_param,
+ build_infer_param,
+ build_sparql_query_accept_header,
+ build_spo_param,
+ rdf_payload_to_stream,
+ validate_graph_name,
+ validate_no_bnodes,
+)
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph
+from rdflib.query import Result
+from rdflib.term import IdentifiedNode, Literal, URIRef
+
+SubjectType = t.Union[URIRef, None]
+PredicateType = t.Union[URIRef, None]
+ObjectType = t.Union[URIRef, Literal, None]
+
+
+@dataclass(frozen=True)
+class NamespaceListingResult:
+ """RDF4J namespace and prefix name result."""
+
+ prefix: str
+ namespace: str
+
+
+class RDF4JNamespaceManager:
+ """A namespace manager for RDF4J repositories.
+
+ Parameters:
+ identifier: The identifier of the repository.
+ http_client: The httpx.Client instance.
+ """
+
+ def __init__(self, identifier: str, http_client: httpx.Client):
+ self._identifier = identifier
+ self._http_client = http_client
+
+ @property
+ def http_client(self):
+ return self._http_client
+
+ @property
+ def identifier(self):
+ """Repository identifier."""
+ return self._identifier
+
+ def list(self) -> list[NamespaceListingResult]:
+ """List all namespace declarations in the repository.
+
+ Returns:
+ list[NamespaceListingResult]: List of namespace and prefix name results.
+
+ Raises:
+ RepositoryFormatError: If the response format is unrecognized.
+ """
+ headers = {
+ "Accept": "application/sparql-results+json",
+ }
+ response = self.http_client.get(
+ f"/repositories/{self.identifier}/namespaces", headers=headers
+ )
+ response.raise_for_status()
+
+ try:
+ data = response.json()
+ results = data["results"]["bindings"]
+ return [
+ NamespaceListingResult(
+ prefix=row["prefix"]["value"],
+ namespace=row["namespace"]["value"],
+ )
+ for row in results
+ ]
+ except (KeyError, ValueError) as err:
+ raise RepositoryFormatError(f"Unrecognised response format: {err}")
+
+ def clear(self):
+ """Clear all namespace declarations in the repository."""
+ headers = {
+ "Accept": "application/sparql-results+json",
+ }
+ response = self.http_client.delete(
+ f"/repositories/{self.identifier}/namespaces", headers=headers
+ )
+ response.raise_for_status()
+
+ def get(self, prefix: str) -> str | None:
+ """Get the namespace URI for a given prefix.
+
+ Parameters:
+ prefix: The prefix to lookup.
+
+ Returns:
+ The namespace URI or `None` if not found.
+ """
+ if not prefix:
+ raise ValueError("Prefix cannot be empty.")
+ headers = {
+ "Accept": "text/plain",
+ }
+ try:
+ response = self.http_client.get(
+ f"/repositories/{self.identifier}/namespaces/{prefix}", headers=headers
+ )
+ response.raise_for_status()
+ return response.text
+ except httpx.HTTPStatusError as err:
+ if err.response.status_code == 404:
+ return None
+ raise
+
+ def set(self, prefix: str, namespace: str):
+ """Set the namespace URI for a given prefix.
+
+ !!! note
+ If the prefix was previously mapped to a different namespace, this will be
+ overwritten.
+
+ Parameters:
+ prefix: The prefix to set.
+ namespace: The namespace URI to set.
+ """
+ if not prefix:
+ raise ValueError("Prefix cannot be empty.")
+ if not namespace:
+ raise ValueError("Namespace cannot be empty.")
+ headers = {
+ "Content-Type": "text/plain",
+ }
+ response = self.http_client.put(
+ f"/repositories/{self.identifier}/namespaces/{prefix}",
+ headers=headers,
+ content=namespace,
+ )
+ response.raise_for_status()
+
+ def remove(self, prefix: str):
+ """Remove the namespace declaration for a given prefix.
+
+ Parameters:
+ prefix: The prefix to remove.
+ """
+ if not prefix:
+ raise ValueError("Prefix cannot be empty.")
+ response = self.http_client.delete(
+ f"/repositories/{self.identifier}/namespaces/{prefix}"
+ )
+ response.raise_for_status()
+
+
+class GraphStoreManager:
+ """An RDF4J Graph Store Protocol Client.
+
+ Parameters:
+ identifier: The identifier of the repository.
+ http_client: The httpx.Client instance.
+ """
+
+ def __init__(self, identifier: str, http_client: httpx.Client):
+ self._identifier = identifier
+ self._http_client = http_client
+ self._content_type = "application/n-triples"
+
+ @property
+ def http_client(self):
+ return self._http_client
+
+ @property
+ def identifier(self):
+ """Repository identifier."""
+ return self._identifier
+
+ @staticmethod
+ def _build_graph_name_params(graph_name: URIRef | str):
+ params = {}
+ if (
+ isinstance(graph_name, URIRef)
+ and graph_name == DATASET_DEFAULT_GRAPH_ID
+ or isinstance(graph_name, str)
+ and graph_name == str(DATASET_DEFAULT_GRAPH_ID)
+ ):
+ # Do nothing; GraphDB does not work with `?default=`
+ # (note the trailing equal character), which is the default
+ # behavior of httpx when setting the param value to an empty string.
+ # httpx completely omits query parameters whose values are `None`, so that's
+ # not an option either.
+ # The workaround is to construct our own query parameter URL when we target
+ # the default graph.
+ pass
+ else:
+ params["graph"] = str(graph_name)
+ return params
+
+ def _build_url(self, graph_name: URIRef | str):
+ url = f"/repositories/{self.identifier}/rdf-graphs/service"
+ if isinstance(graph_name, URIRef) and graph_name == DATASET_DEFAULT_GRAPH_ID:
+ url += "?default"
+ return url
+
+ def get(self, graph_name: URIRef | str) -> Graph:
+ """Fetch all statements in the specified graph.
+
+ Parameters:
+ graph_name: The graph name of the graph.
+
+ For the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ Returns:
+ A [`Graph`][rdflib.graph.Graph] object containing all statements in the
+ graph.
+ """
+ if not graph_name:
+ raise ValueError("Graph name must be provided.")
+ validate_graph_name(graph_name)
+ headers = {
+ "Accept": self._content_type,
+ }
+ params = self._build_graph_name_params(graph_name) or None
+
+ response = self.http_client.get(
+ self._build_url(graph_name),
+ headers=headers,
+ params=params,
+ )
+ response.raise_for_status()
+
+ return Graph(identifier=graph_name).parse(
+ data=response.text, format=self._content_type
+ )
+
+ def add(self, graph_name: URIRef | str, data: str | bytes | BinaryIO | Graph):
+ """Add statements to the specified graph.
+
+ Parameters:
+ graph_name: The graph name of the graph.
+
+ For the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ data: The RDF data to add.
+ """
+ if not graph_name:
+ raise ValueError("Graph name must be provided.")
+ validate_graph_name(graph_name)
+ stream, should_close = rdf_payload_to_stream(data)
+ headers = {
+ "Content-Type": self._content_type,
+ }
+ params = self._build_graph_name_params(graph_name) or None
+ try:
+ response = self.http_client.post(
+ self._build_url(graph_name),
+ headers=headers,
+ params=params,
+ content=stream,
+ )
+ response.raise_for_status()
+ finally:
+ if should_close:
+ stream.close()
+
+ def overwrite(self, graph_name: URIRef | str, data: str | bytes | BinaryIO | Graph):
+ """Overwrite statements in the specified graph.
+
+ Parameters:
+ graph_name: The graph name of the graph.
+
+ For the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ data: The RDF data to overwrite with.
+ """
+ if not graph_name:
+ raise ValueError("Graph name must be provided.")
+ validate_graph_name(graph_name)
+ stream, should_close = rdf_payload_to_stream(data)
+ headers = {
+ "Content-Type": self._content_type,
+ }
+ params = self._build_graph_name_params(graph_name) or None
+ try:
+ response = self.http_client.put(
+ self._build_url(graph_name),
+ headers=headers,
+ params=params,
+ content=stream,
+ )
+ response.raise_for_status()
+ finally:
+ if should_close:
+ stream.close()
+
+ def clear(self, graph_name: URIRef | str):
+ """Clear all statements in the specified graph.
+
+ Parameters:
+ graph_name: The graph name of the graph.
+
+ For the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+ """
+ if not graph_name:
+ raise ValueError("Graph name must be provided.")
+ validate_graph_name(graph_name)
+ params = self._build_graph_name_params(graph_name) or None
+ response = self.http_client.delete(self._build_url(graph_name), params=params)
+ response.raise_for_status()
+
+
+@dataclass(frozen=True)
+class RepositoryListingResult:
+ """RDF4J repository listing result.
+
+ Parameters:
+ identifier: Repository identifier.
+ uri: Repository URI.
+ readable: Whether the repository is readable by the client.
+ writable: Whether the repository is writable by the client.
+ title: Repository title.
+ """
+
+ identifier: str
+ uri: str
+ readable: bool
+ writable: bool
+ title: str | None = None
+
+
+class Repository:
+ """RDF4J repository client.
+
+ Parameters:
+ identifier: The identifier of the repository.
+ http_client: The httpx.Client instance.
+ """
+
+ def __init__(self, identifier: str, http_client: httpx.Client):
+ self._identifier = identifier
+ self._http_client = http_client
+ self._namespace_manager: RDF4JNamespaceManager | None = None
+ self._graph_store_manager: GraphStoreManager | None = None
+
+ @property
+ def http_client(self):
+ return self._http_client
+
+ @property
+ def identifier(self):
+ """Repository identifier."""
+ return self._identifier
+
+ @property
+ def namespaces(self) -> RDF4JNamespaceManager:
+ """Namespace manager for the repository."""
+ if self._namespace_manager is None:
+ self._namespace_manager = RDF4JNamespaceManager(
+ self.identifier, self.http_client
+ )
+ return self._namespace_manager
+
+ @property
+ def graphs(self) -> GraphStoreManager:
+ """Graph store manager for the repository."""
+ if self._graph_store_manager is None:
+ self._graph_store_manager = GraphStoreManager(
+ self.identifier, self.http_client
+ )
+ return self._graph_store_manager
+
+ def health(self) -> bool:
+ """Repository health check.
+
+ Returns:
+ bool: True if the repository is healthy, otherwise an error is raised.
+
+ Raises:
+ RepositoryNotFoundError: If the repository is not found.
+ RepositoryNotHealthyError: If the repository is not healthy.
+ """
+ headers = {
+ "Content-Type": "application/sparql-query",
+ "Accept": "application/sparql-results+json",
+ }
+ try:
+ response = self.http_client.post(
+ f"/repositories/{self._identifier}", headers=headers, content="ASK {}"
+ )
+ response.raise_for_status()
+ return True
+ except httpx.HTTPStatusError as err:
+ if err.response.status_code == 404:
+ raise RepositoryNotFoundError(
+ f"Repository {self._identifier} not found."
+ )
+ raise RepositoryNotHealthyError(
+ f"Repository {self._identifier} is not healthy. {err.response.status_code} - {err.response.text}"
+ )
+
+ def size(self, graph_name: URIRef | Iterable[URIRef] | str | None = None) -> int:
+ """The number of statements in the repository or in the specified graph name.
+
+ Parameters:
+ graph_name: Graph name(s) to restrict to.
+
+ The default value `None` queries all graphs.
+
+ To query just the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ Returns:
+ The number of statements.
+
+ Raises:
+ RepositoryFormatError: Fails to parse the repository size.
+ """
+ validate_graph_name(graph_name)
+ params: dict[str, str] = {}
+ build_context_param(params, graph_name)
+ response = self.http_client.get(
+ f"/repositories/{self.identifier}/size", params=params
+ )
+ response.raise_for_status()
+ return self._to_size(response.text)
+
+ @staticmethod
+ def _to_size(size: str):
+ try:
+ value = int(size)
+ if value >= 0:
+ return value
+ raise ValueError(f"Invalid repository size: {value}")
+ except ValueError as err:
+ raise RepositoryFormatError(
+ f"Failed to parse repository size: {err}"
+ ) from err
+
+ def query(self, query: str, **kwargs):
+ """Execute a SPARQL query against the repository.
+
+ !!! note
+ A POST request is used by default. If any keyword arguments are provided,
+ a GET request is used instead, and the arguments are passed as query parameters.
+
+ Parameters:
+ query: The SPARQL query to execute.
+ **kwargs: Additional keyword arguments to include as query parameters
+ in the request. See
+ [RDF4J REST API - Execute SPARQL query](https://rdf4j.org/documentation/reference/rest-api/#tag/SPARQL/paths/~1repositories~1%7BrepositoryID%7D/get)
+ for the list of supported query parameters.
+ """
+ headers = {"Content-Type": "application/sparql-query"}
+ build_sparql_query_accept_header(query, headers)
+
+ if not kwargs:
+ response = self.http_client.post(
+ f"/repositories/{self.identifier}", headers=headers, content=query
+ )
+ else:
+ response = self.http_client.get(
+ f"/repositories/{self.identifier}",
+ headers=headers,
+ params={"query": query, **kwargs},
+ )
+ response.raise_for_status()
+ try:
+ return Result.parse(
+ io.BytesIO(response.content),
+ content_type=response.headers["Content-Type"].split(";")[0],
+ )
+ except KeyError as err:
+ raise RDFLibParserError(
+ f"Failed to parse SPARQL query result {response.headers.get('Content-Type')}: {err}"
+ ) from err
+
+ def update(self, query: str):
+ """Execute a SPARQL update operation on the repository.
+
+ Parameters:
+ query: The SPARQL update query to execute.
+ """
+ headers = {"Content-Type": "application/sparql-update"}
+ response = self.http_client.post(
+ f"/repositories/{self.identifier}/statements",
+ headers=headers,
+ content=query,
+ )
+ response.raise_for_status()
+
+ def graph_names(self) -> list[IdentifiedNode]:
+ """Get a list of all graph names in the repository.
+
+ Returns:
+ A list of graph names.
+
+ Raises:
+ RepositoryFormatError: Fails to parse the repository graph names.
+ """
+ headers = {
+ "Accept": "application/sparql-results+json",
+ }
+ response = self.http_client.get(
+ f"/repositories/{self.identifier}/contexts", headers=headers
+ )
+ response.raise_for_status()
+ try:
+ values: list[IdentifiedNode] = []
+ for row in response.json()["results"]["bindings"]:
+ value = row["contextID"]["value"]
+ value_type = row["contextID"]["type"]
+ if value_type == "uri":
+ values.append(URIRef(value))
+ elif value_type == "bnode":
+ values.append(BNode(value))
+ else:
+ raise ValueError(f"Invalid graph name type: {value_type}")
+ return values
+ except Exception as err:
+ raise RepositoryFormatError(
+ f"Failed to parse repository graph names: {err}"
+ ) from err
+
+ def get(
+ self,
+ subj: SubjectType = None,
+ pred: PredicateType = None,
+ obj: ObjectType = None,
+ graph_name: URIRef | Iterable[URIRef] | str | None = None,
+ infer: bool = True,
+ content_type: str | None = None,
+ ) -> Graph | Dataset:
+ """Get RDF statements from the repository matching the filtering parameters.
+
+ !!! Note
+ The terms for `subj`, `pred`, `obj` or `graph_name` cannot be
+ [`BNodes`][rdflib.term.BNode].
+
+ Parameters:
+ subj: Subject of the statement to filter by, or `None` to match all.
+ pred: Predicate of the statement to filter by, or `None` to match all.
+ obj: Object of the statement to filter by, or `None` to match all.
+ graph_name: Graph name(s) to restrict to.
+
+ The default value `None` queries all graphs.
+
+ To query just the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ infer: Specifies whether inferred statements should be included in the
+ result.
+ content_type: The content type of the response.
+ A triple-based format returns a [Graph][rdflib.graph.Graph], while a
+ quad-based format returns a [`Dataset`][rdflib.graph.Dataset].
+
+ Returns:
+ A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object
+ with the repository namespace prefixes bound to it.
+ """
+ validate_no_bnodes(subj, pred, obj, graph_name)
+ if content_type is None:
+ content_type = "application/n-quads"
+ headers = {"Accept": content_type}
+ params: dict[str, str] = {}
+ build_context_param(params, graph_name)
+ build_spo_param(params, subj, pred, obj)
+ build_infer_param(params, infer=infer)
+
+ response = self.http_client.get(
+ f"/repositories/{self.identifier}/statements",
+ headers=headers,
+ params=params,
+ )
+ response.raise_for_status()
+ triple_formats = [
+ "application/n-triples",
+ "text/turtle",
+ "application/rdf+xml",
+ ]
+ try:
+ if content_type in triple_formats:
+ retval = Graph().parse(data=response.text, format=content_type)
+ else:
+ retval = Dataset().parse(data=response.text, format=content_type)
+ for result in self.namespaces.list():
+ retval.bind(result.prefix, result.namespace, replace=True)
+ return retval
+ except Exception as err:
+ raise RDFLibParserError(f"Error parsing RDF: {err}") from err
+
+ def upload(
+ self,
+ data: str | bytes | BinaryIO | Graph | Dataset,
+ base_uri: str | None = None,
+ content_type: str | None = None,
+ ):
+ """Upload and append statements to the repository.
+
+ Parameters:
+ data: The RDF data to upload.
+ base_uri: The base URI to resolve against for any relative URIs in the data.
+ content_type: The content type of the data. Defaults to
+ `application/n-quads` when the value is `None`.
+ """
+ stream, should_close = rdf_payload_to_stream(data)
+ try:
+ headers = {"Content-Type": content_type or "application/n-quads"}
+ params = {}
+ if base_uri is not None:
+ params["baseURI"] = base_uri
+ response = self.http_client.post(
+ f"/repositories/{self.identifier}/statements",
+ headers=headers,
+ params=params,
+ content=stream,
+ )
+ response.raise_for_status()
+ finally:
+ if should_close:
+ stream.close()
+
+ def overwrite(
+ self,
+ data: str | bytes | BinaryIO | Graph | Dataset,
+ graph_name: URIRef | Iterable[URIRef] | str | None = None,
+ base_uri: str | None = None,
+ content_type: str | None = None,
+ ):
+ """Upload and overwrite statements in the repository.
+
+ Parameters:
+ data: The RDF data to upload.
+ graph_name: Graph name(s) to restrict to.
+
+ The default value `None` applies to all graphs.
+
+ To apply to just the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ base_uri: The base URI to resolve against for any relative URIs in the data.
+ content_type: The content type of the data. Defaults to
+ `application/n-quads` when the value is `None`.
+ """
+ stream, should_close = rdf_payload_to_stream(data)
+ validate_graph_name(graph_name)
+ try:
+ headers = {"Content-Type": content_type or "application/n-quads"}
+ params: dict[str, str] = {}
+ build_context_param(params, graph_name)
+ if base_uri is not None:
+ params["baseURI"] = base_uri
+ response = self.http_client.put(
+ f"/repositories/{self.identifier}/statements",
+ headers=headers,
+ params=params,
+ content=stream,
+ )
+ response.raise_for_status()
+ finally:
+ if should_close:
+ stream.close()
+
+ def delete(
+ self,
+ subj: SubjectType = None,
+ pred: PredicateType = None,
+ obj: ObjectType = None,
+ graph_name: URIRef | Iterable[URIRef] | str | None = None,
+ ) -> None:
+ """Deletes statements from the repository matching the filtering parameters.
+
+ !!! Note
+ The terms for `subj`, `pred`, `obj` or `graph_name` cannot be
+ [`BNodes`][rdflib.term.BNode].
+
+ Parameters:
+ subj: Subject of the statement to filter by, or `None` to match all.
+ pred: Predicate of the statement to filter by, or `None` to match all.
+ obj: Object of the statement to filter by, or `None` to match all.
+ graph_name: Graph name(s) to restrict to.
+
+ The default value `None` queries all graphs.
+
+ To query just the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+ """
+ validate_no_bnodes(subj, pred, obj, graph_name)
+ params: dict[str, str] = {}
+ build_context_param(params, graph_name)
+ build_spo_param(params, subj, pred, obj)
+
+ response = self.http_client.delete(
+ f"/repositories/{self.identifier}/statements",
+ params=params,
+ )
+ response.raise_for_status()
+
+ @contextlib.contextmanager
+ def transaction(self):
+ """Create a new transaction for the repository."""
+ with Transaction(self) as txn:
+ yield txn
+
+
+class Transaction:
+ """An RDF4J transaction.
+
+ Parameters:
+ repo: The repository instance.
+ """
+
+ def __init__(self, repo: Repository):
+ self._repo = repo
+ self._url: str | None = None
+
+ def __enter__(self):
+ self._url = self._start_transaction()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if not self.is_closed:
+ if exc_type is None:
+ self.commit()
+ else:
+ try:
+ self.rollback()
+ except Exception:
+ pass
+
+ # Propagate errors.
+ return False
+
+ @property
+ def repo(self):
+ """The repository instance."""
+ return self._repo
+
+ @property
+ def url(self):
+ """The transaction URL."""
+ return self._url
+
+ @property
+ def is_closed(self) -> bool:
+ """Whether the transaction is closed."""
+ return self._url is None
+
+ def _raise_for_closed(self):
+ if self.is_closed:
+ raise TransactionClosedError("The transaction has been closed.")
+
+ def _start_transaction(self) -> str:
+ response = self.repo.http_client.post(
+ f"/repositories/{self.repo.identifier}/transactions"
+ )
+ response.raise_for_status()
+ return response.headers["Location"]
+
+ def _close_transaction(self):
+ self._url = None
+
+ def open(self):
+ """Opens a transaction."""
+ self._url = self._start_transaction()
+
+ def commit(self):
+ """Commit the transaction.
+
+ Raises:
+ TransactionCommitError: If the transaction commit fails.
+ TransactionClosedError: If the transaction is closed.
+ """
+ self._raise_for_closed()
+ params = {"action": "COMMIT"}
+ response = self.repo.http_client.put(self.url, params=params)
+ if response.status_code != 200:
+ raise TransactionCommitError(
+ f"Transaction commit failed: {response.status_code} - {response.text}"
+ )
+ self._close_transaction()
+
+ def rollback(self):
+ """Roll back the transaction."""
+ response = self.repo.http_client.delete(self.url)
+ if response.status_code != 204:
+ raise TransactionRollbackError(
+ f"Transaction rollback failed: {response.status_code} - {response.text}"
+ )
+ self._close_transaction()
+
+ def ping(self):
+ """Ping the transaction.
+
+ Raises:
+            TransactionPingError: If the transaction ping fails.
+ TransactionClosedError: If the transaction is closed.
+ """
+ self._raise_for_closed()
+ params = {"action": "PING"}
+ response = self.repo.http_client.put(self.url, params=params)
+ if response.status_code != 200:
+ raise TransactionPingError(
+ f"Transaction ping failed: {response.status_code} - {response.text}"
+ )
+
+ def size(self, graph_name: URIRef | Iterable[URIRef] | str | None = None):
+ """The number of statements in the repository or in the specified graph name.
+
+ Parameters:
+ graph_name: Graph name(s) to restrict to.
+
+ The default value `None` queries all graphs.
+
+ To query just the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ Returns:
+ The number of statements.
+
+ Raises:
+ RepositoryFormatError: Fails to parse the repository size.
+ """
+ self._raise_for_closed()
+ validate_graph_name(graph_name)
+ params = {"action": "SIZE"}
+ build_context_param(params, graph_name)
+ response = self.repo.http_client.put(self.url, params=params)
+ response.raise_for_status()
+ return self.repo._to_size(response.text)
+
+ def query(self, query: str, **kwargs):
+ """Execute a SPARQL query against the repository.
+
+ Parameters:
+ query: The SPARQL query to execute.
+ **kwargs: Additional keyword arguments to include as query parameters
+ in the request. See
+ [RDF4J REST API - Execute SPARQL query](https://rdf4j.org/documentation/reference/rest-api/#tag/SPARQL/paths/~1repositories~1%7BrepositoryID%7D/get)
+ for the list of supported query parameters.
+ """
+ headers: dict[str, str] = {}
+ build_sparql_query_accept_header(query, headers)
+ params = {"action": "QUERY", "query": query}
+ response = self.repo.http_client.put(
+ self.url, headers=headers, params={**params, **kwargs}
+ )
+ response.raise_for_status()
+ try:
+ return Result.parse(
+ io.BytesIO(response.content),
+ content_type=response.headers["Content-Type"].split(";")[0],
+ )
+ except KeyError as err:
+ raise RDFLibParserError(
+ f"Failed to parse SPARQL query result {response.headers.get('Content-Type')}: {err}"
+ ) from err
+
+ def update(self, query: str, **kwargs):
+ """Execute a SPARQL update operation on the repository.
+
+ Parameters:
+ query: The SPARQL update query to execute.
+            **kwargs: Additional keyword arguments to include as query parameters.
+ See [RDF4J REST API - Execute a transaction action](https://rdf4j.org/documentation/reference/rest-api/#tag/Transactions/paths/~1repositories~1%7BrepositoryID%7D~1transactions~1%7BtransactionID%7D/put)
+ for the list of supported query parameters.
+ """
+ params = {"action": "UPDATE", "update": query}
+ response = self.repo.http_client.put(
+ self.url,
+ params={**params, **kwargs},
+ )
+ response.raise_for_status()
+
+ def upload(
+ self,
+ data: str | bytes | BinaryIO | Graph | Dataset,
+ base_uri: str | None = None,
+ content_type: str | None = None,
+ ):
+ """Upload and append statements to the repository.
+
+ Parameters:
+ data: The RDF data to upload.
+ base_uri: The base URI to resolve against for any relative URIs in the data.
+ content_type: The content type of the data. Defaults to
+ `application/n-quads` when the value is `None`.
+ """
+ stream, should_close = rdf_payload_to_stream(data)
+ headers = {"Content-Type": content_type or "application/n-quads"}
+ params = {"action": "ADD"}
+ if base_uri is not None:
+ params["baseURI"] = base_uri
+ try:
+ response = self.repo.http_client.put(
+ self.url,
+ headers=headers,
+ params=params,
+ content=stream,
+ )
+ response.raise_for_status()
+ finally:
+ if should_close:
+ stream.close()
+
+ def get(
+ self,
+ subj: SubjectType = None,
+ pred: PredicateType = None,
+ obj: ObjectType = None,
+ graph_name: URIRef | Iterable[URIRef] | str | None = None,
+ infer: bool = True,
+ content_type: str | None = None,
+ ) -> Graph | Dataset:
+ """Get RDF statements from the repository matching the filtering parameters.
+
+ !!! Note
+ The terms for `subj`, `pred`, `obj` or `graph_name` cannot be
+ [`BNodes`][rdflib.term.BNode].
+
+ Parameters:
+ subj: Subject of the statement to filter by, or `None` to match all.
+ pred: Predicate of the statement to filter by, or `None` to match all.
+ obj: Object of the statement to filter by, or `None` to match all.
+ graph_name: Graph name(s) to restrict to.
+
+ The default value `None` queries all graphs.
+
+ To query just the default graph, use
+ [`DATASET_DEFAULT_GRAPH_ID`][rdflib.graph.DATASET_DEFAULT_GRAPH_ID].
+
+ infer: Specifies whether inferred statements should be included in the
+ result.
+ content_type: The content type of the response.
+ A triple-based format returns a [Graph][rdflib.graph.Graph], while a
+ quad-based format returns a [`Dataset`][rdflib.graph.Dataset].
+
+ Returns:
+ A [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset] object
+ with the repository namespace prefixes bound to it.
+ """
+ validate_no_bnodes(subj, pred, obj, graph_name)
+ if content_type is None:
+ content_type = "application/n-quads"
+ headers = {"Accept": content_type}
+ params: dict[str, str] = {"action": "GET"}
+ build_context_param(params, graph_name)
+ build_spo_param(params, subj, pred, obj)
+ build_infer_param(params, infer=infer)
+
+ response = self.repo.http_client.put(
+ self.url,
+ headers=headers,
+ params=params,
+ )
+ response.raise_for_status()
+ triple_formats = [
+ "application/n-triples",
+ "text/turtle",
+ "application/rdf+xml",
+ ]
+ try:
+ if content_type in triple_formats:
+ retval = Graph().parse(data=response.text, format=content_type)
+ else:
+ retval = Dataset().parse(data=response.text, format=content_type)
+ for result in self.repo.namespaces.list():
+ retval.bind(result.prefix, result.namespace, replace=True)
+ return retval
+ except Exception as err:
+ raise RDFLibParserError(f"Error parsing RDF: {err}") from err
+
+ def delete(
+ self,
+ data: str | bytes | BinaryIO | Graph | Dataset,
+ base_uri: str | None = None,
+ content_type: str | None = None,
+ ) -> None:
+ """Delete statements from the repository.
+
+ !!! Note
+ This function operates differently to [`Repository.delete`][] as it does
+ not use filter parameters. Instead, it expects a data payload.
+ See the notes from [graphdb.js#Deleting](https://github.com/Ontotext-AD/graphdb.js?tab=readme-ov-file#deleting-1)
+ for more information.
+
+ Parameters:
+ data: The RDF data to upload.
+ base_uri: The base URI to resolve against for any relative URIs in the data.
+ content_type: The content type of the data. Defaults to
+ `application/n-quads` when the value is `None`.
+ """
+ params: dict[str, str] = {"action": "DELETE"}
+ stream, should_close = rdf_payload_to_stream(data)
+ headers = {"Content-Type": content_type or "application/n-quads"}
+ if base_uri is not None:
+ params["baseURI"] = base_uri
+ try:
+ response = self.repo.http_client.put(
+ self.url,
+ headers=headers,
+ params=params,
+ content=stream,
+ )
+ response.raise_for_status()
+ finally:
+ if should_close:
+ stream.close()
+
+
class RepositoryManager:
    """A client to manage server-level repository operations.

    Parameters:
        http_client: The httpx.Client instance.
    """

    def __init__(self, http_client: httpx.Client):
        self._http_client = http_client

    @property
    def http_client(self):
        # The underlying httpx.Client, shared with Repository instances.
        return self._http_client

    def list(self) -> list[RepositoryListingResult]:
        """List all available repositories.

        Returns:
            list[RepositoryListingResult]: List of repository results.

        Raises:
            RepositoryFormatError: If the response format is unrecognized.
        """
        headers = {
            "Accept": "application/sparql-results+json",
        }
        response = self.http_client.get("/repositories", headers=headers)
        response.raise_for_status()

        try:
            data = response.json()
            results = data["results"]["bindings"]
            return [
                RepositoryListingResult(
                    identifier=repo["id"]["value"],
                    uri=repo["uri"]["value"],
                    readable=repo["readable"]["value"],
                    writable=repo["writable"]["value"],
                    # "title" is optional in the SPARQL results binding.
                    title=repo.get("title", {}).get("value"),
                )
                for repo in results
            ]
        except (KeyError, ValueError) as err:
            # Chain the original error so the failing key or JSON-parse step
            # remains visible in the traceback.
            raise RepositoryFormatError(
                f"Unrecognised response format: {err}"
            ) from err

    def get(self, repository_id: str) -> Repository:
        """Get a repository by ID.

        !!! Note
            This performs a health check before returning the repository object.

        Parameters:
            repository_id: The identifier of the repository.

        Returns:
            Repository: The repository instance.

        Raises:
            RepositoryNotFoundError: If the repository is not found.
            RepositoryNotHealthyError: If the repository is not healthy.
        """
        repo = Repository(repository_id, self.http_client)
        repo.health()
        return repo

    def create(
        self, repository_id: str, data: str, content_type: str = "text/turtle"
    ) -> Repository:
        """Create a new repository.

        Parameters:
            repository_id: The identifier of the repository.
            data: The repository configuration in RDF.
            content_type: The repository configuration content type.

        Returns:
            Repository: The newly created repository instance.

        Raises:
            RepositoryAlreadyExistsError: If the repository already exists.
            RepositoryNotHealthyError: If the repository is not healthy.
        """
        try:
            headers = {"Content-Type": content_type}
            response = self.http_client.put(
                f"/repositories/{repository_id}", headers=headers, content=data
            )
            response.raise_for_status()
            return self.get(repository_id)
        except httpx.HTTPStatusError as err:
            if err.response.status_code == 409:
                raise RepositoryAlreadyExistsError(
                    f"Repository {repository_id} already exists."
                ) from err
            raise

    def delete(self, repository_id: str) -> None:
        """Delete a repository.

        Parameters:
            repository_id: The identifier of the repository.

        Raises:
            RepositoryNotFoundError: If the repository is not found.
            RepositoryError: If the repository is not deleted successfully.
        """
        try:
            response = self.http_client.delete(f"/repositories/{repository_id}")
            response.raise_for_status()
            # RDF4J signals a successful delete with 204 No Content.
            if response.status_code != 204:
                raise RepositoryError(
                    f"Unexpected response status code when deleting repository {repository_id}: {response.status_code} - {response.text.strip()}"
                )
        except httpx.HTTPStatusError as err:
            if err.response.status_code == 404:
                raise RepositoryNotFoundError(
                    f"Repository {repository_id} not found."
                ) from err
            raise
+
+
class RDF4JClient:
    """RDF4J client.

    Parameters:
        base_url: The base URL of the RDF4J server.
        auth: Authentication tuple (username, password).
        timeout: Request timeout in seconds (default: 30.0).
        kwargs: Additional keyword arguments to pass to the httpx.Client.

    Raises:
        RDF4JUnsupportedProtocolError: If the protocol version cannot be
            determined or is lower than 12.
    """

    def __init__(
        self,
        base_url: str,
        auth: tuple[str, str] | None = None,
        timeout: float = 30.0,
        **kwargs: Any,
    ):
        if not base_url.endswith("/"):
            base_url += "/"
        self._http_client = httpx.Client(
            base_url=base_url, auth=auth, timeout=timeout, **kwargs
        )
        self._repository_manager: RepositoryManager | None = None
        try:
            protocol_version = self.protocol
        except (httpx.HTTPError, ValueError) as err:
            # Close the client on *any* failure to determine the protocol
            # version: transport errors (httpx.RequestError), HTTP error
            # statuses raised by raise_for_status (httpx.HTTPStatusError),
            # or an unparsable version string (ValueError from float()).
            # Otherwise the underlying connection pool would leak.
            self.close()
            raise RDF4JUnsupportedProtocolError(
                f"Failed to check protocol version: {err}"
            ) from err
        if protocol_version < 12:
            self.close()
            raise RDF4JUnsupportedProtocolError(
                f"RDF4J server protocol version {protocol_version} is not supported. Minimum required version is 12."
            )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Context-manager exit always releases the HTTP connection pool.
        self.close()

    @property
    def http_client(self):
        # The underlying httpx.Client.
        return self._http_client

    @property
    def repositories(self) -> RepositoryManager:
        """Server-level repository management operations."""
        # Lazily constructed and cached on first access.
        if self._repository_manager is None:
            self._repository_manager = RepositoryManager(self.http_client)
        return self._repository_manager

    @property
    def protocol(self) -> float:
        """The RDF4J REST API protocol version.

        Returns:
            The protocol version number.
        """
        response = self.http_client.get("/protocol", headers={"Accept": "text/plain"})
        response.raise_for_status()
        return float(response.text.strip())

    def close(self):
        """Close the underlying httpx.Client."""
        self.http_client.close()
diff --git a/rdflib/contrib/rdf4j/exceptions.py b/rdflib/contrib/rdf4j/exceptions.py
new file mode 100644
index 000000000..273ae5e6d
--- /dev/null
+++ b/rdflib/contrib/rdf4j/exceptions.py
@@ -0,0 +1,49 @@
+"""RDF4J exceptions."""
+
+
+class RepositoryError(Exception):
+ """Raised when interactions on a repository result in an error."""
+
+
+class RepositoryFormatError(RepositoryError):
+ """Raised when the repository format is invalid."""
+
+
+class RepositoryNotFoundError(RepositoryError):
+ """Raised when the repository is not found."""
+
+
+class RepositoryNotHealthyError(RepositoryError):
+ """Raised when the repository is not healthy."""
+
+
+class RepositoryAlreadyExistsError(RepositoryError):
+ """Raised when the repository already exists."""
+
+
+class RDF4JUnsupportedProtocolError(Exception):
+ """Raised when the server does not support the protocol version."""
+
+
+class RDFLibParserError(Exception):
+ """Raised when there is an error parsing the RDF document."""
+
+
+class RepositoryTransactionError(Exception):
+ """Raised when there is an error with the transaction."""
+
+
+class TransactionClosedError(RepositoryTransactionError):
+ """Raised when the transaction has been closed."""
+
+
+class TransactionPingError(RepositoryTransactionError):
+ """Raised when there is an error pinging the transaction."""
+
+
+class TransactionCommitError(RepositoryTransactionError):
+ """Raised when there is an error committing the transaction."""
+
+
+class TransactionRollbackError(RepositoryTransactionError):
+ """Raised when there is an error rolling back the transaction."""
diff --git a/rdflib/contrib/rdf4j/util.py b/rdflib/contrib/rdf4j/util.py
new file mode 100644
index 000000000..9d99a8007
--- /dev/null
+++ b/rdflib/contrib/rdf4j/util.py
@@ -0,0 +1,182 @@
+"""RDF4J utility functions."""
+
+from __future__ import annotations
+
+import io
+import typing as t
+
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph
+from rdflib.plugins.sparql.processor import prepareQuery
+from rdflib.term import BNode, IdentifiedNode, URIRef
+
+if t.TYPE_CHECKING:
+ from rdflib.contrib.rdf4j.client import ObjectType, PredicateType, SubjectType
+
+
def build_context_param(
    params: dict[str, str],
    graph_name: IdentifiedNode | t.Iterable[IdentifiedNode] | str | None = None,
) -> None:
    """Build the RDF4J http context query parameters dictionary.

    !!! Note
        This mutates the params dictionary key `context`.

    Parameters:
        params: The `httpx.Request` parameter dictionary.
        graph_name: The graph name or iterable of graph names.

            This is the `context` query parameter value.
    """
    if graph_name is None:
        return
    if isinstance(graph_name, IdentifiedNode):
        if graph_name == DATASET_DEFAULT_GRAPH_ID:
            # Special RDF4J null value for context-less statements.
            params["context"] = "null"
        else:
            params["context"] = graph_name.n3()
    elif isinstance(graph_name, str):
        # Bare strings are interpreted as graph IRIs.
        params["context"] = URIRef(graph_name).n3()
    elif isinstance(graph_name, t.Iterable):
        # Accept plain strings inside the iterable too, mirroring the
        # single-value str handling above. IdentifiedNode must be checked
        # first since it subclasses str.
        graph_names = ",".join(
            x.n3() if isinstance(x, IdentifiedNode) else URIRef(x).n3()
            for x in graph_name
        )
        params["context"] = graph_names
+
+
def build_spo_param(
    params: dict[str, str],
    subj: SubjectType = None,
    pred: PredicateType = None,
    obj: ObjectType = None,
) -> None:
    """Add the RDF4J `subj`, `pred`, and `obj` query parameters to `params`.

    !!! Note
        This mutates the params dictionary keys `subj`, `pred`, and `obj`.

    Parameters:
        params: The `httpx.Request` parameter dictionary.
        subj: The `subj` query parameter value.
        pred: The `pred` query parameter value.
        obj: The `obj` query parameter value.
    """
    # Each term is serialized with .n3(); absent (None) terms are omitted.
    for key, term in (("subj", subj), ("pred", pred), ("obj", obj)):
        if term is not None:
            params[key] = term.n3()
+
+
def build_infer_param(
    params: dict[str, str],
    infer: bool = True,
) -> None:
    """Add the RDF4J `infer` query parameter to `params`.

    !!! Note
        This mutates the params dictionary key `infer`.

    Parameters:
        params: The `httpx.Request` parameter dictionary.
        infer: The `infer` query parameter value.
    """
    if infer:
        # Inference is the server default, so no parameter is sent.
        return
    params["infer"] = "false"
+
+
+def rdf_payload_to_stream(
+ data: str | bytes | t.BinaryIO | Graph | Dataset,
+) -> tuple[t.BinaryIO, bool]:
+ """Convert an RDF payload into a file-like object.
+
+ Parameters:
+ data: The RDF payload.
+
+ This can be a python `str`, `bytes`, `BinaryIO`, or a
+ [`Graph`][rdflib.graph.Graph] or [`Dataset`][rdflib.graph.Dataset].
+
+ Returns:
+ A tuple containing the file-like object and a boolean indicating whether the
+ immediate caller should close the stream.
+ """
+ stream: t.BinaryIO
+ if isinstance(data, str):
+ # Check if it looks like a file path. Assumes file path length is less than 260.
+ if "\n" not in data and len(data) < 260:
+ try:
+ stream = open(data, "rb")
+ should_close = True
+ except (FileNotFoundError, OSError):
+ # Treat as raw string content
+ stream = io.BytesIO(data.encode("utf-8"))
+ should_close = False
+ else:
+ # Treat as raw string content
+ stream = io.BytesIO(data.encode("utf-8"))
+ should_close = False
+ elif isinstance(data, bytes):
+ stream = io.BytesIO(data)
+ should_close = False
+ elif isinstance(data, (Graph, Dataset)):
+ if data.context_aware:
+ stream = io.BytesIO(
+ data.serialize(format="application/n-quads", encoding="utf-8")
+ )
+ else:
+ stream = io.BytesIO(
+ data.serialize(format="application/n-triples", encoding="utf-8")
+ )
+ should_close = True
+ else:
+ # Assume it's already a file-like object
+ stream = data
+ should_close = False
+
+ return stream, should_close
+
+
def build_sparql_query_accept_header(query: str, headers: dict[str, str]) -> None:
    """Build the SPARQL query accept header.

    !!! Note
        This mutates the headers dictionary key `Accept`.

    Parameters:
        query: The SPARQL query.
        headers: The `httpx.Request` headers dictionary to mutate.

    Raises:
        ValueError: If the parsed query is not a SELECT, ASK, CONSTRUCT, or
            DESCRIBE query.
    """
    prepared_query = prepareQuery(query)
    if prepared_query.algebra.name in ("SelectQuery", "AskQuery"):
        # Tabular/boolean results use the SPARQL JSON results format.
        headers["Accept"] = "application/sparql-results+json"
    elif prepared_query.algebra.name in ("ConstructQuery", "DescribeQuery"):
        # Graph-producing queries are requested as N-Triples.
        headers["Accept"] = "application/n-triples"
    else:
        raise ValueError(f"Unsupported query type: {prepared_query.algebra.name}")
+
+
def validate_graph_name(graph_name: URIRef | t.Iterable[URIRef] | str | None) -> None:
    """Raise ValueError if ``graph_name`` (or any member of an iterable of
    graph names) is a blank node.
    """
    # "and" binds tighter than "or" here: a lone BNode matches the first
    # test; otherwise iterables are scanned member by member.
    if (
        isinstance(graph_name, BNode)
        or isinstance(graph_name, t.Iterable)
        and any(isinstance(x, BNode) for x in graph_name)
    ):
        raise ValueError("Graph name must not be a BNode.")
+
+
def validate_no_bnodes(
    subj: SubjectType,
    pred: PredicateType,
    obj: ObjectType,
    graph_name: URIRef | t.Iterable[URIRef] | str | None,
) -> None:
    """Validate that the subject, predicate, and object are not BNodes."""
    terms = (subj, pred, obj, graph_name)
    if any(isinstance(term, BNode) for term in terms):
        raise ValueError(
            "Subject, predicate, and object must not be a BNode: "
            f"{subj}, {pred}, {obj}"
        )
    # Iterables of graph names are checked member by member.
    validate_graph_name(graph_name)
diff --git a/rdflib/plugins/stores/rdf4j.py b/rdflib/plugins/stores/rdf4j.py
new file mode 100644
index 000000000..7f1bb5cad
--- /dev/null
+++ b/rdflib/plugins/stores/rdf4j.py
@@ -0,0 +1,223 @@
+from __future__ import annotations
+
+from textwrap import dedent
+from typing import Any, Generator, Iterable, Iterator, Mapping, Optional, Tuple
+
+from rdflib import Graph
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import RepositoryNotFoundError
+from rdflib.graph import (
+ DATASET_DEFAULT_GRAPH_ID,
+ Dataset,
+ _ContextType,
+ _QuadType,
+ _TriplePatternType,
+ _TripleType,
+)
+from rdflib.store import VALID_STORE, Store
+from rdflib.term import BNode, Node, URIRef, Variable
+
+if has_httpx:
+ from rdflib.contrib.rdf4j import RDF4JClient
+
+
+def _inject_prefixes(query: str, extra_bindings: Mapping[str, Any]) -> str:
+ bindings = set(list(extra_bindings.items()))
+ if not bindings:
+ return query
+ return "\n".join(
+ [
+ "\n".join(["PREFIX %s: <%s>" % (k, v) for k, v in bindings]),
+ "", # separate ns_bindings from query with an empty line
+ query,
+ ]
+ )
+
+
def _node_to_sparql(node: Node) -> str:
    """Serialize ``node`` to its SPARQL/N3 text form.

    Raises:
        Exception: If ``node`` is a blank node, which SPARQL-based stores
            cannot address reliably.
    """
    if isinstance(node, BNode):
        # Fixed: the two implicitly-concatenated literals previously rendered
        # as "...BNodesSee http..." with no separator.
        raise Exception(
            "SPARQL-based stores do not support BNodes. "
            "See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes"
        )
    return node.n3()
+
+
def _default_repo_config(repository_id: str) -> str:
    """Return a default NativeStore repository configuration in Turtle.

    NOTE(review): the ``PREFIX config:`` line below has no namespace IRI after
    it — this looks like it was lost; confirm against the RDF4J repository
    configuration documentation before relying on this template.
    """
    return dedent(
        f"""
        PREFIX config:

        [] a config:Repository ;
            config:rep.id "{repository_id}" ;
            config:rep.impl
                [
                    config:rep.type "openrdf:SailRepository" ;
                    config:sail.impl
                        [
                            config:native.tripleIndexers "spoc,posc" ;
                            config:sail.defaultQueryEvaluationMode "STANDARD" ;
                            config:sail.iterationCacheSyncThreshold "10000" ;
                            config:sail.type "openrdf:NativeStore" ;
                        ] ;
                ] ;
        .
        """
    )
+
+
class RDF4JStore(Store):
    """An RDF4J store.

    An rdflib Store implementation backed by an RDF4J-compatible HTTP server.

    Parameters:
        base_url: The base URL of the RDF4J server.
        repository_id: The identifier of the repository to use.
        configuration: The repository configuration in RDF. When ``None``,
            a default NativeStore configuration is used.
        auth: Authentication tuple (username, password).
        timeout: Request timeout in seconds.
        create: Create the repository if it does not already exist.
        kwargs: Additional keyword arguments passed to the httpx.Client.
    """

    context_aware = True
    formula_aware = False
    transaction_aware = False
    graph_aware = True

    def __init__(
        self,
        base_url: str,
        repository_id: str,
        configuration: str | None = None,
        auth: tuple[str, str] | None = None,
        timeout: float = 30.0,
        create: bool = False,
        **kwargs,
    ):
        if configuration is None:
            configuration = _default_repo_config(repository_id)
        self._client = RDF4JClient(base_url, auth, timeout, **kwargs)
        self._repository_id = repository_id
        self._repo = None
        self.open(configuration, create)
        super().__init__()

    @property
    def client(self):
        # The underlying RDF4JClient.
        return self._client

    @property
    def repo(self):
        # Lazily resolved Repository handle; `get` performs a health check.
        if self._repo is None:
            self._repo = self.client.repositories.get(self._repository_id)
        return self._repo

    def open(
        self, configuration: str | tuple[str, str] | None, create: bool = False
    ) -> int | None:
        """Connect to the repository, optionally creating it.

        Raises:
            RepositoryNotFoundError: If the repository does not exist and
                ``create`` is false.
        """
        try:
            # Try connecting to the repository.
            self.repo.health()
        except RepositoryNotFoundError as err:
            if create:
                self.client.repositories.create(self._repository_id, configuration)
                self.repo.health()
            else:
                # Raise the specific exception type (previously a bare
                # Exception) with the original error chained.
                raise RepositoryNotFoundError(
                    f"Repository {self._repository_id} not found."
                ) from err

        return VALID_STORE

    def close(self, commit_pending_transaction: bool = False) -> None:
        """Close the underlying HTTP client."""
        self.client.close()

    def add(
        self,
        triple: _TripleType,
        context: _ContextType | None = None,
        quoted: bool = False,
    ) -> None:
        """Add a triple to the store, optionally into a named graph."""
        s, p, o = triple
        # An empty graph name targets the RDF4J default graph.
        graph_name = (
            ""
            if context is None or context.identifier == DATASET_DEFAULT_GRAPH_ID
            else context.identifier.n3()
        )
        statement = f"{s.n3()} {p.n3()} {o.n3()} {graph_name} ."
        self.repo.upload(statement)

    def addN(self, quads: Iterable[_QuadType]) -> None:  # noqa: N802
        """Add a batch of quads to the store in a single upload."""
        statements: list[str] = []
        for s, p, o, c in quads:
            graph_name = (
                ""
                if c is None or c.identifier == DATASET_DEFAULT_GRAPH_ID
                else c.identifier.n3()
            )
            statements.append(f"{s.n3()} {p.n3()} {o.n3()} {graph_name} .\n")
        # Join once rather than building the payload by repeated string
        # concatenation (which is quadratic in the number of quads).
        self.repo.upload("".join(statements))

    def remove(
        self,
        triple: _TriplePatternType,
        context: Optional[_ContextType] = None,
    ) -> None:
        """Remove statements matching the given pattern."""
        s, p, o = triple
        g = context.identifier if context is not None else None
        self.repo.delete(s, p, o, g)

    def triples(
        self,
        triple_pattern: _TriplePatternType,
        context: Optional[_ContextType] = None,
    ) -> Iterator[Tuple[_TripleType, Iterator[Optional[_ContextType]]]]:
        """Yield triples matching the pattern, with their contexts."""
        s, p, o = triple_pattern
        graph_name = context.identifier if context is not None else None
        result_graph = self.repo.get(s, p, o, graph_name)
        if isinstance(result_graph, Dataset):
            for s, p, o, g in result_graph:
                yield (s, p, o), iter([Graph(self, identifier=g)])
        else:
            # It's a Graph object.
            for triple in result_graph:
                # Returning None for _ContextType as it's not used by the caller.
                yield triple, iter([None])

    def contexts(
        self, triple: Optional[_TripleType] = None
    ) -> Generator[_ContextType, None, None]:
        """Yield the graphs in the store, optionally filtered by a triple."""
        if triple is None:
            for graph_name in self.repo.graph_names():
                yield Graph(self, identifier=graph_name)
        else:
            s, p, o = triple
            # Use explicit `is not None` checks so falsy-but-present terms
            # (e.g. Literal("")) are not silently treated as wildcards.
            params = (
                _node_to_sparql(s if s is not None else Variable("s")),
                _node_to_sparql(p if p is not None else Variable("p")),
                _node_to_sparql(o if o is not None else Variable("o")),
            )
            query = (
                "SELECT DISTINCT ?graph WHERE { GRAPH ?graph { %s %s %s } }" % params
            )
            result = self.repo.query(query)
            for row in result:
                yield Graph(self, identifier=row["graph"])

    def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None:
        """Bind a namespace prefix on the server."""
        # Note: RDF4J namespaces always override.
        self.repo.namespaces.set(prefix, namespace)

    def prefix(self, namespace: URIRef) -> Optional[str]:
        """Return the prefix bound to ``namespace``, if any."""
        namespace_prefixes = dict(
            [(x.namespace, x.prefix) for x in self.repo.namespaces.list()]
        )
        return namespace_prefixes.get(str(namespace))

    def namespace(self, prefix: str) -> Optional[URIRef]:
        """Return the namespace bound to ``prefix``, if any."""
        result = self.repo.namespaces.get(prefix)
        return URIRef(result) if result is not None else None

    def namespaces(self) -> Iterator[Tuple[str, URIRef]]:
        """Yield all (prefix, namespace) bindings from the server."""
        for result in self.repo.namespaces.list():
            yield result.prefix, URIRef(result.namespace)

    def add_graph(self, graph: Graph) -> None:
        """Create a named graph on the server."""
        if graph.identifier != DATASET_DEFAULT_GRAPH_ID:
            # Note: this is a no-op since RDF4J doesn't support empty named graphs.
            self.repo.update(f"CREATE SILENT GRAPH {graph.identifier.n3()}")

    def remove_graph(self, graph: Graph) -> None:
        """Remove all statements in the given graph."""
        self.repo.graphs.clear(graph.identifier)

    def __len__(self, context: _ContextType | None = None) -> int:
        """Return the number of statements, optionally within one context."""
        return self.repo.size(None if context is None else context.identifier)
diff --git a/test/test_rdf4j/data/quads-1.nq b/test/test_rdf4j/data/quads-1.nq
new file mode 100644
index 000000000..332b48ded
--- /dev/null
+++ b/test/test_rdf4j/data/quads-1.nq
@@ -0,0 +1,2 @@
+ .
+ .
diff --git a/test/test_rdf4j/data/quads-2.nq b/test/test_rdf4j/data/quads-2.nq
new file mode 100644
index 000000000..cbd5b8631
--- /dev/null
+++ b/test/test_rdf4j/data/quads-2.nq
@@ -0,0 +1 @@
+ .
diff --git a/test/test_rdf4j/data/quads-3.nq b/test/test_rdf4j/data/quads-3.nq
new file mode 100644
index 000000000..924f94d9e
--- /dev/null
+++ b/test/test_rdf4j/data/quads-3.nq
@@ -0,0 +1 @@
+_:b-test _:c _:graph .
diff --git a/test/test_rdf4j/test_e2e/conftest.py b/test/test_rdf4j/test_e2e/conftest.py
new file mode 100644
index 000000000..9a087f205
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/conftest.py
@@ -0,0 +1,76 @@
+import pathlib
+from importlib.util import find_spec
+
+import pytest
+
+from rdflib import Dataset
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import RepositoryNotFoundError
+from rdflib.namespace import NamespaceManager
+from rdflib.plugins.stores.rdf4j import RDF4JStore
+
+has_testcontainers = find_spec("testcontainers") is not None
+
+pytestmark = pytest.mark.skipif(
+ not (has_httpx and has_testcontainers),
+ reason="skipping rdf4j tests, httpx or testcontainers not available",
+)
+
+if has_httpx and has_testcontainers:
+ from testcontainers.core.container import DockerContainer
+ from testcontainers.core.image import DockerImage
+ from testcontainers.core.waiting_utils import wait_for_logs
+
+ from rdflib.contrib.rdf4j import RDF4JClient
+
+ GRAPHDB_PORT = 7200
+
+ @pytest.fixture(scope="package")
+ def graphdb_container():
+ with DockerImage(str(pathlib.Path(__file__).parent / "docker")) as image:
+ container = DockerContainer(str(image))
+ container.with_exposed_ports(GRAPHDB_PORT)
+ container.start()
+ wait_for_logs(container, "Started GraphDB")
+ yield container
+ container.stop()
+
+ @pytest.fixture(scope="function")
+ def client(graphdb_container: DockerContainer):
+ port = graphdb_container.get_exposed_port(7200)
+ with RDF4JClient(
+ f"http://localhost:{port}/", auth=("admin", "admin")
+ ) as client:
+ yield client
+ try:
+ client.repositories.delete("test-repo")
+ except (RepositoryNotFoundError, RuntimeError):
+ pass
+
+ @pytest.fixture(scope="function")
+ def repo(client: RDF4JClient):
+ config_path = (
+ pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl"
+ )
+ with open(config_path) as file:
+ config = file.read()
+
+ repo = client.repositories.create("test-repo", config)
+ assert repo.identifier == "test-repo"
+ yield repo
+ client.repositories.delete("test-repo")
+
+ @pytest.fixture(scope="function")
+ def ds(graphdb_container: DockerContainer):
+ port = graphdb_container.get_exposed_port(7200)
+ store = RDF4JStore(
+ f"http://localhost:{port}/",
+ "test-repo",
+ auth=("admin", "admin"),
+ create=True,
+ )
+ ds = Dataset(store)
+ ds.namespace_manager = NamespaceManager(ds, "none")
+ yield ds
+ ds.store.client.repositories.delete("test-repo") # type: ignore[attr-defined]
+ ds.close()
diff --git a/test/test_rdf4j/test_e2e/docker/Dockerfile b/test/test_rdf4j/test_e2e/docker/Dockerfile
new file mode 100644
index 000000000..0920351df
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/docker/Dockerfile
@@ -0,0 +1,4 @@
+FROM ontotext/graphdb:10.8.4
+
+COPY settings.txt /opt/graphdb/home/data/settings.js
+COPY users.txt /opt/graphdb/home/data/users.js
diff --git a/test/test_rdf4j/test_e2e/docker/settings.txt b/test/test_rdf4j/test_e2e/docker/settings.txt
new file mode 100644
index 000000000..d861a36c4
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/docker/settings.txt
@@ -0,0 +1,6 @@
+{
+ "properties" : {
+ "current.location" : "",
+ "security.enabled" : "true"
+ }
+}
diff --git a/test/test_rdf4j/test_e2e/docker/users.txt b/test/test_rdf4j/test_e2e/docker/users.txt
new file mode 100644
index 000000000..5480bf133
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/docker/users.txt
@@ -0,0 +1,42 @@
+{
+ "users" : {
+ "admin" : {
+ "username" : "admin",
+ "password" : "{bcrypt}$2a$10$3EYdj3fBH0/.aA/fRodud.T2YPtSVlC7J/d.9Jk8v1pHd1ar8HEau",
+ "grantedAuthorities" : [ "ROLE_ADMIN" ],
+ "appSettings" : {
+ "DEFAULT_INFERENCE" : true,
+ "DEFAULT_VIS_GRAPH_SCHEMA" : true,
+ "DEFAULT_SAMEAS" : true,
+ "IGNORE_SHARED_QUERIES" : false,
+ "EXECUTE_COUNT" : true
+ },
+ "dateCreated" : 1761543222200,
+ "gptThreads" : [ ]
+ }
+ },
+ "user_queries" : {
+ "admin" : {
+ "SPARQL Select template" : {
+ "name" : "SPARQL Select template",
+ "body" : "SELECT ?s ?p ?o\nWHERE {\n\t?s ?p ?o .\n} LIMIT 100",
+ "shared" : false
+ },
+ "Clear graph" : {
+ "name" : "Clear graph",
+ "body" : "CLEAR GRAPH ",
+ "shared" : false
+ },
+ "Add statements" : {
+ "name" : "Add statements",
+ "body" : "PREFIX dc: \nINSERT DATA\n {\n GRAPH {\n dc:title \"A new book\" ;\n dc:creator \"A.N.Other\" .\n }\n }",
+ "shared" : false
+ },
+ "Remove statements" : {
+ "name" : "Remove statements",
+ "body" : "PREFIX dc: \nDELETE DATA\n{\nGRAPH {\n dc:title \"A new book\" ;\n dc:creator \"A.N.Other\" .\n }\n}",
+ "shared" : false
+ }
+ }
+ }
+}
diff --git a/test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl b/test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl
new file mode 100644
index 000000000..b73d67f2d
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/repo-configs/test-repo-config.ttl
@@ -0,0 +1,16 @@
+PREFIX config: <tag:rdf4j.org,2023:config/>
+
+[] a config:Repository ;
+ config:rep.id "test-repo" ;
+ config:rep.impl
+ [
+ config:rep.type "openrdf:SailRepository" ;
+ config:sail.impl
+ [
+ config:native.tripleIndexers "spoc,posc" ;
+ config:sail.defaultQueryEvaluationMode "STANDARD" ;
+ config:sail.iterationCacheSyncThreshold "10000" ;
+ config:sail.type "openrdf:NativeStore" ;
+ ] ;
+ ] ;
+.
diff --git a/test/test_rdf4j/test_e2e/test_client.py b/test/test_rdf4j/test_e2e/test_client.py
new file mode 100644
index 000000000..50976d68b
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_client.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import RDF4JUnsupportedProtocolError
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
if has_httpx:
    from rdflib.contrib.rdf4j import RDF4JClient

    # NOTE(review): this test uses no container fixture — confirm the
    # testcontainer mark is intended here.
    @pytest.mark.testcontainer
    def test_client_protocol_error(monkeypatch):
        # Replace the `protocol` property with a plain int below the minimum
        # supported version (12), so construction must raise.
        monkeypatch.setattr(RDF4JClient, "protocol", 11)
        with pytest.raises(RDF4JUnsupportedProtocolError):
            RDF4JClient("http://example.com/")


@pytest.mark.testcontainer
def test_client_close_method(client: RDF4JClient):
    # Closing the client must close the underlying httpx.Client.
    client.close()
    assert client._http_client.is_closed


@pytest.mark.testcontainer
def test_client_protocol(client: RDF4JClient):
    # The client constructor rejects servers below protocol 12, so any
    # successfully constructed client reports at least 12.
    assert client.protocol >= 12
diff --git a/test/test_rdf4j/test_e2e/test_e2e_rdf4j_store.py b/test/test_rdf4j/test_e2e/test_e2e_rdf4j_store.py
new file mode 100644
index 000000000..9f3c47a8b
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_e2e_rdf4j_store.py
@@ -0,0 +1,528 @@
+from __future__ import annotations
+
+import typing as t
+
+import pytest
+
+from rdflib import RDF, SKOS, BNode, Dataset, Graph, Literal, URIRef, Variable
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, _TripleChoiceType
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx and t.TYPE_CHECKING:
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.testcontainer
+def test_rdf4j_store_add(ds: Dataset):
+ assert len(ds) == 0
+ ds.add((URIRef("http://example.com/s"), RDF.type, SKOS.Concept))
+ assert len(ds) == 1
+
+
+@pytest.mark.testcontainer
+def test_rdf4j_store_addn(ds: Dataset):
+ assert len(ds) == 0
+ ds.addN(
+ [
+ (
+ URIRef("http://example.com/s"),
+ RDF.type,
+ SKOS.Concept,
+ URIRef("urn:graph:a"), # type: ignore[list-item]
+ ),
+ (
+ URIRef("http://example.com/s"),
+ SKOS.prefLabel,
+ Literal("Label"),
+ DATASET_DEFAULT_GRAPH_ID, # type: ignore[list-item]
+ ),
+ (
+ URIRef("http://example.com/s"),
+ SKOS.definition,
+ Literal("Definition"),
+ URIRef("urn:graph:b"), # type: ignore[list-item]
+ ),
+ ]
+ )
+ assert len(ds) == 3
+
+
+@pytest.mark.testcontainer
+def test_graphs_method_default_graph(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> .
+ """
+ # This returns 1 graph, the default graph, even when there are no triples.
+ graphs = list(ds.graphs())
+ assert len(graphs) == 1
+ assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID
+ repo.upload(data)
+ graphs = list(ds.graphs())
+ assert len(graphs) == 1
+ graph = graphs[0]
+ assert graph.identifier == DATASET_DEFAULT_GRAPH_ID
+ assert len(graph) == 1
+
+
+@pytest.mark.testcontainer
+def test_graphs_method_default_and_named_graphs(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ # This returns 1 graph, the default graph, even when there are no triples.
+ graphs = list(ds.graphs())
+ assert len(graphs) == 1
+ assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID
+ repo.upload(data)
+
+ # Retrieve graphs with no triple pattern.
+ graphs = list(ds.graphs())
+ assert len(graphs) == 3
+
+ graph_a = graphs[0]
+ assert graph_a.identifier == URIRef("urn:graph:b")
+ assert len(graph_a) == 1
+
+ graph_b = graphs[1]
+ assert graph_b.identifier == URIRef("urn:graph:a")
+ assert len(graph_b) == 2
+
+ default_graph = graphs[2]
+ assert default_graph.identifier == DATASET_DEFAULT_GRAPH_ID
+ assert len(default_graph) == 1
+
+ # Retrieve graphs with a triple pattern.
+ graphs = list(
+ ds.graphs(triple=(URIRef("http://example.com/s"), RDF.type, SKOS.Concept))
+ )
+ # Note: it's returning 2 graphs instead of 1 because the Dataset class always
+ # includes the default graph.
+ # I don't think this is the correct behaviour. TODO: raise a ticket for this.
+ # What should happen is, ds.graphs() includes the default graph if the triple
+ # pattern is None. Otherwise, it should only include graphs that contain the triple.
+ assert len(graphs) == 2
+ graph_a = graphs[0]
+ assert graph_a.identifier == URIRef("urn:graph:a")
+ assert len(graph_a) == 2
+
+
+@pytest.mark.testcontainer
+def test_add_graph(ds: Dataset):
+ assert len(ds) == 0
+ graphs = list(ds.graphs())
+ assert len(graphs) == 1
+ assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID
+
+ graph_name = URIRef("urn:graph:a")
+
+ # Add a graph to the dataset using a URIRef.
+ # Note, this is a no-op since RDF4J doesn't support named graphs with no statements,
+ # which is why the length of the graphs is 1 (the default graph).
+ ds.add_graph(graph_name)
+ graphs = list(ds.graphs())
+ assert len(graphs) == 1
+ assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID
+
+ # Add a graph object to the dataset.
+ # This will create a new graph in RDF4J, along with the statements.
+ graph = Graph(identifier=graph_name)
+ graph.add((URIRef("http://example.com/s"), RDF.type, SKOS.Concept))
+ graph.add((URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")))
+ ds.add_graph(graph)
+ # Verify that the graph was added.
+ graphs = list(ds.graphs())
+ assert len(graphs) == 2
+ graph_a = graphs[0]
+ assert graphs[1].identifier == DATASET_DEFAULT_GRAPH_ID
+ assert graph_a.identifier == graph_name
+ assert len(graph_a) == 2
+
+
+@pytest.mark.testcontainer
+def test_remove_graph(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ # This returns 1 graph, the default graph, even when there are no triples.
+ graphs = list(ds.graphs())
+ assert len(graphs) == 1
+ assert graphs[0].identifier == DATASET_DEFAULT_GRAPH_ID
+ repo.upload(data)
+ assert len(ds) == 4
+
+ ds.remove_graph(URIRef("urn:graph:a"))
+ assert len(ds) == 2
+ graphs = list(ds.graphs())
+ assert len(graphs) == 2
+ assert graphs[0].identifier == URIRef("urn:graph:b")
+ assert graphs[1].identifier == DATASET_DEFAULT_GRAPH_ID
+
+
+@pytest.mark.testcontainer
+def test_namespaces(ds: Dataset):
+ assert list(ds.namespaces()) == []
+
+ skos_namespace = URIRef(str(SKOS))
+ ds.bind("skos", skos_namespace)
+ assert list(ds.namespaces()) == [("skos", skos_namespace)]
+ assert ds.store.namespace("skos") == skos_namespace
+ assert ds.store.namespace("foo") is None
+ assert ds.store.prefix(skos_namespace) == "skos"
+ assert ds.store.prefix(URIRef("http://example.com/")) is None
+
+
+@pytest.mark.testcontainer
+def test_triples(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 4
+
+ # We don't have default_union enabled, returns the single statement from the
+ # default graph.
+ triples = set(ds.triples((None, None, None)))
+ assert triples == {
+ (URIRef("http://example.com/s"), SKOS.definition, Literal("Definition")),
+ }
+
+ # Enable default_union, returns all distinct statements.
+ ds.default_union = True
+ triples = set(ds.triples((None, None, None)))
+ assert triples == {
+ (URIRef("http://example.com/s"), RDF.type, SKOS.Concept),
+ (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")),
+ (URIRef("http://example.com/s"), SKOS.definition, Literal("Definition")),
+ }
+
+ # Triple pattern, return only the matching statements.
+ triples = set(ds.triples((None, SKOS.prefLabel, None)))
+ assert triples == {
+ (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")),
+ }
+
+ # Disable default_union, returns no statements.
+ ds.default_union = False
+ triples = set(ds.triples((None, SKOS.prefLabel, None)))
+ assert triples == set()
+
+ # Triple pattern, return matching statements in the default graph.
+ triples = set(ds.triples((None, SKOS.definition, None)))
+ assert triples == {
+ (URIRef("http://example.com/s"), SKOS.definition, Literal("Definition")),
+ }
+
+
+@pytest.mark.testcontainer
+def test_quads(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 4
+
+ quads = set(ds.quads((None, None, None, DATASET_DEFAULT_GRAPH_ID))) # type: ignore[arg-type]
+ assert quads == {
+ (
+ URIRef("http://example.com/s"),
+ SKOS.definition,
+ Literal("Definition"),
+ DATASET_DEFAULT_GRAPH_ID,
+ ),
+ }
+
+ quads = set(ds.quads((None, None, None, URIRef("urn:graph:a")))) # type: ignore[arg-type]
+ assert quads == {
+ (
+ URIRef("http://example.com/s"),
+ RDF.type,
+ SKOS.Concept,
+ URIRef("urn:graph:a"),
+ ),
+ (
+ URIRef("http://example.com/s"),
+ SKOS.prefLabel,
+ Literal("Label"),
+ URIRef("urn:graph:a"),
+ ),
+ }
+
+ quads = set(ds.quads((None, None, None, URIRef("urn:graph:b")))) # type: ignore[arg-type]
+ assert quads == {
+ (
+ URIRef("http://example.com/s"),
+ SKOS.prefLabel,
+ Literal("Label"),
+ URIRef("urn:graph:b"),
+ )
+ }
+
+
+@pytest.mark.testcontainer
+@pytest.mark.parametrize(
+ "s, p, o, g, expected_size",
+ [
+ [None, None, None, None, 0],
+ [URIRef("http://example.com/s"), None, None, None, 0],
+ [None, RDF.type, None, None, 3],
+ [None, SKOS.prefLabel, None, None, 2],
+ [None, SKOS.prefLabel, None, URIRef("urn:graph:a"), 3],
+ [None, None, None, DATASET_DEFAULT_GRAPH_ID, 3],
+ ],
+)
+def test_remove(ds: Dataset, s, p, o, g, expected_size):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 4
+ repo.delete(s, p, o, g)
+ assert len(ds) == expected_size
+
+
+@pytest.mark.testcontainer
+@pytest.mark.parametrize(
+ "default_union, triples_choices, expected_triples",
+ [
+ [
+ False,
+ (None, [SKOS.prefLabel, SKOS.definition], None),
+ {
+ (
+ URIRef("http://example.com/s"),
+ SKOS.definition,
+ Literal("Definition"),
+ ),
+ },
+ ],
+ [
+ True,
+ (None, [SKOS.prefLabel, SKOS.definition], None),
+ {
+ (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")),
+ (
+ URIRef("http://example.com/s"),
+ SKOS.definition,
+ Literal("Definition"),
+ ),
+ },
+ ],
+ [
+ True,
+ (None, [RDF.type, SKOS.prefLabel], None),
+ {
+ (URIRef("http://example.com/s"), RDF.type, SKOS.Concept),
+ (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")),
+ },
+ ],
+ [
+ True,
+ (None, [RDF.type, SKOS.definition], None),
+ {
+ (URIRef("http://example.com/s"), RDF.type, SKOS.Concept),
+ (
+ URIRef("http://example.com/s"),
+ SKOS.definition,
+ Literal("Definition"),
+ ),
+ },
+ ],
+ ],
+)
+def test_triples_choices_default_union_on(
+ ds: Dataset,
+ default_union: bool,
+ triples_choices: _TripleChoiceType,
+ expected_triples,
+):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 4
+ ds.default_union = default_union
+ triples = set(ds.triples_choices(triples_choices))
+ assert triples == expected_triples
+
+
+@pytest.mark.testcontainer
+@pytest.mark.parametrize(
+ "default_union, query, expected_result_bindings",
+ [
+ [
+ False,
+ "select * where { ?s ?p ?o }",
+ {(URIRef("http://example.com/s"), SKOS.definition, Literal("Definition"))},
+ ],
+ [
+ True,
+ "select * where { ?s ?p ?o }",
+ {
+ (URIRef("http://example.com/s"), RDF.type, SKOS.Concept),
+ (URIRef("http://example.com/s"), SKOS.prefLabel, Literal("Label")),
+ (
+ URIRef("http://example.com/s"),
+ SKOS.definition,
+ Literal("Definition"),
+ ),
+ },
+ ],
+ ],
+)
+def test_query_default_graph_behaviour(
+ ds: Dataset,
+ default_union: bool,
+ query: str,
+ expected_result_bindings: list[dict[Variable, URIRef | BNode | Literal]],
+):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 4
+ ds.default_union = default_union
+ result = ds.query(query)
+ assert set(tuple(x.values()) for x in result.bindings) == expected_result_bindings
+
+
+@pytest.mark.testcontainer
+def test_query_init_ns(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{RDF.type}> <http://example.com/type/Term> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 5
+ query = """
+ select distinct ?s
+ where {
+ graph ?g {
+ ?s a ex:Term .
+ }
+ }
+ """
+ result = ds.query(query, initNs={"ex": "http://example.com/type/"})
+ assert len(result) == 1
+ assert set(tuple(x.values()) for x in result.bindings) == {
+ (URIRef("http://example.com/s"),)
+ }
+
+
+@pytest.mark.testcontainer
+def test_query_init_bindings(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{RDF.type}> <http://example.com/type/Term> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 5
+
+ query = """
+ SELECT ?o
+ WHERE {
+ GRAPH ?g {
+ ?s ?p ?o .
+ }
+ }
+ """
+ result = ds.query(query, initBindings={"p": RDF.type})
+ assert len(result) == 2
+ assert set(tuple(x.values()) for x in result.bindings) == {
+ (SKOS.Concept,),
+ (URIRef("http://example.com/type/Term"),),
+ }
+
+
+@pytest.mark.testcontainer
+def test_query_update_delete_default_graph_triples(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{RDF.type}> <http://example.com/type/Term> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 5
+
+ query = """
+ DELETE {
+ ?s ?p ?o
+ }
+ WHERE {
+ ?s ?p ?o
+ }
+ """
+ ds.update(query)
+ assert len(ds) == 4
+
+
+@pytest.mark.testcontainer
+def test_query_update(ds: Dataset):
+ repo: Repository = ds.store.repo # type: ignore[attr-defined]
+ data = f"""
+ <http://example.com/s> <{RDF.type}> <{SKOS.Concept}> <urn:graph:a> .
+ <http://example.com/s> <{RDF.type}> <http://example.com/type/Term> <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:a> .
+ <http://example.com/s> <{SKOS.prefLabel}> "Label" <urn:graph:b> .
+ <http://example.com/s> <{SKOS.definition}> "Definition" .
+ """
+ repo.upload(data)
+ assert len(ds) == 5
+
+ query = """
+ DELETE {
+ GRAPH ?g {
+ ?s ?p ?o
+ }
+ }
+ WHERE {
+ GRAPH ?g {
+ ?s ?p ?o
+ }
+ }
+ """
+ ds.update(query)
+ assert len(ds) == 1
diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py
new file mode 100644
index 000000000..ec17cafd9
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_e2e_repo_graph_store.py
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from rdflib import Dataset, Graph, URIRef
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.parametrize(
+ "graph_name", [URIRef("urn:graph:a"), DATASET_DEFAULT_GRAPH_ID]
+)
+@pytest.mark.testcontainer
+def test_e2e_repo_graph_store_crud(repo: Repository, graph_name: URIRef):
+ path = str(Path(__file__).parent.parent / "data/quads-2.nq")
+ repo.overwrite(path, graph_name)
+ assert repo.size() == 1
+
+ graph = repo.graphs.get(graph_name)
+ assert isinstance(graph, Graph)
+ assert len(graph) == 1
+ ds = Dataset().parse(path, format="nquads")
+ expected_graph = Graph().parse(data=ds.serialize(format="ntriples"))
+ assert len(expected_graph) == 1
+ assert graph.isomorphic(expected_graph)
+
+ # Add to the graph
+ repo.graphs.add(
+ graph_name,
+ " .",
+ )
+ assert repo.size() == 2
+ graph = repo.graphs.get(graph_name)
+ assert isinstance(graph, Graph)
+ assert len(graph) == 2
+ expected_graph.add(
+ (
+ URIRef("http://example.org/s4"),
+ URIRef("http://example.org/p4"),
+ URIRef("http://example.org/o4"),
+ )
+ )
+ assert graph.isomorphic(expected_graph)
+
+ # Overwrite the graph
+ repo.graphs.overwrite(
+ graph_name,
+ " .",
+ )
+ assert repo.size() == 1
+ graph = repo.graphs.get(graph_name)
+ assert isinstance(graph, Graph)
+ assert len(graph) == 1
+ expected_graph = Graph().parse(
+ data=" ."
+ )
+ assert graph.isomorphic(expected_graph)
+
+ # Clear the graph
+ repo.graphs.clear(graph_name)
+ assert repo.size() == 0
+ graph = repo.graphs.get(graph_name)
+ assert isinstance(graph, Graph)
+ assert len(graph) == 0
diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_management.py b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py
new file mode 100644
index 000000000..dba939156
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_e2e_repo_management.py
@@ -0,0 +1,216 @@
+from __future__ import annotations
+
+import pathlib
+
+import pytest
+
+from rdflib import BNode, Dataset, URIRef
+from rdflib.compare import isomorphic
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j import RDF4JClient
+ from rdflib.contrib.rdf4j.client import Repository
+ from rdflib.contrib.rdf4j.exceptions import (
+ RepositoryAlreadyExistsError,
+ RepositoryFormatError,
+ RepositoryNotFoundError,
+ RepositoryNotHealthyError,
+ )
+
+
+@pytest.mark.testcontainer
+def test_repos(client: RDF4JClient):
+ assert client.repositories.list() == []
+
+
+@pytest.mark.testcontainer
+def test_list_repo_non_existent(client: RDF4JClient):
+ assert client.repositories.list() == []
+ with pytest.raises(RepositoryNotFoundError):
+ assert client.repositories.get("non-existent") is None
+
+
+@pytest.mark.testcontainer
+def test_list_repo_format_error(client: RDF4JClient, monkeypatch):
+ class MockResponse:
+ def json(self):
+ return {}
+
+ def raise_for_status(self):
+ pass
+
+ monkeypatch.setattr(httpx.Client, "get", lambda *args, **kwargs: MockResponse())
+ with pytest.raises(RepositoryFormatError):
+ client.repositories.list()
+
+
+@pytest.mark.testcontainer
+def test_repo_manager_crud(client: RDF4JClient):
+ # Empty state
+ assert client.repositories.list() == []
+
+ config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl"
+ with open(config_path) as file:
+ config = file.read()
+
+ repo = client.repositories.create("test-repo", config)
+ assert repo.identifier == "test-repo"
+ assert repo.health()
+
+ # New repository created
+ assert len(client.repositories.list()) == 1
+
+ # Repo already exists error
+ with pytest.raises(RepositoryAlreadyExistsError):
+ client.repositories.create("test-repo", config)
+
+ # Confirm repo is empty
+ assert repo.size() == 0
+ ds = repo.get()
+ assert isinstance(ds, Dataset)
+ assert len(ds) == 0
+
+ # Use the overwrite method to add statements to the repo
+ with open(pathlib.Path(__file__).parent.parent / "data/quads-2.nq", "rb") as file:
+ repo.overwrite(file)
+ assert repo.size() == 1
+ graphs = repo.graph_names()
+ assert len(graphs) == 1
+ assert any(value in graphs for value in [URIRef("urn:graph:a3")])
+ ds = repo.get()
+ assert len(ds) == 1
+ str_result = ds.serialize(format="nquads")
+ assert (
+ " ."
+ in str_result
+ )
+
+ # Overwrite with a different file.
+ with open(pathlib.Path(__file__).parent.parent / "data/quads-1.nq", "rb") as file:
+ repo.overwrite(file)
+ assert repo.size() == 2
+ ds = repo.get()
+ assert len(ds) == 2
+ graphs = repo.graph_names()
+ assert len(graphs) == 2
+ assert any(
+ value in graphs for value in [URIRef("urn:graph:a"), URIRef("urn:graph:b")]
+ )
+ str_result = ds.serialize(format="nquads")
+ assert (
+ " ."
+ in str_result
+ )
+ assert (
+ " ."
+ in str_result
+ )
+
+ # Get statements using a filter pattern
+ ds = repo.get(subj=URIRef("http://example.org/s2"))
+ assert len(ds) == 1
+ str_result = ds.serialize(format="nquads")
+ assert (
+ " ."
+ in str_result
+ )
+
+ # Use the delete method to delete a statement using a filter pattern
+ repo.delete(subj=URIRef("http://example.org/s"))
+ assert repo.size() == 1
+ ds = repo.get()
+ assert len(ds) == 1
+ str_result = ds.serialize(format="nquads")
+ assert (
+ " ."
+ in str_result
+ )
+
+ # Append to the repository a new RDF payload with blank node graph names
+ with open(pathlib.Path(__file__).parent.parent / "data/quads-3.nq", "rb") as file:
+ repo.upload(file)
+ assert repo.size() == 2
+ ds = repo.get()
+ assert len(ds) == 2
+ graphs = repo.graph_names()
+ assert len(graphs) == 2
+ assert any(
+ value in graphs
+ for value in [URIRef("urn:graph:a"), URIRef("urn:graph:b"), BNode("c")]
+ )
+ data = """
+ <http://example.org/s2> <http://example.org/p2> <http://example.org/o2> <urn:graph:b> .
+ _:b-test <http://example.org/p> _:c _:graph .
+ """
+ ds2 = Dataset().parse(data=data, format="nquads")
+ assert isinstance(ds, Dataset)
+ for graph in ds.graphs():
+ assert any(isomorphic(graph, graph2) for graph2 in ds2.graphs())
+
+ # Delete repository
+ client.repositories.delete("test-repo")
+ assert client.repositories.list() == []
+
+ # Deleting non-existent repo
+ with pytest.raises(RepositoryNotFoundError):
+ client.repositories.delete("test-repo")
+
+
+@pytest.mark.testcontainer
+def test_repo_not_healthy(repo: Repository, monkeypatch: pytest.MonkeyPatch):
+ class MockResponse:
+ def raise_for_status(self):
+ raise httpx.HTTPStatusError(
+ "",
+ request=httpx.Request("post", ""),
+ response=httpx.Response(status_code=500),
+ )
+
+ monkeypatch.setattr(httpx.Client, "post", lambda *args, **kwargs: MockResponse())
+ with pytest.raises(RepositoryNotHealthyError):
+ repo.health()
+
+
+@pytest.mark.xfail(
+ reason="RDF4J REST API does not support referencing blank nodes directly."
+)
+def test_repo_delete_statement_with_bnode(client: RDF4JClient):
+ config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl"
+ with open(config_path) as file:
+ config = file.read()
+
+ repo = client.repositories.create("test-repo", config)
+ assert repo.identifier == "test-repo"
+ assert repo.health()
+ with open(pathlib.Path(__file__).parent.parent / "data/quads-2.nq", "rb") as file:
+ repo.overwrite(file)
+
+ assert repo.size() == 1
+ repo.delete(subj=BNode("b-test")) # type: ignore[arg-type]
+ assert repo.size() == 0
+
+
+@pytest.mark.xfail(
+ reason="RDF4J REST API does not support referencing blank nodes directly."
+)
+def test_repo_delete_statement_with_bnode_graph(client: RDF4JClient):
+ config_path = pathlib.Path(__file__).parent / "repo-configs/test-repo-config.ttl"
+ with open(config_path) as file:
+ config = file.read()
+
+ repo = client.repositories.create("test-repo", config)
+ assert repo.identifier == "test-repo"
+ assert repo.health()
+ with open(pathlib.Path(__file__).parent.parent / "data/quads-3.nq", "rb") as file:
+ repo.overwrite(file)
+
+ assert repo.size() == 1
+ repo.delete(subj=BNode("graph")) # type: ignore[arg-type]
+ assert repo.size() == 0
diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py
new file mode 100644
index 000000000..93d30a694
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_e2e_repo_namespace.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository
+
+
+@pytest.mark.testcontainer
+def test_e2e_repo_namespace_crud(repo: Repository):
+ assert repo.namespaces.list() == []
+
+ # Delete a non-existent prefix
+ repo.namespaces.remove("non-existent")
+
+ # Retrieve a non-existent prefix
+ assert repo.namespaces.get("non-existent") is None
+
+ # Set a new prefix
+ repo.namespaces.set("test", "http://example.org/test/")
+ assert set(repo.namespaces.list()) == {
+ NamespaceListingResult(prefix="test", namespace="http://example.org/test/")
+ }
+ assert repo.namespaces.get("test") == "http://example.org/test/"
+
+ # Set another
+ repo.namespaces.set("test2", "http://example.org/test2/")
+ assert set(repo.namespaces.list()) == {
+ NamespaceListingResult(prefix="test", namespace="http://example.org/test/"),
+ NamespaceListingResult(prefix="test2", namespace="http://example.org/test2/"),
+ }
+ assert repo.namespaces.get("test2") == "http://example.org/test2/"
+
+ # Update an existing prefix (overwrite)
+ repo.namespaces.set("test", "http://example.org/test-updated/")
+ assert set(repo.namespaces.list()) == {
+ NamespaceListingResult(
+ prefix="test", namespace="http://example.org/test-updated/"
+ ),
+ NamespaceListingResult(prefix="test2", namespace="http://example.org/test2/"),
+ }
+ assert repo.namespaces.get("test") == "http://example.org/test-updated/"
+
+ # Delete test prefix
+ repo.namespaces.remove("test")
+ assert set(repo.namespaces.list()) == {
+ NamespaceListingResult(prefix="test2", namespace="http://example.org/test2/")
+ }
+ assert repo.namespaces.get("test") is None
+ assert repo.namespaces.get("test2") == "http://example.org/test2/"
+
+ # Clear
+ repo.namespaces.clear()
+ assert repo.namespaces.list() == []
+ assert repo.namespaces.get("test") is None
+ assert repo.namespaces.get("test2") is None
diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_query.py b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py
new file mode 100644
index 000000000..1b58165e7
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_e2e_repo_query.py
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.term import URIRef, Variable
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.testcontainer
+def test_e2e_repo_query(repo: Repository):
+ path = str(Path(__file__).parent.parent / "data/quads-1.nq")
+ repo.overwrite(path)
+ assert repo.size() == 2
+
+ query = "select ?s ?p ?o where { ?s ?p ?o }"
+ results = repo.query(query)
+ assert len(results) == 2
+ s_var = Variable("s")
+ p_var = Variable("p")
+ o_var = Variable("o")
+ subjects = [URIRef("http://example.org/s"), URIRef("http://example.org/s2")]
+ predicates = [URIRef("http://example.org/p"), URIRef("http://example.org/p2")]
+ objects = [URIRef("http://example.org/o"), URIRef("http://example.org/o2")]
+ for row in results.bindings:
+ assert row.get(s_var) in subjects
+ assert row.get(p_var) in predicates
+ assert row.get(o_var) in objects
+
+ query = "ask where { ?s ?p ?o }"
+ results = repo.query(query)
+ assert results.askAnswer is True
+
+ query = "ask where { }"
+ results = repo.query(query)
+ assert results.askAnswer is False
+
+ query = "construct { ?s ?p ?o } where { graph { ?s ?p ?o } }"
+ results = repo.query(query)
+ assert len(results.graph) == 1
+ assert (
+ URIRef("http://example.org/s"),
+ URIRef("http://example.org/p"),
+ URIRef("http://example.org/o"),
+ ) in results.graph
+
+ query = "describe "
+ results = repo.query(query)
+ assert len(results.graph) == 1
+ assert (
+ URIRef("http://example.org/s2"),
+ URIRef("http://example.org/p2"),
+ URIRef("http://example.org/o2"),
+ ) in results.graph
+
+ # Provide a keyword argument "limit" to the query method
+ # We have 2 statements in the repository, and this should return only one
+ query = "select ?s ?p ?o where { ?s ?p ?o }"
+ results = repo.query(query, limit=1)
+ assert len(results) == 1
diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py
new file mode 100644
index 000000000..a4d9bb0ac
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_e2e_repo_transaction.py
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import TransactionClosedError
+from rdflib.term import Literal, URIRef, Variable
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ from rdflib.contrib.rdf4j.client import Repository, Transaction
+
+
+@pytest.mark.testcontainer
+def test_e2e_repo_transaction(repo: Repository):
+ path = str(Path(__file__).parent.parent / "data/quads-1.nq")
+ repo.overwrite(path)
+ assert repo.size() == 2
+
+ with repo.transaction() as txn:
+ txn.ping()
+ assert txn.size() == 2
+ assert txn.size("urn:graph:a") == 1
+
+ # Open a transaction without a context manager
+ txn = Transaction(repo)
+ txn.open()
+ assert txn.size() == 2
+ txn.rollback()
+ assert txn.url is None
+
+ # Raises an error as the transaction is closed.
+ with pytest.raises(TransactionClosedError):
+ txn.ping()
+
+ path = str(Path(__file__).parent.parent / "data/quads-2.nq")
+ with repo.transaction() as txn:
+ query = "select (count(*) as ?count) where {?s ?p ?o}"
+ result = txn.query(query)
+ # Before upload, the number of statements is 2.
+ assert result.bindings[0][Variable("count")] == Literal(2)
+ # Add data.
+ txn.upload(path)
+ assert txn.size() == 3
+ result = txn.query(query)
+ # Now it's 3.
+ assert result.bindings[0][Variable("count")] == Literal(3)
+ # Repo is still 2 as we've not yet committed.
+ assert repo.size() == 2
+
+ # Transaction committed, size is now 3.
+ assert repo.size() == 3
+
+
+@pytest.mark.testcontainer
+def test_e2e_repo_transaction_delete(repo: Repository):
+ path = str(Path(__file__).parent.parent / "data/quads-1.nq")
+ repo.overwrite(path)
+ data = " ."
+ repo.upload(data)
+ assert repo.size() == 3
+ assert repo.size("urn:graph:a2") == 1
+
+ with repo.transaction() as txn:
+ txn.delete(data)
+ assert txn.size() == 2
+ assert txn.size("urn:graph:a2") == 0
+
+
+@pytest.mark.testcontainer
+def test_e2e_repo_transaction_update(repo: Repository):
+ path = str(Path(__file__).parent.parent / "data/quads-1.nq")
+ repo.overwrite(path)
+ assert repo.size() == 2
+
+ query = "INSERT DATA { GRAPH { } }"
+ with repo.transaction() as txn:
+ txn.update(query)
+ assert txn.size() == 3
+ assert txn.size("urn:graph:a2") == 1
+
+
+@pytest.mark.testcontainer
+def test_e2e_repo_transaction_get(repo: Repository):
+ path = str(Path(__file__).parent.parent / "data/quads-1.nq")
+ repo.overwrite(path)
+ assert repo.size() == 2
+
+ with repo.transaction() as txn:
+ ds = txn.get()
+ assert len(ds) == 2
+
+ repo.upload(str(Path(__file__).parent.parent / "data/quads-2.nq"))
+ repo.upload(str(Path(__file__).parent.parent / "data/quads-3.nq"))
+ assert repo.size() == 4
+
+ with repo.transaction() as txn:
+ ds = txn.get()
+ assert len(ds) == 4
+
+ ds = txn.get(graph_name="urn:graph:a")
+ assert len(ds) == 1
+
+ ds = txn.get(pred=URIRef("http://example.org/p"))
+ assert len(ds) == 2
diff --git a/test/test_rdf4j/test_e2e/test_e2e_repo_update.py b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py
new file mode 100644
index 000000000..12313b901
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_e2e_repo_update.py
@@ -0,0 +1,25 @@
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.testcontainer
+def test_e2e_repo_query(repo: Repository):
+ path = str(Path(__file__).parent.parent / "data/quads-1.nq")
+ repo.overwrite(path)
+ assert repo.size() == 2
+
+ query = """INSERT DATA { GRAPH { } }"""
+ repo.update(query)
+ assert repo.size() == 3
diff --git a/test/test_rdf4j/test_e2e/test_graphdb/conftest.py b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py
new file mode 100644
index 000000000..492d40bf9
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_graphdb/conftest.py
@@ -0,0 +1,25 @@
+from importlib.util import find_spec
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+has_testcontainers = find_spec("testcontainers") is not None
+
+pytestmark = pytest.mark.skipif(
+ not (has_httpx and has_testcontainers),
+ reason="skipping rdf4j tests, httpx or testcontainers not available",
+)
+
+if has_httpx and has_testcontainers:
+ from testcontainers.core.container import DockerContainer
+
+ from rdflib.contrib.graphdb import GraphDBClient
+
+ @pytest.fixture(scope="function")
+ def client(graphdb_container: DockerContainer):
+ port = graphdb_container.get_exposed_port(7200)
+ with GraphDBClient(
+ f"http://localhost:{port}/", auth=("admin", "admin")
+ ) as client:
+ yield client
diff --git a/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py
new file mode 100644
index 000000000..278c9d629
--- /dev/null
+++ b/test/test_rdf4j/test_e2e/test_graphdb/test_graphdb_repo_management.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+import pathlib
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import (
+ RepositoryAlreadyExistsError,
+ RepositoryNotFoundError,
+ RepositoryNotHealthyError,
+)
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.graphdb import GraphDBClient
+
+
+# TODO: consider parameterizing the client (RDF4JClient, GraphDBClient)
+
+
+@pytest.mark.testcontainer
+def test_repo_manager_crud(client: GraphDBClient):
+ # Empty state
+ assert client.repositories.list() == []
+
+ config_path = (
+ pathlib.Path(__file__).parent.parent / "repo-configs/test-repo-config.ttl"
+ )
+ with open(config_path) as file:
+ config = file.read()
+
+ repo = client.repositories.create("test-repo", config)
+ assert repo.identifier == "test-repo"
+ assert repo.health()
+
+ # New repository created
+ assert len(client.repositories.list()) == 1
+
+ # Repo already exists error
+ with pytest.raises(RepositoryAlreadyExistsError):
+ client.repositories.create("test-repo", config)
+
+ # Delete repository
+ client.repositories.delete("test-repo")
+ assert client.repositories.list() == []
+
+ # Deleting non-existent repo
+ with pytest.raises(RepositoryNotFoundError):
+ client.repositories.delete("test-repo")
+
+
+@pytest.mark.testcontainer
+def test_repo_not_healthy(client: GraphDBClient, monkeypatch):
+ config_path = (
+ pathlib.Path(__file__).parent.parent / "repo-configs/test-repo-config.ttl"
+ )
+ with open(config_path) as file:
+ config = file.read()
+
+ repo = client.repositories.create("test-repo", config)
+ assert repo.identifier == "test-repo"
+
+ class MockResponse:
+ def raise_for_status(self):
+ raise httpx.HTTPStatusError(
+ "",
+ request=httpx.Request("post", ""),
+ response=httpx.Response(status_code=500),
+ )
+
+ monkeypatch.setattr(httpx.Client, "post", lambda *args, **kwargs: MockResponse())
+ with pytest.raises(RepositoryNotHealthyError):
+ repo.health()
diff --git a/test/test_rdf4j/test_unit/repository/conftest.py b/test/test_rdf4j/test_unit/repository/conftest.py
new file mode 100644
index 000000000..b74b3017f
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/conftest.py
@@ -0,0 +1,53 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j import RDF4JClient
+ from rdflib.contrib.rdf4j.client import Repository, RepositoryManager
+
+
+@pytest.fixture(scope="function")
+def client(monkeypatch: pytest.MonkeyPatch):
+ monkeypatch.setattr(RDF4JClient, "protocol", 12)
+ with RDF4JClient("http://localhost/", auth=("admin", "admin")) as client:
+ yield client
+
+
+@pytest.fixture(scope="function")
+def repo(client: RDF4JClient, monkeypatch: pytest.MonkeyPatch):
+ with httpx.Client() as http_client:
+ monkeypatch.setattr(
+ RepositoryManager,
+ "create",
+ lambda *args, **kwargs: Repository("test-repo", http_client),
+ )
+
+ repo = client.repositories.create("test-repo", "")
+ assert repo.identifier == "test-repo"
+ yield repo
+
+
+@pytest.fixture
+def txn(repo: Repository, monkeypatch: pytest.MonkeyPatch):
+ transaction_url = "http://example.com/transaction/1"
+ mock_transaction_create_response = Mock(
+ spec=httpx.Response, headers={"Location": transaction_url}
+ )
+ mock_httpx_post = Mock(return_value=mock_transaction_create_response)
+ monkeypatch.setattr(httpx.Client, "post", mock_httpx_post)
+ with repo.transaction() as txn:
+ yield txn
+ mock_commit_response = Mock(spec=httpx.Response, status_code=200)
+ mock_httpx_put = Mock(return_value=mock_commit_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_delete.py
new file mode 100644
index 000000000..287cdb224
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_delete.py
@@ -0,0 +1,93 @@
+from __future__ import annotations
+
+import typing as t
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
+from rdflib.term import URIRef
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ ObjectType,
+ PredicateType,
+ Repository,
+ SubjectType,
+ )
+
+
+@pytest.mark.parametrize(
+ "subj, pred, obj, expected_params",
+ [
+ [
+ URIRef("http://example.com/s"),
+ URIRef("http://example.com/p"),
+ URIRef("http://example.com/o"),
+ {
+            "subj": "<http://example.com/s>",
+            "pred": "<http://example.com/p>",
+            "obj": "<http://example.com/o>",
+ },
+ ],
+ [None, None, None, {}],
+ ],
+)
+def test_repo_delete_spo(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ subj: SubjectType,
+ pred: PredicateType,
+ obj: ObjectType,
+ expected_params: dict[str, str],
+):
+ """Test that the subj, pred, and obj query parameters are set correctly."""
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_get)
+
+ repo.delete(subj=subj, pred=pred, obj=obj)
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ params=expected_params,
+ )
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_graph_name_param",
+ [
+ [DATASET_DEFAULT_GRAPH_ID, "null"],
+        ["http://example.com/graph", "<http://example.com/graph>"],
+        [URIRef("http://example.com/graph"), "<http://example.com/graph>"],
+ [None, None],
+ ],
+)
+def test_repo_delete_graph_name(
+ repo: Repository,
+ graph_name: URIRef | t.Iterable[URIRef] | str | None,
+ expected_graph_name_param: str,
+ monkeypatch: pytest.MonkeyPatch,
+):
+ """
+ Test that graph_name is passed as a query parameter and correctly handles the
+ different type variations.
+ """
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_get)
+ if graph_name is None:
+ params = {}
+ else:
+ params = {"context": expected_graph_name_param}
+ repo.delete(graph_name=graph_name)
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ params=params,
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_get.py b/test/test_rdf4j/test_unit/repository/test_repo_get.py
new file mode 100644
index 000000000..47f9e6d01
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_get.py
@@ -0,0 +1,199 @@
+from __future__ import annotations
+
+import typing as t
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib import Dataset, Graph
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
+from rdflib.term import URIRef
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ ObjectType,
+ PredicateType,
+ RDF4JNamespaceManager,
+ Repository,
+ SubjectType,
+ )
+
+
+@pytest.mark.parametrize(
+ "content_type, data, expected_class_type",
+ [
+        [
+            None,
+            "<urn:a> <urn:b> <urn:c> .",
+            Dataset,
+        ],
+        [
+            "application/trig",
+            "<urn:graph> { <urn:a> <urn:b> <urn:c> . }",
+            Dataset,
+        ],
+        [
+            "application/n-triples",
+            "<urn:a> <urn:b> <urn:c> .",
+            Graph,
+        ],
+        [
+            "text/turtle",
+            "<urn:a> <urn:b> <urn:c> .",
+            Graph,
+        ],
+        [
+            "application/rdf+xml",
+            """
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:ex="http://example.com/">
+  <rdf:Description rdf:about="http://example.com/s">
+    <ex:p rdf:resource="http://example.com/o"/>
+  </rdf:Description>
+</rdf:RDF>
+""",
+            Graph,
+        ],
+    ],
+)
+def test_repo_content_type(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ content_type: str | None,
+ data: str,
+ expected_class_type: type,
+):
+ """
+ Test that the content type is set correctly on the request and that the response is
+ parsed correctly.
+ """
+ mock_response = Mock(spec=httpx.Response, text=data)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+
+ result = repo.get(content_type=content_type)
+ headers = {"Accept": content_type or "application/n-quads"}
+ params: dict[str, str] = {}
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ )
+ assert isinstance(result, expected_class_type)
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_graph_name_param",
+ [
+ [DATASET_DEFAULT_GRAPH_ID, "null"],
+        ["http://example.com/graph", "<http://example.com/graph>"],
+        [URIRef("http://example.com/graph"), "<http://example.com/graph>"],
+ [None, None],
+ ],
+)
+def test_repo_get_graph_name(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ graph_name: URIRef | t.Iterable[URIRef] | str | None,
+ expected_graph_name_param: str,
+):
+ """
+ Test that graph_name is passed as a query parameter and correctly handles the
+ different type variations.
+ """
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+ headers = {
+ "Accept": "application/n-quads",
+ }
+ if graph_name is None:
+ params = {}
+ else:
+ params = {"context": expected_graph_name_param}
+ repo.get(graph_name=graph_name)
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ )
+
+
+@pytest.mark.parametrize("infer, expected_value", [[True, KeyError], [False, "false"]])
+def test_repo_get_infer(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ infer: bool,
+ expected_value: Exception | str,
+):
+ """Test that the "infer" query parameter is set correctly."""
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+ headers = {
+ "Accept": "application/n-quads",
+ }
+
+ params = {}
+ if isinstance(expected_value, str):
+ params["infer"] = expected_value
+
+ repo.get(infer=infer)
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ )
+
+
+@pytest.mark.parametrize(
+ "subj, pred, obj, expected_params",
+ [
+ [
+ URIRef("http://example.com/s"),
+ URIRef("http://example.com/p"),
+ URIRef("http://example.com/o"),
+ {
+            "subj": "<http://example.com/s>",
+            "pred": "<http://example.com/p>",
+            "obj": "<http://example.com/o>",
+ },
+ ],
+ [None, None, None, {}],
+ ],
+)
+def test_repo_get_spo(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ subj: SubjectType,
+ pred: PredicateType,
+ obj: ObjectType,
+ expected_params: dict[str, str],
+):
+ """Test that the subj, pred, and obj query parameters are set correctly."""
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+ headers = {
+ "Accept": "application/n-quads",
+ }
+
+ repo.get(subj=subj, pred=pred, obj=obj)
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=expected_params,
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py
new file mode 100644
index 000000000..c9f7675cf
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_add.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from unittest.mock import ANY, Mock
+
+import pytest
+
+from rdflib import URIRef
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_params",
+ [
+ ["http://example.com/graph", {"graph": "http://example.com/graph"}],
+ [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}],
+ ],
+)
+def test_repo_graph_store_add(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ graph_name: str | URIRef,
+ expected_params: dict[str, str],
+):
+    data = "<urn:a> <urn:b> <urn:c> ."
+ mock_response = Mock(spec=httpx.Response, text=data)
+ mock_httpx_post = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "post", mock_httpx_post)
+ repo.graphs.add(graph_name, data)
+ headers = {"Content-Type": "application/n-triples"}
+ mock_httpx_post.assert_called_once_with(
+ "/repositories/test-repo/rdf-graphs/service",
+ headers=headers,
+ params=expected_params,
+ content=ANY,
+ )
+
+
+@pytest.mark.parametrize("graph_name", [None, ""])
+def test_repo_graph_store_add_invalid_graph_name(
+ repo: Repository, graph_name: str | None
+):
+ with pytest.raises(ValueError):
+ repo.graphs.add(graph_name, "") # type: ignore
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py
new file mode 100644
index 000000000..5cdfbd5ce
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_clear.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib import URIRef
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_params",
+ [
+ ["http://example.com/graph", {"graph": "http://example.com/graph"}],
+ [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}],
+ ],
+)
+def test_repo_graph_store_clear(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ graph_name: str | URIRef,
+ expected_params: dict[str, str],
+):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_delete = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete)
+ repo.graphs.clear(graph_name)
+ mock_httpx_delete.assert_called_once_with(
+ "/repositories/test-repo/rdf-graphs/service",
+ params=expected_params,
+ )
+
+
+@pytest.mark.parametrize("graph_name", [None, ""])
+def test_repo_graph_store_clear_invalid_graph_name(
+ repo: Repository, graph_name: str | None
+):
+ with pytest.raises(ValueError):
+ repo.graphs.clear(graph_name) # type: ignore
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py
new file mode 100644
index 000000000..dedee433b
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_get.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib import Graph, URIRef
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_params",
+ [
+ ["http://example.com/graph", {"graph": "http://example.com/graph"}],
+ [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}],
+ ],
+)
+def test_repo_graph_store_get(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ graph_name: str | URIRef,
+ expected_params: dict[str, str],
+):
+    data = "<urn:a> <urn:b> <urn:c> ."
+ mock_response = Mock(spec=httpx.Response, text=data)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ graph = repo.graphs.get(graph_name)
+ headers = {"Accept": "application/n-triples"}
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/rdf-graphs/service",
+ headers=headers,
+ params=expected_params,
+ )
+ assert isinstance(graph, Graph)
+ assert graph.isomorphic(Graph().parse(data=data))
+ assert graph.identifier == URIRef(graph_name)
+
+
+@pytest.mark.parametrize("graph_name", [None, ""])
+def test_repo_graph_store_get_invalid_graph_name(
+ repo: Repository, graph_name: str | None
+):
+ with pytest.raises(ValueError):
+ repo.graphs.get(graph_name) # type: ignore
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py
new file mode 100644
index 000000000..6569de25c
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_graph_store_overwrite.py
@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from unittest.mock import ANY, Mock
+
+import pytest
+
+from rdflib import URIRef
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_params",
+ [
+ ["http://example.com/graph", {"graph": "http://example.com/graph"}],
+ [URIRef("http://example.com/graph"), {"graph": "http://example.com/graph"}],
+ ],
+)
+def test_repo_graph_store_overwrite(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ graph_name: str | URIRef,
+ expected_params: dict[str, str],
+):
+    data = "<urn:a> <urn:b> <urn:c> ."
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ repo.graphs.overwrite(graph_name, data)
+ headers = {"Content-Type": "application/n-triples"}
+ mock_httpx_put.assert_called_once_with(
+ "/repositories/test-repo/rdf-graphs/service",
+ headers=headers,
+ params=expected_params,
+ content=ANY,
+ )
+
+
+@pytest.mark.parametrize("graph_name", [None, ""])
+def test_repo_graph_store_overwrite_invalid_graph_name(
+ repo: Repository, graph_name: str | None
+):
+ with pytest.raises(ValueError):
+ repo.graphs.overwrite(graph_name, "") # type: ignore
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_graphs.py b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py
new file mode 100644
index 000000000..f554cd9ed
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_graphs.py
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError
+from rdflib.term import BNode, IdentifiedNode, URIRef
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.parametrize(
+ "response_dict, expected_result",
+ [
+ [{"results": {"bindings": []}}, set()],
+ [
+ {
+ "results": {
+ "bindings": [
+ {
+ "contextID": {
+ "value": "http://example.com/graph",
+ "type": "uri",
+ }
+ }
+ ]
+ }
+ },
+ {URIRef("http://example.com/graph")},
+ ],
+ [
+ {
+ "results": {
+ "bindings": [{"contextID": {"value": "bnode1", "type": "bnode"}}]
+ }
+ },
+ {BNode("bnode1")},
+ ],
+ [
+ {
+ "results": {
+ "bindings": [
+ {"contextID": {"value": "bnode1", "type": "bnode"}},
+ {"contextID": {"value": "urn:blah", "type": "uri"}},
+ ]
+ }
+ },
+ {BNode("bnode1"), URIRef("urn:blah")},
+ ],
+ ],
+)
+def test_repo_graphs(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ response_dict: dict,
+ expected_result: set[IdentifiedNode],
+):
+ """Test that the graphs are returned correctly."""
+ mock_response = Mock(spec=httpx.Response, json=lambda: response_dict)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ result = repo.graph_names()
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/contexts",
+ headers={"Accept": "application/sparql-results+json"},
+ )
+ assert set(result) == expected_result
+
+
+@pytest.mark.parametrize(
+ "response_dict, expected_error",
+ [
+ [{}, RepositoryFormatError],
+ [
+ {
+ "results": {
+ "bindings": [
+ {"contextID": {"type": "invalid", "value": "urn:example"}}
+ ]
+ }
+ },
+ RepositoryFormatError,
+ ],
+ ],
+)
+def test_repo_graphs_invalid_response(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ response_dict: dict,
+ expected_error: type[Exception],
+):
+ """Test that an error is raised when the response is invalid."""
+ mock_response = Mock(spec=httpx.Response, json=lambda: response_dict)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ with pytest.raises(expected_error):
+ repo.graph_names()
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/contexts",
+ headers={"Accept": "application/sparql-results+json"},
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py
new file mode 100644
index 000000000..5253f2115
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_clear.py
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+def test_repo_namespace_clear(repo: Repository, monkeypatch: pytest.MonkeyPatch):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_delete = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete)
+ repo.namespaces.clear()
+ mock_httpx_delete.assert_called_once_with(
+ "/repositories/test-repo/namespaces",
+ headers={"Accept": "application/sparql-results+json"},
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py
new file mode 100644
index 000000000..5d1d6a19e
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_get.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib import Dataset, URIRef
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ NamespaceListingResult,
+ RDF4JNamespaceManager,
+ Repository,
+ )
+
+
+@pytest.mark.parametrize(
+ "prefix, response_text, response_status_code, expected_value",
+ [
+ [
+ "skos",
+ "http://www.w3.org/2004/02/skos/core#",
+ 200,
+ "http://www.w3.org/2004/02/skos/core#",
+ ],
+ ["non-existent", "Undefined prefix: non-existent", 404, None],
+ ],
+)
+def test_repo_namespace_get(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ prefix: str,
+ response_text: str,
+ response_status_code: int,
+ expected_value: str | None,
+):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ repo.namespaces.get(prefix)
+ mock_httpx_get.assert_called_once_with(
+ f"/repositories/test-repo/namespaces/{prefix}",
+ headers={"Accept": "text/plain"},
+ )
+
+
+@pytest.mark.parametrize("prefix", [None, ""])
+def test_repo_namespace_get_error(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch, prefix: str | None
+):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ with pytest.raises(ValueError):
+ repo.namespaces.get(prefix) # type: ignore
+
+
+def test_repo_get_with_namespace_binding(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch
+):
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ monkeypatch.setattr(
+ RDF4JNamespaceManager,
+ "list",
+ lambda _: [
+ NamespaceListingResult(prefix="test", namespace="http://example.org/test/")
+ ],
+ )
+ ds = repo.get()
+ assert isinstance(ds, Dataset)
+ assert ("test", URIRef("http://example.org/test/")) in set(ds.namespaces())
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py
new file mode 100644
index 000000000..88c70a124
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_list.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.term import IdentifiedNode
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import NamespaceListingResult, Repository
+ from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError
+
+ @pytest.mark.parametrize(
+ "response_dict, expected_result",
+ [
+ [{"results": {"bindings": []}}, set()],
+ [
+ {
+ "results": {
+ "bindings": [
+ {
+ "prefix": {"value": "test"},
+ "namespace": {"value": "http://example.com/test/"},
+ },
+ {
+ "prefix": {"value": "test2"},
+ "namespace": {"value": "http://example.com/test2/"},
+ },
+ ]
+ }
+ },
+ {
+ NamespaceListingResult(
+ prefix="test", namespace="http://example.com/test/"
+ ),
+ NamespaceListingResult(
+ prefix="test2", namespace="http://example.com/test2/"
+ ),
+ },
+ ],
+ ],
+ )
+ def test_repo_namespace_list(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ response_dict: dict,
+ expected_result: set[IdentifiedNode],
+ ):
+ mock_response = Mock(spec=httpx.Response, json=lambda: response_dict)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ result = repo.namespaces.list()
+ assert set(result) == expected_result
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/namespaces",
+ headers={"Accept": "application/sparql-results+json"},
+ )
+
+ def test_repo_namespace_list_error(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch
+ ):
+ response_dict: dict[str, str] = {}
+
+ mock_response = Mock(spec=httpx.Response, json=lambda: response_dict)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ with pytest.raises(RepositoryFormatError):
+ repo.namespaces.list()
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/namespaces",
+ headers={"Accept": "application/sparql-results+json"},
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py
new file mode 100644
index 000000000..22c2fad66
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_remove.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.parametrize(
+ "prefix",
+ [
+        "skos",
+        "schema",
+ ],
+)
+def test_repo_namespace_remove(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ prefix: str,
+):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_remove = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_remove)
+ repo.namespaces.remove(prefix)
+ mock_httpx_remove.assert_called_once_with(
+ f"/repositories/test-repo/namespaces/{prefix}",
+ )
+
+
+@pytest.mark.parametrize("prefix", [None, ""])
+def test_repo_namespace_remove_error(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch, prefix: str | None
+):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_remove = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_remove)
+ with pytest.raises(ValueError):
+ repo.namespaces.remove(prefix) # type: ignore
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py
new file mode 100644
index 000000000..9adc820ce
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_namespace_set.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.parametrize(
+ "prefix, namespace",
+ [["test", "http://example.com/test"], ["test2", "http://example.com/test2"]],
+)
+def test_repo_namespace_set(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ prefix: str,
+ namespace: str,
+):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ repo.namespaces.set(prefix, namespace)
+ mock_httpx_put.assert_called_once_with(
+ f"/repositories/test-repo/namespaces/{prefix}",
+ headers={"Content-Type": "text/plain"},
+ content=namespace,
+ )
+
+
+@pytest.mark.parametrize(
+ "prefix, namespace",
+ [
+ [None, "http://example.com/test"],
+ ["test", None],
+ ["", "http://example.com/test"],
+ ["test", ""],
+ [None, None],
+ ["", ""],
+ ],
+)
+def test_repo_namespace_set_error(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch, prefix: str, namespace: str
+):
+ with pytest.raises(ValueError):
+ repo.namespaces.set(prefix, namespace)
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py
new file mode 100644
index 000000000..f45cfa33c
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_overwrite.py
@@ -0,0 +1,286 @@
+from __future__ import annotations
+
+import io
+import pathlib
+from typing import Iterable
+from unittest.mock import ANY, Mock
+
+import pytest
+
+from rdflib import Dataset, Graph, URIRef
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import Repository
+
+
+@pytest.mark.parametrize("class_type", [Graph, Dataset])
+def test_repo_overwrite_graph(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch, class_type: type[Graph | Dataset]
+):
+ """Test that the overwrite method handles Graphs and Datasets as data input."""
+ file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq"
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params: dict[str, str] = {}
+ graph = class_type().parse(file_path)
+ repo.overwrite(graph)
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
+ call_args = mock.call_args
+ content = call_args.kwargs["content"]
+ assert isinstance(content, io.BytesIO)
+ assert content.closed
+
+
+def test_repo_overwrite_file_path(repo: Repository, monkeypatch: pytest.MonkeyPatch):
+ """Test that a file path is treated as a file to be read and closed when done."""
+ file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq"
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params: dict[str, str] = {}
+ repo.overwrite(str(file_path), content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
+ call_args = mock.call_args
+ content = call_args.kwargs["content"]
+ assert hasattr(content, "read")
+ assert hasattr(content, "name")
+ assert content.name == str(file_path)
+ assert content.closed
+
+
+def test_repo_overwrite_buffered_reader(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch
+):
+ """Test that a file-like object is read and not closed when done."""
+ file_path = pathlib.Path(__file__).parent.parent.parent / "data/quads-1.nq"
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ with open(file_path, "rb") as file:
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params: dict[str, str] = {}
+ repo.overwrite(file, content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=file,
+ )
+ call_args = mock.call_args
+ content = call_args.kwargs["content"]
+ assert not content.closed
+
+
+@pytest.mark.parametrize(
+ "data",
+ [
+ " .",
+ b" .",
+ ],
+)
+def test_repo_overwrite_data(
+ repo: Repository, data: str | bytes, monkeypatch: pytest.MonkeyPatch
+):
+ """Test that str and bytes data is treated as content."""
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params: dict[str, str] = {}
+ repo.overwrite(data, content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
+ call_args = mock.call_args
+ content = call_args.kwargs["content"]
+ assert isinstance(content, io.BytesIO)
+ assert not content.closed
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_graph_name_param",
+ [
+ [DATASET_DEFAULT_GRAPH_ID, "null"],
+ ["http://example.com/graph", ""],
+ [URIRef("http://example.com/graph"), ""],
+ [None, None],
+ ],
+)
+def test_repo_overwrite_graph_name(
+ repo: Repository,
+ graph_name: URIRef | Iterable[URIRef] | str | None,
+ expected_graph_name_param: str,
+ monkeypatch: pytest.MonkeyPatch,
+):
+ """Test that graph_name is passed as a query parameter and correctly handles the different type variations."""
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ if graph_name is None:
+ params: dict[str, str] = {}
+ else:
+ params = {"context": expected_graph_name_param}
+ repo.overwrite("", graph_name=graph_name, content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
+
+
+@pytest.mark.parametrize(
+ "base_uri, expected_params",
+ [
+ ["", {"baseURI": ""}],
+ ["http://example.com", {"baseURI": "http://example.com"}],
+ [None, {}],
+ ],
+)
+def test_repo_overwrite_base_uri(
+ repo: Repository,
+ base_uri: str | None,
+ expected_params: dict[str, str],
+ monkeypatch: pytest.MonkeyPatch,
+):
+ """Test that base_uri is passed as a query parameter."""
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ repo.overwrite("", base_uri=base_uri, content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=expected_params,
+ content=ANY,
+ )
+
+
+def test_repo_overwrite_nonexistent_file_path(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch
+):
+ """Test that a string that looks like a file path but doesn't exist is treated as content."""
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params: dict[str, str] = {}
+ nonexistent_path = "/nonexistent/path/file.nq"
+ repo.overwrite(nonexistent_path, content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
+ call_args = mock.call_args
+ content = call_args.kwargs["content"]
+ assert isinstance(content, io.BytesIO)
+ assert not content.closed
+
+
+def test_repo_overwrite_string_with_newline(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch
+):
+ """Test that a string with newlines is treated as content, not a file path."""
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params: dict[str, str] = {}
+ data_with_newline = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n<http://example.com/s2> <http://example.com/p2> <http://example.com/o2> ."
+ repo.overwrite(data_with_newline, content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
+ call_args = mock.call_args
+ content = call_args.kwargs["content"]
+ assert isinstance(content, io.BytesIO)
+ assert not content.closed
+
+
+def test_repo_overwrite_long_string(repo: Repository, monkeypatch: pytest.MonkeyPatch):
+ """Test that a string longer than 260 characters is treated as content, not a file path."""
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params: dict[str, str] = {}
+ # Create a string longer than 260 characters
+ long_string = "a" * 261
+ repo.overwrite(long_string, content_type="application/n-quads")
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
+ call_args = mock.call_args
+ content = call_args.kwargs["content"]
+ assert isinstance(content, io.BytesIO)
+ assert not content.closed
+
+
+def test_repo_overwrite_graph_name_and_base_uri(
+ repo: Repository, monkeypatch: pytest.MonkeyPatch
+):
+ """Test that both graph_name and base_uri can be provided together."""
+ mock = Mock()
+ monkeypatch.setattr(httpx.Client, "put", mock)
+ headers = {
+ "Content-Type": "application/n-quads",
+ }
+ params = {
+ "context": "",
+ "baseURI": "http://example.com/base",
+ }
+ repo.overwrite(
+ "",
+ graph_name="http://example.com/graph",
+ base_uri="http://example.com/base",
+ content_type="application/n-quads",
+ )
+ mock.assert_called_once_with(
+ "/repositories/test-repo/statements",
+ headers=headers,
+ params=params,
+ content=ANY,
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_query.py b/test/test_rdf4j/test_unit/repository/test_repo_query.py
new file mode 100644
index 000000000..717524b4e
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_query.py
@@ -0,0 +1,123 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib import Graph
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.term import URIRef, Variable
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+@pytest.mark.parametrize(
+ "query, accept_header, response_text, expected_result_type",
+ [
+ [
+ "select ?s where { ?s ?p ?o }",
+ "application/sparql-results+json",
+ """
+ {
+ "head": {
+ "vars": ["s"]
+ },
+ "results": {
+ "bindings": [{"s": {"value": "http://example.com/s", "type": "uri"}}]
+ }
+ }
+ """,
+ "SELECT",
+ ],
+ [
+ "ask where { ?s ?p ?o }",
+ "application/sparql-results+json",
+ '{ "boolean": true }',
+ "ASK",
+ ],
+ [
+ "construct { ?s ?p ?o } where { ?s ?p ?o }",
+ "application/n-triples",
+ " .",
+ "CONSTRUCT",
+ ],
+ [
+ "describe ?s",
+ "application/n-triples",
+ " .",
+ "CONSTRUCT",
+ ],
+ ],
+)
+def test_repo_query(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ query: str,
+ accept_header: str,
+ response_text: str,
+ expected_result_type,
+):
+ mock_response = Mock(
+ spec=httpx.Response,
+ content=response_text.encode("utf-8"),
+ headers={"Content-Type": accept_header},
+ )
+ mock_httpx_post = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "post", mock_httpx_post)
+ result = repo.query(query)
+ assert result.type == expected_result_type
+ headers = {"Accept": accept_header, "Content-Type": "application/sparql-query"}
+ mock_httpx_post.assert_called_once_with(
+ "/repositories/test-repo",
+ headers=headers,
+ content=query,
+ )
+
+ if expected_result_type == "SELECT":
+ assert len(result) == 1
+ s_var = Variable("s")
+ assert result.vars == [s_var]
+ assert result.bindings[0].get(s_var) == URIRef("http://example.com/s")
+ elif expected_result_type == "ASK":
+ assert result.askAnswer is True
+ elif expected_result_type == "CONSTRUCT":
+ assert len(result.graph) == 1
+ assert (
+ Graph()
+ .parse(
+ data=" ."
+ )
+ .isomorphic(result.graph)
+ )
+ else:
+ assert False, "Unexpected result type"
+
+
+def test_repo_query_kwargs(repo: Repository, monkeypatch: pytest.MonkeyPatch):
+ """The query method uses GET if a keyword argument is provided."""
+ mock_response = Mock(
+ spec=httpx.Response,
+ content=b" .",
+ headers={"Content-Type": "application/n-triples"},
+ )
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ query = "construct { ?s ?p ?o } where { ?s ?p ?o }"
+ repo.query(query, infer="true")
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo",
+ headers={
+ "Accept": "application/n-triples",
+ "Content-Type": "application/sparql-query",
+ },
+ params={"query": query, "infer": "true"},
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_size.py b/test/test_rdf4j/test_unit/repository/test_repo_size.py
new file mode 100644
index 000000000..5a7c4fb85
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_size.py
@@ -0,0 +1,89 @@
+from __future__ import annotations
+
+import typing as t
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import RepositoryFormatError
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
+from rdflib.term import URIRef
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_graph_name_param",
+ [
+ [DATASET_DEFAULT_GRAPH_ID, "null"],
+ ["http://example.com/graph", ""],
+ [URIRef("http://example.com/graph"), ""],
+ [None, None],
+ ],
+)
+def test_repo_size_graph_name(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ graph_name: URIRef | t.Iterable[URIRef] | str | None,
+ expected_graph_name_param: str,
+):
+ """
+ Test that graph_name is passed as a query parameter and correctly handles the
+ different type variations.
+ """
+ mock_response = Mock(spec=httpx.Response, text="0")
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+ if graph_name is None:
+ params = {}
+ else:
+ params = {"context": expected_graph_name_param}
+ size = repo.size(graph_name=graph_name)
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/size",
+ params=params,
+ )
+ assert size == 0
+
+
+@pytest.mark.parametrize(
+ "response_value, expected_parsed_value",
+ [
+ ["0", 0],
+ ["123", 123],
+ ["-100", RepositoryFormatError],
+ ["foo", RepositoryFormatError],
+ ],
+)
+def test_repo_size_values(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+ response_value: str,
+ expected_parsed_value: int | type[RepositoryFormatError],
+):
+ """Test that the return value of the response is correctly parsed."""
+ mock_response = Mock(spec=httpx.Response, text=response_value)
+ mock_httpx_get = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "get", mock_httpx_get)
+
+ if isinstance(expected_parsed_value, int):
+ size = repo.size()
+ assert size == expected_parsed_value
+ else:
+ with pytest.raises(expected_parsed_value):
+ repo.size()
+
+ mock_httpx_get.assert_called_once_with(
+ "/repositories/test-repo/size",
+ params={},
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py
new file mode 100644
index 000000000..37dc5472a
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_commit.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import TransactionClosedError
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+def test_repo_transaction_commit(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+):
+ transaction_url = "http://example.com/transaction/1"
+ mock_transaction_create_response = Mock(
+ spec=httpx.Response, headers={"Location": transaction_url}
+ )
+ mock_httpx_post = Mock(return_value=mock_transaction_create_response)
+ monkeypatch.setattr(httpx.Client, "post", mock_httpx_post)
+ with repo.transaction() as txn:
+ # Ensure the transaction is created.
+ assert txn.url == transaction_url
+ mock_httpx_post.assert_called_once_with(
+ "/repositories/test-repo/transactions",
+ )
+
+ # Mock commit response.
+ mock_transaction_commit_response = Mock(spec=httpx.Response, status_code=200)
+ mock_httpx_put = Mock(return_value=mock_transaction_commit_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ # Explicitly commit. This closes the transaction.
+ txn.commit()
+ mock_httpx_put.assert_called_once_with(
+ transaction_url,
+ params={"action": "COMMIT"},
+ )
+ # Ensure it is closed.
+ assert txn.url is None
+ with pytest.raises(TransactionClosedError):
+ txn.ping()
+
+ with repo.transaction() as txn:
+ txn.ping()
+
+ with pytest.raises(TransactionClosedError):
+ # Ensure that the context manager closes the transaction.
+ txn.ping()
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py
new file mode 100644
index 000000000..432d7b709
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_delete.py
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+from unittest.mock import ANY, Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Transaction,
+ )
+
+
+@pytest.mark.parametrize(
+ "base_uri, content_type, expected_headers, expected_params",
+ [
+ [None, None, {"Content-Type": "application/n-quads"}, {"action": "DELETE"}],
+ [
+ "http://example.com/",
+ "text/turtle",
+ {"Content-Type": "text/turtle"},
+ {"action": "DELETE", "baseURI": "http://example.com/"},
+ ],
+ ],
+)
+def test_repo_transaction_delete(
+ txn: Transaction,
+ monkeypatch: pytest.MonkeyPatch,
+ base_uri: str | None,
+ content_type: str | None,
+ expected_headers: dict[str, str],
+ expected_params: dict[str, str],
+):
+ mock_response = Mock(spec=httpx.Response)
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ txn.delete("", base_uri, content_type)
+
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ headers=expected_headers,
+ params=expected_params,
+ content=ANY,
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py
new file mode 100644
index 000000000..a853977bb
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_get.py
@@ -0,0 +1,223 @@
+from __future__ import annotations
+
+import typing as t
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib import Dataset, Graph, URIRef
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.graph import DATASET_DEFAULT_GRAPH_ID
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ ObjectType,
+ PredicateType,
+ RDF4JNamespaceManager,
+ SubjectType,
+ Transaction,
+ )
+
+
+def test_repo_transaction_get(txn: Transaction, monkeypatch: pytest.MonkeyPatch):
+ mock_response = Mock(
+ spec=httpx.Response,
+ text=" .",
+ )
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+ txn.get(pred=URIRef("http://example.org/p"))
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ headers={"Accept": "application/n-quads"},
+ params={"action": "GET", "pred": ""},
+ )
+
+
+@pytest.mark.parametrize(
+ "content_type, data, expected_class_type",
+ [
+ [
+ None,
+ " .",
+ Dataset,
+ ],
+ [
+ "application/trig",
+ " { . }",
+ Dataset,
+ ],
+ [
+ "application/n-triples",
+ " .",
+ Graph,
+ ],
+ [
+ "text/turtle",
+ " .",
+ Graph,
+ ],
+ [
+ "application/rdf+xml",
+ """
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:ns0="http://example.com/">
+  <rdf:Description rdf:about="http://example.com/s">
+    <ns0:p rdf:resource="http://example.com/o"/>
+  </rdf:Description>
+</rdf:RDF>
+""",
+ Graph,
+ ],
+ ],
+)
+def test_repo_transaction_get_content_type(
+ txn: Transaction,
+ monkeypatch: pytest.MonkeyPatch,
+ content_type: str | None,
+ data: str,
+ expected_class_type: type,
+):
+ """
+ Test that the content type is set correctly on the request and that the response is
+ parsed correctly.
+ """
+ mock_response = Mock(spec=httpx.Response, text=data)
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+
+ result = txn.get(content_type=content_type)
+ headers = {"Accept": content_type or "application/n-quads"}
+ params: dict[str, str] = {"action": "GET"}
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ headers=headers,
+ params=params,
+ )
+ assert isinstance(result, expected_class_type)
+
+
+@pytest.mark.parametrize(
+ "graph_name, expected_graph_name_param",
+ [
+ [DATASET_DEFAULT_GRAPH_ID, "null"],
+ ["http://example.com/graph", ""],
+ [URIRef("http://example.com/graph"), ""],
+ [None, None],
+ ],
+)
+def test_repo_transaction_get_graph_name(
+ txn: Transaction,
+ monkeypatch: pytest.MonkeyPatch,
+ graph_name: URIRef | t.Iterable[URIRef] | str | None,
+ expected_graph_name_param: str,
+):
+ """
+ Test that graph_name is passed as a query parameter and correctly handles the
+ different type variations.
+ """
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+ headers = {
+ "Accept": "application/n-quads",
+ }
+ if graph_name is None:
+ params = {}
+ else:
+ params = {"context": expected_graph_name_param}
+ params["action"] = "GET"
+ txn.get(graph_name=graph_name)
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ headers=headers,
+ params=params,
+ )
+
+
+@pytest.mark.parametrize("infer, expected_value", [[True, KeyError], [False, "false"]])
+def test_repo_transaction_get_infer(
+ txn: Transaction,
+ monkeypatch: pytest.MonkeyPatch,
+ infer: bool,
+ expected_value: Exception | str,
+):
+ """Test that the "infer" query parameter is set correctly."""
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+ headers = {
+ "Accept": "application/n-quads",
+ }
+
+ params = {"action": "GET"}
+ if isinstance(expected_value, str):
+ params["infer"] = expected_value
+
+ txn.get(infer=infer)
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ headers=headers,
+ params=params,
+ )
+
+
+@pytest.mark.parametrize(
+ "subj, pred, obj, expected_params",
+ [
+ [
+ URIRef("http://example.com/s"),
+ URIRef("http://example.com/p"),
+ URIRef("http://example.com/o"),
+ {
+ "action": "GET",
+ "subj": "",
+ "pred": "",
+ "obj": "",
+ },
+ ],
+ [
+ None,
+ None,
+ None,
+ {
+ "action": "GET",
+ },
+ ],
+ ],
+)
+def test_repo_transaction_get_spo(
+ txn: Transaction,
+ monkeypatch: pytest.MonkeyPatch,
+ subj: SubjectType,
+ pred: PredicateType,
+ obj: ObjectType,
+ expected_params: dict[str, str],
+):
+ """Test that the subj, pred, and obj query parameters are set correctly."""
+ mock_response = Mock(spec=httpx.Response, text="")
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ monkeypatch.setattr(RDF4JNamespaceManager, "list", lambda _: [])
+ headers = {
+ "Accept": "application/n-quads",
+ }
+
+ txn.get(subj=subj, pred=pred, obj=obj)
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ headers=headers,
+ params=expected_params,
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py
new file mode 100644
index 000000000..82ef0c130
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_ping.py
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import TransactionPingError
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Transaction,
+ )
+
+
+def test_repo_transaction_ping(txn: Transaction, monkeypatch: pytest.MonkeyPatch):
+ # Test a successful ping.
+ mock_ping_response = Mock(spec=httpx.Response, status_code=200)
+ mock_httpx_put = Mock(return_value=mock_ping_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ txn.ping()
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ params={"action": "PING"},
+ )
+
+ # Ensure it raises TransactionPingError.
+ mock_ping_response = Mock(spec=httpx.Response, status_code=405)
+ mock_httpx_put = Mock(return_value=mock_ping_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ with pytest.raises(TransactionPingError):
+ txn.ping()
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py
new file mode 100644
index 000000000..45bcdcfcc
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_query.py
@@ -0,0 +1,100 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.graph import Graph
+from rdflib.term import URIRef, Variable
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Transaction,
+ )
+
+
+@pytest.mark.parametrize(
+ "query, accept_header, response_text, expected_result_type",
+ [
+ [
+ "select ?s where { ?s ?p ?o }",
+ "application/sparql-results+json",
+ """
+ {
+ "head": {
+ "vars": ["s"]
+ },
+ "results": {
+ "bindings": [{"s": {"value": "http://example.com/s", "type": "uri"}}]
+ }
+ }
+ """,
+ "SELECT",
+ ],
+ [
+ "ask where { ?s ?p ?o }",
+ "application/sparql-results+json",
+ '{ "boolean": true }',
+ "ASK",
+ ],
+ [
+ "construct { ?s ?p ?o } where { ?s ?p ?o }",
+ "application/n-triples",
+ " .",
+ "CONSTRUCT",
+ ],
+ [
+ "describe ?s",
+ "application/n-triples",
+ " .",
+ "CONSTRUCT",
+ ],
+ ],
+)
+def test_repo_transaction_query(
+ txn: Transaction,
+ monkeypatch: pytest.MonkeyPatch,
+ query: str,
+ accept_header: str,
+ response_text: str,
+ expected_result_type,
+):
+ mock_response = Mock(
+ spec=httpx.Response,
+ content=response_text.encode("utf-8"),
+ headers={"Content-Type": accept_header},
+ )
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ result = txn.query(query, infer="true")
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ params={"action": "QUERY", "query": query, "infer": "true"},
+ headers={"Accept": accept_header},
+ )
+
+ if expected_result_type == "SELECT":
+ assert len(result) == 1
+ s_var = Variable("s")
+ assert result.vars == [s_var]
+ assert result.bindings[0].get(s_var) == URIRef("http://example.com/s")
+ elif expected_result_type == "ASK":
+ assert result.askAnswer is True
+ elif expected_result_type == "CONSTRUCT":
+ assert len(result.graph) == 1
+ assert (
+ Graph()
+ .parse(
+ data=" ."
+ )
+ .isomorphic(result.graph)
+ )
+ else:
+ assert False, "Unexpected result type"
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py
new file mode 100644
index 000000000..7fb54b4d9
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_rollback.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+from rdflib.contrib.rdf4j.exceptions import TransactionClosedError, TransactionPingError
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Repository,
+ )
+
+
+def test_repo_transaction_rollback(
+ repo: Repository,
+ monkeypatch: pytest.MonkeyPatch,
+):
+ transaction_url = "http://example.com/transaction/1"
+ mock_transaction_create_response = Mock(
+ spec=httpx.Response, headers={"Location": transaction_url}
+ )
+ mock_httpx_post = Mock(return_value=mock_transaction_create_response)
+ monkeypatch.setattr(httpx.Client, "post", mock_httpx_post)
+ with repo.transaction() as txn:
+ mock_rollback_response = Mock(spec=httpx.Response, status_code=204)
+ mock_httpx_delete = Mock(return_value=mock_rollback_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete)
+ txn.rollback()
+ assert txn.url is None
+ mock_httpx_delete.assert_called_once_with(
+ transaction_url,
+ )
+ with pytest.raises(TransactionClosedError):
+ txn.ping()
+
+ mock_rollback_response = Mock(spec=httpx.Response, status_code=204)
+ mock_httpx_delete = Mock(return_value=mock_rollback_response)
+ monkeypatch.setattr(httpx.Client, "delete", mock_httpx_delete)
+ with pytest.raises(TransactionPingError):
+ with repo.transaction() as txn:
+ mock_response = Mock(spec=httpx.Response, status_code=405)
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ txn.ping()
+
+ # Confirm transaction rollback is performed automatically.
+ mock_httpx_delete.assert_called_once_with(
+ transaction_url,
+ )
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py
new file mode 100644
index 000000000..de5b214e8
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_size.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Transaction,
+ )
+
+
+def test_repo_transaction_size(txn: Transaction, monkeypatch: pytest.MonkeyPatch):
+ mock_response = Mock(spec=httpx.Response, text="10")
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ size = txn.size()
+ mock_httpx_put.assert_called_once_with(
+ txn.url,
+ params={"action": "SIZE"},
+ )
+ assert size == 10
diff --git a/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py
new file mode 100644
index 000000000..e14bb59f8
--- /dev/null
+++ b/test/test_rdf4j/test_unit/repository/test_repo_transaction_update.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from unittest.mock import Mock
+
+import pytest
+
+from rdflib.contrib.rdf4j import has_httpx
+
+pytestmark = pytest.mark.skipif(
+ not has_httpx, reason="skipping rdf4j tests, httpx not available"
+)
+
+if has_httpx:
+ import httpx
+
+ from rdflib.contrib.rdf4j.client import (
+ Transaction,
+ )
+
+
+def test_repo_update(txn: Transaction, monkeypatch: pytest.MonkeyPatch):
+ mock_response = Mock(spec=httpx.Response, status_code=204)
+ mock_httpx_put = Mock(return_value=mock_response)
+ monkeypatch.setattr(httpx.Client, "put", mock_httpx_put)
+ query = "insert data {