From 82281a77255a63b6c76f321b9eeea27243fd6ff6 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Fri, 29 Aug 2025 07:24:17 +0000 Subject: [PATCH 01/14] pathlike test in integration test --- test/integration/test_service_onprem.py | 43 +++++++++++++++++++------ 1 file changed, 33 insertions(+), 10 deletions(-) diff --git a/test/integration/test_service_onprem.py b/test/integration/test_service_onprem.py index 93b3e383..7c4e03a4 100644 --- a/test/integration/test_service_onprem.py +++ b/test/integration/test_service_onprem.py @@ -34,6 +34,7 @@ as_bytes, as_string, ) +import exasol.bucketfs as bfs @contextmanager @@ -341,6 +342,16 @@ def test_upload_and_udf_path( verify=backend_aware_bucketfs_params["verify"], service=backend_aware_bucketfs_params["url"], ) + file_pathlike = bfs.path.build_path( + backend=bfs.path.StorageBackend.onprem, + url=backend_aware_bucketfs_params["url"], + bucket_name=backend_aware_bucketfs_params["bucket_name"], + service_name=backend_aware_bucketfs_params["service_name"], + path=file_name, + username=backend_aware_bucketfs_params["username"], + password=backend_aware_bucketfs_params["password"], + verify=backend_aware_bucketfs_params["verify"], + ) elif backend == BACKEND_SAAS: bucket = SaaSBucket( url=backend_aware_bucketfs_params["url"], @@ -348,14 +359,25 @@ def test_upload_and_udf_path( database_id=backend_aware_bucketfs_params["database_id"], pat=backend_aware_bucketfs_params["pat"], ) + file_pathlike = bfs.path.build_path( + backend=bfs.path.StorageBackend.saas, + url=backend_aware_bucketfs_params["url"], + account_id=backend_aware_bucketfs_params["account_id"], + database_id=backend_aware_bucketfs_params["database_id"], + pat=backend_aware_bucketfs_params["pat"], + path=file_name, # <--- The file here, too! 
+ ) content = "".join("1" for _ in range(0, 10)) try: bucket.upload(file_name, content) assert file_name in bucket.files, "File upload failed" # Generate UDF path - udf_path = bucket.udf_path - assert udf_path is not None, "UDF path generation failed" + bucket_udf_path = bucket.udf_path + assert bucket_udf_path is not None, "UDF path generation failed" + + file_udf_path = file_pathlike.as_udf_path() + assert file_udf_path is not None, "UDF path like udf path generation failed" conn = pyexasol.connect(**backend_aware_database_params) @@ -377,10 +399,10 @@ def run(ctx): ) conn.execute(create_udf_sql) # Verify the path exists inside the UDF - result = conn.execute(f"SELECT CHECK_FILE_EXISTS_UDF('{udf_path}')").fetchone()[ - 0 - ] - assert result == True + res1 = conn.execute(f"SELECT CHECK_FILE_EXISTS_UDF('{bucket_udf_path}/{file_name}')").fetchone()[0] + res2 = conn.execute(f"SELECT CHECK_FILE_EXISTS_UDF('{file_udf_path}')").fetchone()[0] + assert res1 is True + assert res2 is True # return the content of the file create_read_udf_sql = dedent( @@ -397,10 +419,11 @@ def run(ctx): ) conn.execute(create_read_udf_sql) - file_content = conn.execute( - f"SELECT READ_FILE_CONTENT_UDF('{udf_path}/{file_name}')" - ).fetchone()[0] - assert file_content == content + content1 = conn.execute(f"SELECT READ_FILE_CONTENT_UDF('{bucket_udf_path}/{file_name}')").fetchone()[0] + content2 = conn.execute(f"SELECT READ_FILE_CONTENT_UDF('{file_udf_path}')").fetchone()[0] + assert content1 == content + assert content2 == content + except Exception as e: print(e) From 14cb430a240af8f3b908f1de3112d4b3e4559f5d Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Fri, 29 Aug 2025 07:26:34 +0000 Subject: [PATCH 02/14] added change log --- doc/changes/unreleased.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/changes/unreleased.md b/doc/changes/unreleased.md index 2911aa26..21d95d1e 100644 --- a/doc/changes/unreleased.md +++ b/doc/changes/unreleased.md @@ -1,4 +1,4 
@@ # Unreleased ## Refactorings - * #186: Integration test for correctness of UDF path generation \ No newline at end of file + * #186: Integration test for correctness of UDF path generation, using as_udf_path and pathlike \ No newline at end of file From 2ca644a6ed3fd6c396e9efac59705f0ffa8a1892 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Fri, 29 Aug 2025 07:28:39 +0000 Subject: [PATCH 03/14] formatted --- test/integration/test_service_onprem.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/test/integration/test_service_onprem.py b/test/integration/test_service_onprem.py index 7c4e03a4..7a72299d 100644 --- a/test/integration/test_service_onprem.py +++ b/test/integration/test_service_onprem.py @@ -28,13 +28,13 @@ BACKEND_SAAS, ) +import exasol.bucketfs as bfs from exasol.bucketfs import ( Bucket, Service, as_bytes, as_string, ) -import exasol.bucketfs as bfs @contextmanager @@ -399,8 +399,12 @@ def run(ctx): ) conn.execute(create_udf_sql) # Verify the path exists inside the UDF - res1 = conn.execute(f"SELECT CHECK_FILE_EXISTS_UDF('{bucket_udf_path}/{file_name}')").fetchone()[0] - res2 = conn.execute(f"SELECT CHECK_FILE_EXISTS_UDF('{file_udf_path}')").fetchone()[0] + res1 = conn.execute( + f"SELECT CHECK_FILE_EXISTS_UDF('{bucket_udf_path}/{file_name}')" + ).fetchone()[0] + res2 = conn.execute( + f"SELECT CHECK_FILE_EXISTS_UDF('{file_udf_path}')" + ).fetchone()[0] assert res1 is True assert res2 is True @@ -419,8 +423,12 @@ def run(ctx): ) conn.execute(create_read_udf_sql) - content1 = conn.execute(f"SELECT READ_FILE_CONTENT_UDF('{bucket_udf_path}/{file_name}')").fetchone()[0] - content2 = conn.execute(f"SELECT READ_FILE_CONTENT_UDF('{file_udf_path}')").fetchone()[0] + content1 = conn.execute( + f"SELECT READ_FILE_CONTENT_UDF('{bucket_udf_path}/{file_name}')" + ).fetchone()[0] + content2 = conn.execute( + f"SELECT READ_FILE_CONTENT_UDF('{file_udf_path}')" + ).fetchone()[0] assert content1 == content assert 
content2 == content From f885f21dbeafe98facd8a9489ddae8619f8fa68b Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Mon, 1 Sep 2025 06:13:38 +0000 Subject: [PATCH 04/14] adding fixtures --- test/integration/test_service_onprem.py | 122 +-------------- test/integration/test_udf_path.py | 190 ++++++++++++++++++++++++ 2 files changed, 191 insertions(+), 121 deletions(-) create mode 100644 test/integration/test_udf_path.py diff --git a/test/integration/test_service_onprem.py b/test/integration/test_service_onprem.py index 7a72299d..97f82472 100644 --- a/test/integration/test_service_onprem.py +++ b/test/integration/test_service_onprem.py @@ -324,124 +324,4 @@ def test_any_log_message_get_emitted(httpserver, caplog): record for record in caplog.records if record.name == "exasol.bucketfs" ] # The log level DEBUG should emit at least one log message - assert log_records - - -def test_upload_and_udf_path( - backend_aware_bucketfs_params, backend_aware_database_params, backend -): - # Upload file to BucketFS - file_name = "Uploaded-File-From-Integration-test.bin" - - if backend == BACKEND_ONPREM: - bucket = Bucket( - name=backend_aware_bucketfs_params["bucket_name"], - service_name=backend_aware_bucketfs_params["service_name"], - password=backend_aware_bucketfs_params["password"], - username=backend_aware_bucketfs_params["username"], - verify=backend_aware_bucketfs_params["verify"], - service=backend_aware_bucketfs_params["url"], - ) - file_pathlike = bfs.path.build_path( - backend=bfs.path.StorageBackend.onprem, - url=backend_aware_bucketfs_params["url"], - bucket_name=backend_aware_bucketfs_params["bucket_name"], - service_name=backend_aware_bucketfs_params["service_name"], - path=file_name, - username=backend_aware_bucketfs_params["username"], - password=backend_aware_bucketfs_params["password"], - verify=backend_aware_bucketfs_params["verify"], - ) - elif backend == BACKEND_SAAS: - bucket = SaaSBucket( - url=backend_aware_bucketfs_params["url"], - 
account_id=backend_aware_bucketfs_params["account_id"], - database_id=backend_aware_bucketfs_params["database_id"], - pat=backend_aware_bucketfs_params["pat"], - ) - file_pathlike = bfs.path.build_path( - backend=bfs.path.StorageBackend.saas, - url=backend_aware_bucketfs_params["url"], - account_id=backend_aware_bucketfs_params["account_id"], - database_id=backend_aware_bucketfs_params["database_id"], - pat=backend_aware_bucketfs_params["pat"], - path=file_name, # <--- The file here, too! - ) - content = "".join("1" for _ in range(0, 10)) - try: - bucket.upload(file_name, content) - assert file_name in bucket.files, "File upload failed" - - # Generate UDF path - bucket_udf_path = bucket.udf_path - assert bucket_udf_path is not None, "UDF path generation failed" - - file_udf_path = file_pathlike.as_udf_path() - assert file_udf_path is not None, "UDF path like udf path generation failed" - - conn = pyexasol.connect(**backend_aware_database_params) - - conn.execute("CREATE SCHEMA IF NOT EXISTS transact;") - conn.execute("open schema transact;") - - # Create UDF SQL - create_udf_sql = dedent( - f""" - --/ - CREATE OR REPLACE PYTHON3 SCALAR - SCRIPT CHECK_FILE_EXISTS_UDF(file_path VARCHAR(200000)) - RETURNS BOOLEAN AS - import os - def run(ctx): - return os.path.exists(ctx.file_path) - / - """ - ) - conn.execute(create_udf_sql) - # Verify the path exists inside the UDF - res1 = conn.execute( - f"SELECT CHECK_FILE_EXISTS_UDF('{bucket_udf_path}/{file_name}')" - ).fetchone()[0] - res2 = conn.execute( - f"SELECT CHECK_FILE_EXISTS_UDF('{file_udf_path}')" - ).fetchone()[0] - assert res1 is True - assert res2 is True - - # return the content of the file - create_read_udf_sql = dedent( - f""" - --/ - CREATE OR REPLACE PYTHON3 SCALAR - SCRIPT READ_FILE_CONTENT_UDF(file_path VARCHAR(200000)) - RETURNS VARCHAR(200000) AS - def run(ctx): - with open(ctx.file_path, 'rb') as f: - return f.read().decode('utf-8', errors='replace') - / - """ - ) - conn.execute(create_read_udf_sql) - - 
content1 = conn.execute( - f"SELECT READ_FILE_CONTENT_UDF('{bucket_udf_path}/{file_name}')" - ).fetchone()[0] - content2 = conn.execute( - f"SELECT READ_FILE_CONTENT_UDF('{file_udf_path}')" - ).fetchone()[0] - assert content1 == content - assert content2 == content - - except Exception as e: - print(e) - - finally: - # cleanup - _, _ = delete_file( - bucket._service, - bucket.name, - bucket._username, - bucket._password, - file_name, - ) - pass + assert log_records \ No newline at end of file diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py new file mode 100644 index 00000000..38e7ddb7 --- /dev/null +++ b/test/integration/test_udf_path.py @@ -0,0 +1,190 @@ +import logging +import random +import string +from collections.abc import ( + ByteString, + Iterable, +) +from contextlib import ( + closing, + contextmanager, +) +from inspect import cleandoc +from test.integration.conftest import ( + File, + delete_file, +) +from textwrap import dedent +from typing import ( + Tuple, + Union, +) + +import pyexasol +import pytest +import requests +from exasol.pytest_backend import ( + BACKEND_ONPREM, + BACKEND_SAAS, +) + +import exasol.bucketfs as bfs +from exasol.bucketfs import ( + Bucket, + Service, + as_bytes, + as_string, +) + + +@pytest.fixture +def uploaded_file_and_paths( + backend_aware_bucketfs_params, backend, backend_aware_database_params, request +): + file_name = "Uploaded-File-From-Integration-test.bin" + content = "".join("1" for _ in range(0, 10)) + # ONPREM settings + if backend == BACKEND_ONPREM: + bucket = Bucket( + name=backend_aware_bucketfs_params["bucket_name"], + service_name=backend_aware_bucketfs_params["service_name"], + password=backend_aware_bucketfs_params["password"], + username=backend_aware_bucketfs_params["username"], + verify=backend_aware_bucketfs_params["verify"], + service=backend_aware_bucketfs_params["url"], + ) + pathlike = bfs.path.build_path( + backend=bfs.path.StorageBackend.onprem, + 
url=backend_aware_bucketfs_params["url"], + bucket_name=backend_aware_bucketfs_params["bucket_name"], + service_name=backend_aware_bucketfs_params["service_name"], + path=file_name, + username=backend_aware_bucketfs_params["username"], + password=backend_aware_bucketfs_params["password"], + verify=backend_aware_bucketfs_params["verify"], + ) + # SAAS settings + elif backend == BACKEND_SAAS: + bucket = SaaSBucket( + url=backend_aware_bucketfs_params["url"], + account_id=backend_aware_bucketfs_params["account_id"], + database_id=backend_aware_bucketfs_params["database_id"], + pat=backend_aware_bucketfs_params["pat"], + ) + pathlike = bfs.path.build_path( + backend=bfs.path.StorageBackend.saas, + url=backend_aware_bucketfs_params["url"], + account_id=backend_aware_bucketfs_params["account_id"], + database_id=backend_aware_bucketfs_params["database_id"], + pat=backend_aware_bucketfs_params["pat"], + path=file_name, + ) + else: + pytest.fail(f"Unknown backend: {backend}") + print(bucket,content) + # Upload file to BucketFS/Bucket + bucket.upload(file_name, content) + + udf_path = bucket.udf_path + pathlike_udf_path = pathlike.as_udf_path() if hasattr(pathlike, 'as_udf_path') else None + + # Setup teardown for cleanup + def cleanup(): + try: + delete_file( + backend_aware_bucketfs_params["url"], + backend_aware_bucketfs_params["bucket_name"], + backend_aware_bucketfs_params.get("username"), + backend_aware_bucketfs_params.get("password"), + file_name, + ) + except Exception: + pass + + request.addfinalizer(cleanup) + + return { + "bucket": bucket, + "pathlike": pathlike, + "file_name": file_name, + "content": content, + "udf_path": udf_path, + "pathlike_udf_path": pathlike_udf_path, + } + + +@pytest.fixture +def setup_schema_and_udfs(backend_aware_database_params): + conn = pyexasol.connect(**backend_aware_database_params) + conn.execute("CREATE SCHEMA IF NOT EXISTS transact;") + conn.execute("OPEN SCHEMA transact;") + # Check file exists UDF + create_check_udf_sql = 
dedent(""" + --/ + CREATE OR REPLACE PYTHON3 SCALAR + SCRIPT CHECK_FILE_EXISTS_UDF(file_path VARCHAR(200000)) + RETURNS BOOLEAN AS + import os + def run(ctx): + return os.path.exists(ctx.file_path) + / + """) + conn.execute(create_check_udf_sql) + # Read file content UDF + create_read_udf_sql = dedent(""" + --/ + CREATE OR REPLACE PYTHON3 SCALAR + SCRIPT READ_FILE_CONTENT_UDF(file_path VARCHAR(200000)) + RETURNS VARCHAR(200000) AS + def run(ctx): + with open(ctx.file_path, 'rb') as f: + return f.read().decode('utf-8', errors='replace') + / + """) + conn.execute(create_read_udf_sql) + return conn + + +def test_upload_and_udf_path(uploaded_file_and_paths, setup_schema_and_udfs): + bucket = uploaded_file_and_paths["bucket"] + file_name = uploaded_file_and_paths["file_name"] + content = uploaded_file_and_paths["content"] + bucket_udf_path = uploaded_file_and_paths["udf_path"] + + assert bucket_udf_path is not None, "UDF path generation failed" + + conn = setup_schema_and_udfs + + # Verify existence in UDF + result = conn.execute( + f"SELECT CHECK_FILE_EXISTS_UDF('{bucket_udf_path}/{file_name}')" + ).fetchone()[0] + assert result is True + + # Verify content from UDF path + content_from_udf_path = conn.execute( + f"SELECT READ_FILE_CONTENT_UDF('{bucket_udf_path}/{file_name}')" + ).fetchone()[0] + print(content_from_udf_path) + assert content_from_udf_path == content + + +def test_upload_and_udf_pathlike(uploaded_file_and_paths, setup_schema_and_udfs): + file_name = uploaded_file_and_paths["file_name"] + content = uploaded_file_and_paths["content"] + file_udf_path = uploaded_file_and_paths["pathlike_udf_path"] + assert file_udf_path is not None, "Pathlike udf path generation failed" + conn = setup_schema_and_udfs + + # Verify file exists in UDF + exists = conn.execute( + f"SELECT CHECK_FILE_EXISTS_UDF('{file_udf_path}')" + ).fetchone()[0] + assert exists is True + + # Verify content from pathlike udf path + content_of_file_udf_path = conn.execute( + f"SELECT 
READ_FILE_CONTENT_UDF('{file_udf_path}')" + ).fetchone()[0] + print(content_of_file_udf_path) + assert content_of_file_udf_path == content From 77c60320368683ba58b551b8357e7b6242c45222 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Mon, 1 Sep 2025 11:27:20 +0000 Subject: [PATCH 05/14] format fix --- test/integration/test_service_onprem.py | 2 +- test/integration/test_udf_path.py | 20 +++++++++++++------- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/test/integration/test_service_onprem.py b/test/integration/test_service_onprem.py index 97f82472..214ef8af 100644 --- a/test/integration/test_service_onprem.py +++ b/test/integration/test_service_onprem.py @@ -324,4 +324,4 @@ def test_any_log_message_get_emitted(httpserver, caplog): record for record in caplog.records if record.name == "exasol.bucketfs" ] # The log level DEBUG should emit at least one log message - assert log_records \ No newline at end of file + assert log_records diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index 38e7ddb7..53776a66 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -39,7 +39,7 @@ @pytest.fixture def uploaded_file_and_paths( - backend_aware_bucketfs_params, backend, backend_aware_database_params, request + backend_aware_bucketfs_params, backend, backend_aware_database_params, request ): file_name = "Uploaded-File-From-Integration-test.bin" content = "".join("1" for _ in range(0, 10)) @@ -81,12 +81,14 @@ def uploaded_file_and_paths( ) else: pytest.fail(f"Unknown backend: {backend}") - print(bucket,content) + print(bucket, content) # Upload file to BucketFS/Bucket bucket.upload(file_name, content) udf_path = bucket.udf_path - pathlike_udf_path = pathlike.as_udf_path() if hasattr(pathlike, 'as_udf_path') else None + pathlike_udf_path = ( + pathlike.as_udf_path() if hasattr(pathlike, "as_udf_path") else None + ) # Setup teardown for cleanup def cleanup(): @@ -119,7 +121,8 @@ def 
setup_schema_and_udfs(backend_aware_database_params): conn.execute("CREATE SCHEMA IF NOT EXISTS transact;") conn.execute("OPEN SCHEMA transact;") # Check file exists UDF - create_check_udf_sql = dedent(""" + create_check_udf_sql = dedent( + """ --/ CREATE OR REPLACE PYTHON3 SCALAR SCRIPT CHECK_FILE_EXISTS_UDF(file_path VARCHAR(200000)) @@ -128,10 +131,12 @@ def setup_schema_and_udfs(backend_aware_database_params): def run(ctx): return os.path.exists(ctx.file_path) / - """) + """ + ) conn.execute(create_check_udf_sql) # Read file content UDF - create_read_udf_sql = dedent(""" + create_read_udf_sql = dedent( + """ --/ CREATE OR REPLACE PYTHON3 SCALAR SCRIPT READ_FILE_CONTENT_UDF(file_path VARCHAR(200000)) @@ -140,7 +145,8 @@ def run(ctx): with open(ctx.file_path, 'rb') as f: return f.read().decode('utf-8', errors='replace') / - """) + """ + ) conn.execute(create_read_udf_sql) return conn From 3361534f1794c62e7b885da8330a16f8362ed212 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Mon, 1 Sep 2025 12:42:42 +0000 Subject: [PATCH 06/14] added scope --- test/integration/test_udf_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index 53776a66..7fde8c10 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -37,7 +37,7 @@ ) -@pytest.fixture +@pytest.fixture(scope="module") def uploaded_file_and_paths( backend_aware_bucketfs_params, backend, backend_aware_database_params, request ): From 8c7939134b8732515e75f6851d4d67c04c2b6d25 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 07:31:32 +0000 Subject: [PATCH 07/14] separation and format --- test/integration/test_udf_path.py | 118 ++++++++++++++++++------ 1 file changed, 70 insertions(+), 48 deletions(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index 7fde8c10..d418bc97 100644 --- 
a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -38,59 +38,74 @@ @pytest.fixture(scope="module") -def uploaded_file_and_paths( - backend_aware_bucketfs_params, backend, backend_aware_database_params, request -): - file_name = "Uploaded-File-From-Integration-test.bin" - content = "".join("1" for _ in range(0, 10)) - # ONPREM settings +def exa_bucket(backend_aware_bucketfs_params, backend): + # create and return a Bucket or SaaSBucket depending on backend + params = backend_aware_bucketfs_params if backend == BACKEND_ONPREM: bucket = Bucket( - name=backend_aware_bucketfs_params["bucket_name"], - service_name=backend_aware_bucketfs_params["service_name"], - password=backend_aware_bucketfs_params["password"], - username=backend_aware_bucketfs_params["username"], - verify=backend_aware_bucketfs_params["verify"], - service=backend_aware_bucketfs_params["url"], + name=params["bucket_name"], + service_name=params["service_name"], + password=params["password"], + username=params["username"], + verify=params["verify"], + service=params["url"], + ) + elif backend == BACKEND_SAAS: + bucket = SaaSBucket( + url=params["url"], + account_id=params["account_id"], + database_id=params["database_id"], + pat=params["pat"], ) - pathlike = bfs.path.build_path( + else: + pytest.fail(f"Unknown backend: {backend}") + return bucket + + +@pytest.fixture(scope="module") +def exa_pathlike(backend_aware_bucketfs_params, backend): + # build the pathlike + params = backend_aware_bucketfs_params + file_name = "Uploaded-File-From-Integration-test.bin" + if backend == BACKEND_ONPREM: + return bfs.path.build_path( backend=bfs.path.StorageBackend.onprem, - url=backend_aware_bucketfs_params["url"], - bucket_name=backend_aware_bucketfs_params["bucket_name"], - service_name=backend_aware_bucketfs_params["service_name"], + url=params["url"], + bucket_name=params["bucket_name"], + service_name=params["service_name"], path=file_name, - 
username=backend_aware_bucketfs_params["username"], - password=backend_aware_bucketfs_params["password"], - verify=backend_aware_bucketfs_params["verify"], + username=params["username"], + password=params["password"], + verify=params["verify"], ) - # SAAS settings elif backend == BACKEND_SAAS: - bucket = SaaSBucket( - url=backend_aware_bucketfs_params["url"], - account_id=backend_aware_bucketfs_params["account_id"], - database_id=backend_aware_bucketfs_params["database_id"], - pat=backend_aware_bucketfs_params["pat"], - ) - pathlike = bfs.path.build_path( + return bfs.path.build_path( backend=bfs.path.StorageBackend.saas, - url=backend_aware_bucketfs_params["url"], - account_id=backend_aware_bucketfs_params["account_id"], - database_id=backend_aware_bucketfs_params["database_id"], - pat=backend_aware_bucketfs_params["pat"], + url=params["url"], + account_id=params["account_id"], + database_id=params["database_id"], + pat=params["pat"], path=file_name, ) else: pytest.fail(f"Unknown backend: {backend}") - print(bucket, content) - # Upload file to BucketFS/Bucket - bucket.upload(file_name, content) - udf_path = bucket.udf_path - pathlike_udf_path = ( - pathlike.as_udf_path() if hasattr(pathlike, "as_udf_path") else None - ) - # Setup teardown for cleanup +@pytest.fixture(scope="module") +def uploaded_file_and_paths( + exa_bucket, exa_pathlike, backend_aware_bucketfs_params, request +): + file_name = "Uploaded-File-From-Integration-test.bin" + content = "1" * 10 + + exa_bucket.upload(file_name, content) + + # udf_path = exa_bucket.udf_path + # pathlike_udf_path = ( + # exa_pathlike.as_udf_path() if hasattr(exa_pathlike, "as_udf_path") else None + # ) + # + def cleanup(): try: delete_file( @@ -99,6 +114,7 @@ def cleanup(): backend_aware_bucketfs_params.get("username"), backend_aware_bucketfs_params.get("password"), file_name, + # TODO: try exa_bucket delete ) except Exception: pass @@ -106,12 +122,12 @@ def cleanup(): request.addfinalizer(cleanup) return { - "bucket": 
bucket, - "pathlike": pathlike, + "bucket": exa_bucket, + "pathlike": exa_pathlike, "file_name": file_name, "content": content, - "udf_path": udf_path, - "pathlike_udf_path": pathlike_udf_path, + # "udf_path": udf_path, + # "pathlike_udf_path": pathlike_udf_path, } @@ -152,13 +168,15 @@ def run(ctx): def test_upload_and_udf_path(uploaded_file_and_paths, setup_schema_and_udfs): - bucket = uploaded_file_and_paths["bucket"] + """ + Test that verifies upload and UDF path availability using the uploaded_file_and_paths fixture. + """ file_name = uploaded_file_and_paths["file_name"] content = uploaded_file_and_paths["content"] - bucket_udf_path = uploaded_file_and_paths["udf_path"] + bucket = uploaded_file_and_paths["bucket"] + bucket_udf_path = bucket.udf_path assert bucket_udf_path is not None, "UDF path generation failed" - conn = setup_schema_and_udfs # Verify existence in UDF @@ -176,9 +194,13 @@ def test_upload_and_udf_path(uploaded_file_and_paths, setup_schema_and_udfs): def test_upload_and_udf_pathlike(uploaded_file_and_paths, setup_schema_and_udfs): - file_name = uploaded_file_and_paths["file_name"] + """ + Test that verifies upload and pathlike UDF path availability using the uploaded_file_and_paths fixture. 
+ """ content = uploaded_file_and_paths["content"] - file_udf_path = uploaded_file_and_paths["pathlike_udf_path"] + pathlike = uploaded_file_and_paths["pathlike"] + file_udf_path = pathlike.as_udf_path() if hasattr(pathlike, "as_udf_path") else None + assert file_udf_path is not None, "Pathlike udf path generation failed" conn = setup_schema_and_udfs From c5246d2426bcb54fa283870257716bca6bc71283 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 14:37:01 +0530 Subject: [PATCH 08/14] Update test/integration/test_udf_path.py Co-authored-by: Torsten Kilias --- test/integration/test_udf_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index d418bc97..444dfa57 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -93,7 +93,7 @@ def exa_pathlike(backend_aware_bucketfs_params, backend): @pytest.fixture(scope="module") def uploaded_file_and_paths( - exa_bucket, exa_pathlike, backend_aware_bucketfs_params, request + exa_bucket, request ): file_name = "Uploaded-File-From-Integration-test.bin" content = "1" * 10 From ff6bfa3929180334f7287332533113017e588cc4 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 14:38:03 +0530 Subject: [PATCH 09/14] Update test/integration/test_udf_path.py Co-authored-by: Torsten Kilias --- test/integration/test_udf_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index 444dfa57..e2c1cbed 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -92,7 +92,7 @@ def exa_pathlike(backend_aware_bucketfs_params, backend): @pytest.fixture(scope="module") -def uploaded_file_and_paths( +def uploaded_file( exa_bucket, request ): file_name = "Uploaded-File-From-Integration-test.bin" From 1b72ca6ede05528c5a6c70aec94d53c6cd62cbda Mon Sep 17 00:00:00 2001 
From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 14:38:13 +0530 Subject: [PATCH 10/14] Update test/integration/test_udf_path.py Co-authored-by: Torsten Kilias --- test/integration/test_udf_path.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index e2c1cbed..75e13088 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -122,8 +122,6 @@ def cleanup(): request.addfinalizer(cleanup) return { - "bucket": exa_bucket, - "pathlike": exa_pathlike, "file_name": file_name, "content": content, # "udf_path": udf_path, From db2cd008167555c9288fa2db0061c7acac059562 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 14:38:29 +0530 Subject: [PATCH 11/14] Update test/integration/test_udf_path.py Co-authored-by: Torsten Kilias --- test/integration/test_udf_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index 75e13088..2befd215 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -165,7 +165,7 @@ def run(ctx): return conn -def test_upload_and_udf_path(uploaded_file_and_paths, setup_schema_and_udfs): +def test_upload_and_udf_path(uploaded_file, setup_schema_and_udfs, exa_bucket): """ Test that verifies upload and UDF path availability using the uploaded_file_and_paths fixture. 
""" From 1189e617a6a3500597bfa6e33edebdec31a0e5fc Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 17:05:44 +0530 Subject: [PATCH 12/14] Update test/integration/test_udf_path.py Co-authored-by: Torsten Kilias --- test/integration/test_udf_path.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index 2befd215..bd1aef77 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -191,7 +191,7 @@ def test_upload_and_udf_path(uploaded_file, setup_schema_and_udfs, exa_bucket): assert content_from_udf_path == content -def test_upload_and_udf_pathlike(uploaded_file_and_paths, setup_schema_and_udfs): +def test_upload_and_udf_pathlike(uploaded_file, setup_schema_and_udfs, exa_pathlike): """ Test that verifies upload and pathlike UDF path availability using the uploaded_file_and_paths fixture. """ From 1491db7354d93b4a5271bc817f1790b069e70333 Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 11:34:57 +0000 Subject: [PATCH 13/14] review comments fixing --- test/integration/test_udf_path.py | 29 ++++++----------------------- 1 file changed, 6 insertions(+), 23 deletions(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index bd1aef77..e893934c 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -100,22 +100,9 @@ def uploaded_file( exa_bucket.upload(file_name, content) - # udf_path = exa_bucket.udf_path - # pathlike_udf_path = ( - # exa_pathlike.as_udf_path() if hasattr(exa_pathlike, "as_udf_path") else None - # ) - # - def cleanup(): try: - delete_file( - backend_aware_bucketfs_params["url"], - backend_aware_bucketfs_params["bucket_name"], - backend_aware_bucketfs_params.get("username"), - backend_aware_bucketfs_params.get("password"), - file_name, - # TODO: try exa_bucket delete - ) + exa_bucket.delete(file_name) except Exception: 
pass @@ -124,8 +111,6 @@ def cleanup(): return { "file_name": file_name, "content": content, - # "udf_path": udf_path, - # "pathlike_udf_path": pathlike_udf_path, } @@ -169,10 +154,9 @@ def test_upload_and_udf_path(uploaded_file, setup_schema_and_udfs, exa_bucket): """ Test that verifies upload and UDF path availability using the uploaded_file_and_paths fixture. """ - file_name = uploaded_file_and_paths["file_name"] - content = uploaded_file_and_paths["content"] - bucket = uploaded_file_and_paths["bucket"] - bucket_udf_path = bucket.udf_path + file_name = uploaded_file["file_name"] + content = uploaded_file["content"] + bucket_udf_path = exa_bucket.udf_path assert bucket_udf_path is not None, "UDF path generation failed" conn = setup_schema_and_udfs @@ -195,9 +179,8 @@ def test_upload_and_udf_pathlike(uploaded_file, setup_schema_and_udfs, exa_pathl """ Test that verifies upload and pathlike UDF path availability using the uploaded_file_and_paths fixture. """ - content = uploaded_file_and_paths["content"] - pathlike = uploaded_file_and_paths["pathlike"] - file_udf_path = pathlike.as_udf_path() if hasattr(pathlike, "as_udf_path") else None + content = uploaded_file["content"] + file_udf_path = exa_pathlike.as_udf_path() assert file_udf_path is not None, "Pathlike udf path generation failed" conn = setup_schema_and_udfs From 6d162926d8f058d01369df07abc6378669a0e06d Mon Sep 17 00:00:00 2001 From: Janaarthanan Selvarajan Date: Tue, 2 Sep 2025 11:43:45 +0000 Subject: [PATCH 14/14] project:fix --- test/integration/test_udf_path.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/integration/test_udf_path.py b/test/integration/test_udf_path.py index e893934c..96dd45f5 100644 --- a/test/integration/test_udf_path.py +++ b/test/integration/test_udf_path.py @@ -92,9 +92,7 @@ def exa_pathlike(backend_aware_bucketfs_params, backend): @pytest.fixture(scope="module") -def uploaded_file( - exa_bucket, request -): +def uploaded_file(exa_bucket, request): 
file_name = "Uploaded-File-From-Integration-test.bin" content = "1" * 10