Skip to content

Commit

Permalink
tests: migrate to dcor_shared.testing
Browse files — browse the repository at this point in the history
  • Loading branch information
paulmueller committed Feb 18, 2024
1 parent dea9440 commit f99c5eb
Show file tree
Hide file tree
Showing 6 changed files with 214 additions and 159 deletions.
13 changes: 10 additions & 3 deletions ckanext/dc_serve/helpers.py
@@ -1,6 +1,13 @@
import dcor_shared
from dcor_shared import get_resource_path, s3cc


def resource_has_condensed(resource_id):
    """Return True if a condensed version of the resource exists.

    Parameters
    ----------
    resource_id: str
        CKAN resource identifier

    The condensed artifact may live either on local block storage
    (`<stem>_condensed.rtdc` next to the resource file) or in the S3
    object store; block storage is checked first because the local
    `stat` is cheaper than an S3 round trip.
    """
    rpath = get_resource_path(resource_id)
    cpath = rpath.with_name(rpath.stem + "_condensed.rtdc")
    return (
        # block storage existence
        cpath.exists()
        # S3 existence
        or s3cc.object_exists(resource_id, artifact="condensed")
    )
79 changes: 0 additions & 79 deletions ckanext/dc_serve/tests/helper_methods.py

This file was deleted.

77 changes: 77 additions & 0 deletions ckanext/dc_serve/tests/test_helpers.py
@@ -0,0 +1,77 @@
import pathlib
from unittest import mock

import ckanext.dcor_schemas.plugin
import ckanext.dc_serve.helpers as serve_helpers

from dcor_shared import get_resource_path

import pytest
import ckan.tests.factories as factories
from dcor_shared.testing import make_dataset, synchronous_enqueue_job


data_path = pathlib.Path(__file__).parent / "data"


@pytest.mark.ckan_config('ckan.plugins', 'dcor_depot dcor_schemas dc_serve')
@pytest.mark.usefixtures('clean_db', 'with_request_context')
@mock.patch('ckan.plugins.toolkit.enqueue_job',
            side_effect=synchronous_enqueue_job)
def test_get_dc_instance_file(enqueue_job_mock, create_with_upload,
                              monkeypatch):
    """Condensed resource is detected when the local file exists."""
    # Run the after_dataset_create hooks synchronously for this test.
    monkeypatch.setattr(
        ckanext.dcor_schemas.plugin,
        'DISABLE_AFTER_DATASET_CREATE_FOR_CONCURRENT_JOB_TESTS',
        True)

    # Set up a user who administers an organization.
    acting_user = factories.User()
    org = factories.Organization(
        users=[{'name': acting_user['id'], 'capacity': 'admin'}])
    context = {
        'ignore_auth': False,
        'user': acting_user['name'],
        'api_version': 3,
    }
    # Create and activate a dataset with one uploaded .rtdc resource.
    dataset, _ = make_dataset(
        context, org,
        create_with_upload=create_with_upload,
        resource_path=data_path / "calibration_beads_47.rtdc",
        activate=True)

    res_id = dataset["resources"][0]["id"]
    local_path = pathlib.Path(get_resource_path(res_id))
    assert local_path.exists(), "sanity check"
    # The helper must report the condensed artifact as present.
    assert serve_helpers.resource_has_condensed(res_id)


@pytest.mark.ckan_config('ckan.plugins', 'dcor_depot dcor_schemas dc_serve')
@pytest.mark.usefixtures('clean_db', 'with_request_context')
@mock.patch('ckan.plugins.toolkit.enqueue_job',
            side_effect=synchronous_enqueue_job)
def test_get_dc_instance_s3(enqueue_job_mock, create_with_upload,
                            monkeypatch):
    """Condensed resource is detected via S3 when the local file is gone."""
    # Run the after_dataset_create hooks synchronously for this test.
    monkeypatch.setattr(
        ckanext.dcor_schemas.plugin,
        'DISABLE_AFTER_DATASET_CREATE_FOR_CONCURRENT_JOB_TESTS',
        True)

    # Set up a user who administers an organization.
    acting_user = factories.User()
    org = factories.Organization(
        users=[{'name': acting_user['id'], 'capacity': 'admin'}])
    context = {
        'ignore_auth': False,
        'user': acting_user['name'],
        'api_version': 3,
    }
    # Create and activate a dataset with one uploaded .rtdc resource.
    dataset, _ = make_dataset(
        context, org,
        create_with_upload=create_with_upload,
        resource_path=data_path / "calibration_beads_47.rtdc",
        activate=True)

    resource = dataset["resources"][0]
    res_id = resource["id"]
    local_path = pathlib.Path(get_resource_path(res_id))
    # remove the file, so DCOR falls back to the S3 resource
    local_path.unlink()
    assert not local_path.exists(), "sanity check"
    # The helper must still find the condensed artifact on S3.
    assert serve_helpers.resource_has_condensed(res_id)
21 changes: 14 additions & 7 deletions ckanext/dc_serve/tests/test_jobs.py
Expand Up @@ -22,7 +22,10 @@
import dcor_shared


from .helper_methods import data_path, make_dataset, synchronous_enqueue_job
from dcor_shared.testing import make_dataset, synchronous_enqueue_job


data_path = pathlib.Path(__file__).parent / "data"


# We need the dcor_depot extension to make sure that the symbolic-
Expand Down Expand Up @@ -96,9 +99,11 @@ def test_upload_condensed_dataset_to_s3_job(
create_context = {'ignore_auth': False,
'user': user['name'],
'api_version': 3}
ds_dict, res_dict = make_dataset(create_context, owner_org,
create_with_upload=create_with_upload,
activate=True)
ds_dict, res_dict = make_dataset(
create_context, owner_org,
create_with_upload=create_with_upload,
resource_path=data_path / "calibration_beads_47.rtdc",
activate=True)
bucket_name = dcor_shared.get_ckan_config_option(
"dcor_object_store.bucket_name").format(
organization_id=ds_dict["organization"]["id"])
Expand Down Expand Up @@ -147,9 +152,11 @@ def test_upload_condensed_dataset_to_s3_job_and_verify_basin(
create_context = {'ignore_auth': False,
'user': user['name'],
'api_version': 3}
ds_dict, res_dict = make_dataset(create_context, owner_org,
create_with_upload=create_with_upload,
activate=True)
ds_dict, res_dict = make_dataset(
create_context, owner_org,
create_with_upload=create_with_upload,
resource_path=data_path / "calibration_beads_47.rtdc",
activate=True)
bucket_name = dcor_shared.get_ckan_config_option(
"dcor_object_store.bucket_name").format(
organization_id=ds_dict["organization"]["id"])
Expand Down
38 changes: 24 additions & 14 deletions ckanext/dc_serve/tests/test_route.py
@@ -1,3 +1,4 @@
import pathlib
from unittest import mock

import ckan.common
Expand All @@ -9,7 +10,10 @@

import pytest

from .helper_methods import make_dataset, synchronous_enqueue_job
from dcor_shared.testing import make_dataset, synchronous_enqueue_job


data_path = pathlib.Path(__file__).parent / "data"


@pytest.mark.ckan_config('ckan.plugins', 'dcor_depot dcor_schemas dc_serve')
Expand Down Expand Up @@ -42,11 +46,13 @@ def test_route_redircet_condensed_to_s3_private(
'user': user['name'],
'api_version': 3}
# create a dataset
ds_dict, res_dict = make_dataset(create_context, owner_org,
create_with_upload=create_with_upload,
activate=True,
private=True
)
ds_dict, res_dict = make_dataset(
create_context, owner_org,
create_with_upload=create_with_upload,
resource_path=data_path / "calibration_beads_47.rtdc",
activate=True,
private=True
)
rid = res_dict["id"]
assert "s3_available" in res_dict
assert "s3_url" in res_dict
Expand Down Expand Up @@ -122,9 +128,11 @@ def test_route_condensed_to_s3_public(
'user': user['name'],
'api_version': 3}
# create a dataset
ds_dict, res_dict = make_dataset(create_context, owner_org,
create_with_upload=create_with_upload,
activate=True)
ds_dict, res_dict = make_dataset(
create_context, owner_org,
create_with_upload=create_with_upload,
resource_path=data_path / "calibration_beads_47.rtdc",
activate=True)
rid = res_dict["id"]
assert "s3_available" in res_dict
assert "s3_url" in res_dict
Expand Down Expand Up @@ -181,11 +189,13 @@ def test_route_redircet_resource_to_s3_private(
'user': user['name'],
'api_version': 3}
# create a dataset
ds_dict, res_dict = make_dataset(create_context, owner_org,
create_with_upload=create_with_upload,
activate=True,
private=True
)
ds_dict, res_dict = make_dataset(
create_context, owner_org,
create_with_upload=create_with_upload,
resource_path=data_path / "calibration_beads_47.rtdc",
activate=True,
private=True
)
rid = res_dict["id"]
assert "s3_available" in res_dict
assert "s3_url" in res_dict
Expand Down

0 comments on commit f99c5eb

Please sign in to comment.