Commit

Merge pull request #1164 from OCR-D/network-tests

ocrd_network base integration tests
kba committed Jan 30, 2024
2 parents e9c20e0 + 824a9ab commit b0739dd
Showing 22 changed files with 1,124 additions and 23 deletions.
4 changes: 2 additions & 2 deletions .circleci/config.yml
@@ -1,7 +1,7 @@
version: 2
version: 2.1

orbs:
python: circleci/python@2.0.3
python: circleci/python@2.1.1

jobs:

3 changes: 3 additions & 0 deletions .dockerignore
@@ -7,3 +7,6 @@
!LICENSE
!README.md
!.git
!tests
!requirements_test.txt
!.gitmodules
58 changes: 58 additions & 0 deletions .github/workflows/integration-test.yml
@@ -0,0 +1,58 @@
name: Run ocrd network integration tests

on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]

jobs:
build:

runs-on: ${{ matrix.os }}

strategy:
fail-fast: false
matrix:
python-version:
- '3.7'
- '3.8'
- '3.9'
- '3.10'
- '3.11'
os:
- ubuntu-22.04
# - macos-latest

steps:
- uses: actions/checkout@v3
- name: Set up Homebrew
id: set-up-homebrew
uses: Homebrew/actions/setup-homebrew@master
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
if [[ "${{ matrix.os }}" == "ubuntu"* ]];then
sudo apt-get -y update
sudo make deps-ubuntu
else
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 \
HOMEBREW_NO_AUTO_UPDATE=1 \
brew install imagemagick geos bash # opencv
fi
make install deps-test
- name: Install Docker on macOS
if: runner.os == 'macos'
run: |
brew install docker docker-compose
colima start
- name: Test network integration with pytest
run: |
if [[ "${{ matrix.os }}" == "macos"* ]];then
make integration-test DOCKER_COMPOSE=docker-compose
else
make integration-test
fi
22 changes: 17 additions & 5 deletions Dockerfile
@@ -1,5 +1,5 @@
ARG BASE_IMAGE
FROM $BASE_IMAGE
FROM $BASE_IMAGE as ocrd_core_base
ARG FIXUP=echo
MAINTAINER OCR-D
ENV DEBIAN_FRONTEND noninteractive
@@ -33,13 +33,25 @@ RUN apt-get update && apt-get -y install software-properties-common \
curl \
sudo \
git \
&& make deps-ubuntu \
&& python3 -m venv /usr/local \
&& make deps-ubuntu
RUN python3 -m venv /usr/local \
&& hash -r \
&& make install \
&& eval $FIXUP \
&& rm -rf /build-ocrd
&& eval $FIXUP

WORKDIR /data

CMD ["/usr/local/bin/ocrd", "--help"]

FROM ocrd_core_base as ocrd_core_test
WORKDIR /build-ocrd
COPY Makefile .
RUN make assets
COPY tests ./tests
COPY .gitmodules .
COPY requirements_test.txt .
RUN pip install -r requirements_test.txt
RUN mkdir /ocrd-data && chmod 777 /ocrd-data

CMD ["yes"]
# CMD ["make", "test", "integration-test"]
11 changes: 10 additions & 1 deletion Makefile
@@ -9,6 +9,8 @@ TESTDIR = $(CURDIR)/tests
PYTEST_ARGS = --continue-on-collection-errors
VERSION = $(shell cat VERSION)

DOCKER_COMPOSE = docker compose

SPHINX_APIDOC =

BUILD_ORDER = ocrd_utils ocrd_models ocrd_modelfactory ocrd_validators ocrd_network ocrd
@@ -213,9 +215,16 @@ test: assets
$(PYTHON) \
-m pytest $(PYTEST_ARGS) --durations=10\
--ignore-glob="$(TESTDIR)/**/*bench*.py" \
--ignore-glob="$(TESTDIR)/network/*.py" \
$(TESTDIR)
cd ocrd_utils ; $(PYTHON) -m pytest --continue-on-collection-errors -k TestLogging -k TestDecorators $(TESTDIR)

INTEGRATION_TEST_IN_DOCKER = docker exec core_test
integration-test:
$(DOCKER_COMPOSE) --file tests/network/docker-compose.yml up -d
-$(INTEGRATION_TEST_IN_DOCKER) pytest -k 'test_rmq or test_db or test_processing_server' -v
$(DOCKER_COMPOSE) --file tests/network/docker-compose.yml down --remove-orphans

benchmark:
$(PYTHON) -m pytest $(TESTDIR)/model/test_ocrd_mets_bench.py

@@ -296,7 +305,7 @@ docker-cuda: DOCKER_FILE = Dockerfile.cuda
docker-cuda: docker

docker docker-cuda:
docker build --progress=plain -f $(DOCKER_FILE) -t $(DOCKER_TAG) --build-arg BASE_IMAGE=$(DOCKER_BASE_IMAGE) $(DOCKER_ARGS) .
docker build --progress=plain -f $(DOCKER_FILE) -t $(DOCKER_TAG) --target ocrd_core_base --build-arg BASE_IMAGE=$(DOCKER_BASE_IMAGE) $(DOCKER_ARGS) .

# Build wheels and source dist and twine upload them
pypi: build
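The new integration-test target brings up the compose stack defined in tests/network/docker-compose.yml, runs only the network-related tests (test_rmq, test_db, test_processing_server) inside the core_test container, and tears the stack down again; the leading '-' on the pytest line lets make continue to the teardown step even when the tests fail. A hypothetical smoke test in the spirit of the test_processing_server selection follows; the server address, endpoint and test name are assumptions, not code from this PR.

import requests

# Assumed address at which the docker-compose stack publishes the Processing Server.
PROCESSING_SERVER_URL = "http://localhost:8000"

def test_processing_server_is_reachable():
    # If the service came up, its root endpoint should answer with HTTP 200.
    response = requests.get(PROCESSING_SERVER_URL + "/", timeout=10)
    assert response.status_code == 200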
41 changes: 37 additions & 4 deletions src/ocrd_network/database.py
@@ -28,17 +28,17 @@
from .utils import call_sync


async def initiate_database(db_url: str):
async def initiate_database(db_url: str, db_name: str = 'ocrd'):
client = AsyncIOMotorClient(db_url)
await init_beanie(
database=client.get_default_database(default='ocrd'),
database=client.get_default_database(default=db_name),
document_models=[DBProcessorJob, DBWorkflowJob, DBWorkspace, DBWorkflowScript]
)


@call_sync
async def sync_initiate_database(db_url: str):
await initiate_database(db_url)
async def sync_initiate_database(db_url: str, db_name: str = 'ocrd'):
await initiate_database(db_url, db_name)


async def db_create_workspace(mets_path: str) -> DBWorkspace:
@@ -60,6 +60,11 @@ async def db_create_workspace(mets_path: str) -> DBWorkspace:
return workspace_db


@call_sync
async def sync_db_create_workspace(mets_path: str) -> DBWorkspace:
return await db_create_workspace(mets_path=mets_path)


async def db_get_workspace(workspace_id: str = None, workspace_mets_path: str = None) -> DBWorkspace:
workspace = None
if not workspace_id and not workspace_mets_path:
@@ -134,6 +139,15 @@ async def sync_db_update_workspace(workspace_id: str = None, workspace_mets_path
return await db_update_workspace(workspace_id=workspace_id, workspace_mets_path=workspace_mets_path, **kwargs)


async def db_create_processing_job(db_processing_job: DBProcessorJob) -> DBProcessorJob:
return await db_processing_job.insert()


@call_sync
async def sync_db_create_processing_job(db_processing_job: DBProcessorJob) -> DBProcessorJob:
return await db_create_processing_job(db_processing_job=db_processing_job)


async def db_get_processing_job(job_id: str) -> DBProcessorJob:
job = await DBProcessorJob.find_one(
DBProcessorJob.job_id == job_id)
@@ -180,6 +194,15 @@ async def sync_db_update_processing_job(job_id: str, **kwargs) -> DBProcessorJob
return await db_update_processing_job(job_id=job_id, **kwargs)


async def db_create_workflow_job(db_workflow_job: DBWorkflowJob) -> DBWorkflowJob:
return await db_workflow_job.insert()


@call_sync
async def sync_db_create_workflow_job(db_workflow_job: DBWorkflowJob) -> DBWorkflowJob:
return await db_create_workflow_job(db_workflow_job=db_workflow_job)


async def db_get_workflow_job(job_id: str) -> DBWorkflowJob:
job = await DBWorkflowJob.find_one(DBWorkflowJob.job_id == job_id)
if not job:
Expand All @@ -202,6 +225,15 @@ async def sync_db_get_processing_jobs(job_ids: List[str]) -> [DBProcessorJob]:
return await db_get_processing_jobs(job_ids)


async def db_create_workflow_script(db_workflow_script: DBWorkflowScript) -> DBWorkflowScript:
return await db_workflow_script.insert()


@call_sync
async def sync_db_create_workflow_script(db_workflow_script: DBWorkflowScript) -> DBWorkflowScript:
return await db_create_workflow_script(db_workflow_script=db_workflow_script)


async def db_get_workflow_script(workflow_id: str) -> DBWorkflowScript:
workflow = await DBWorkflowScript.find_one(DBWorkflowScript.workflow_id == workflow_id)
if not workflow:
@@ -221,6 +253,7 @@ async def db_find_first_workflow_script_by_content(content_hash: str) -> DBWorkf
return workflow


# TODO: Resolve the inconsistency between the async and sync versions of the same method
@call_sync
async def sync_db_find_first_workflow_script_by_content(workflow_id: str) -> DBWorkflowScript:
return await db_get_workflow_script(workflow_id)
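Each new db_create_* coroutine above gets a synchronous twin through the @call_sync decorator imported from .utils, so blocking callers such as the CLI and the integration tests can reuse the same database code without managing an event loop. A minimal sketch of that pattern; the real decorator lives in ocrd_network.utils and may differ in detail.

import asyncio
from functools import wraps

def call_sync(async_fn):
    """Wrap a coroutine function so it can be called from ordinary synchronous code."""
    @wraps(async_fn)
    def wrapper(*args, **kwargs):
        # Drive the coroutine to completion on an event loop and return its result.
        return asyncio.run(async_fn(*args, **kwargs))
    return wrapper

@call_sync
async def fetch_value(value: str) -> str:
    return value.upper()

assert fetch_value("ocrd") == "OCRD"  # no await needed at the call site

With the wrappers in place, plain pytest code can call sync_initiate_database, sync_db_create_workspace or sync_db_create_processing_job directly.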
2 changes: 2 additions & 0 deletions src/ocrd_network/models/job.py
@@ -20,6 +20,8 @@ class StateEnum(str, Enum):
success = 'SUCCESS'
# Processing job failed
failed = 'FAILED'
# Processing job has not been assigned yet
unset = 'UNSET'


class PYJobInput(BaseModel):
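The new unset member gives a processing job a well-defined state for the time between creation of its database record and assignment to a worker. Because StateEnum derives from str, each member behaves like a plain string, which is convenient when the state is persisted in MongoDB documents. A self-contained sketch; members of the real enum other than those shown in the diff are elided.

from enum import Enum

class StateEnum(str, Enum):
    # Processing job finished successfully
    success = 'SUCCESS'
    # Processing job failed
    failed = 'FAILED'
    # Processing job has not been assigned yet
    unset = 'UNSET'

# str-derived enum members compare equal to their string value
# and can be reconstructed from it when a document is read back.
assert StateEnum.unset == 'UNSET'
assert StateEnum('UNSET') is StateEnum.unset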
