-
Notifications
You must be signed in to change notification settings - Fork 38
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Reapply "future tasks should not cause anything to happen if the instance is suspended". This reverts commit 05b50df.
- Loading branch information
Showing 8 changed files with 195 additions and 19 deletions.
There are no files selected for viewing
34 changes: 34 additions & 0 deletions
34
spiffworkflow-backend/migrations/versions/acf20342181e_.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
"""empty message | ||
Revision ID: acf20342181e | ||
Revises: 343b406f723d | ||
Create Date: 2024-02-02 16:47:00.942504 | ||
""" | ||
from alembic import op | ||
import sqlalchemy as sa | ||
|
||
|
||
# revision identifiers, used by Alembic. | ||
revision = 'acf20342181e' | ||
down_revision = '343b406f723d' | ||
branch_labels = None | ||
depends_on = None | ||
|
||
|
||
def upgrade():
    """Add the ``archived_for_process_instance_status`` flag to ``future_task``.

    Tasks belonging to suspended process instances get this flag set so the
    background processor can skip them; the index supports filtering on it.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('future_task', schema=None) as batch_op:
        # server_default is required: adding a NOT NULL column to a table
        # that already contains rows fails without a default to backfill
        # (PostgreSQL rejects it outright; SQLite batch-copy needs a value).
        batch_op.add_column(
            sa.Column(
                'archived_for_process_instance_status',
                sa.Boolean(),
                server_default=sa.false(),
                nullable=False,
            )
        )
        batch_op.create_index(
            batch_op.f('ix_future_task_archived_for_process_instance_status'),
            ['archived_for_process_instance_status'],
            unique=False,
        )

    # ### end Alembic commands ###
|
||
|
||
def downgrade():
    """Drop the ``archived_for_process_instance_status`` column and its index."""
    # ### commands auto generated by Alembic - please adjust! ###
    column_name = 'archived_for_process_instance_status'
    with op.batch_alter_table('future_task', schema=None) as batch_op:
        # Index must go before the column it covers.
        batch_op.drop_index(batch_op.f(f'ix_future_task_{column_name}'))
        batch_op.drop_column(column_name)

    # ### end Alembic commands ###
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
80 changes: 80 additions & 0 deletions
80
spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_background_processing_service.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,80 @@ | ||
from flask import Flask | ||
from pytest_mock.plugin import MockerFixture | ||
from spiffworkflow_backend.background_processing.background_processing_service import BackgroundProcessingService | ||
from spiffworkflow_backend.models.db import db | ||
from spiffworkflow_backend.models.future_task import FutureTaskModel | ||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel | ||
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor | ||
|
||
from tests.spiffworkflow_backend.helpers.base_test import BaseTest | ||
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec | ||
|
||
|
||
class TestBackgroundProcessingService(BaseTest):
    """Tests for future-task handling in BackgroundProcessingService.

    Covers the empty case, a runnable future task being queued to celery,
    and a future task being archived (not queued) while its process
    instance is suspended.
    """

    def test_process_future_tasks_with_no_future_tasks(
        self,
        app: Flask,
        with_db_and_bpmn_file_cleanup: None,
    ) -> None:
        """Smoke test: an empty future_task table must not raise."""
        BackgroundProcessingService(app).process_future_tasks()

    def test_do_process_future_tasks_with_processable_future_task(
        self,
        app: Flask,
        mocker: MockerFixture,
        with_db_and_bpmn_file_cleanup: None,
    ) -> None:
        """A runnable future task is sent to the celery queue and stays unarchived."""
        with self.app_config_mock(app, "SPIFFWORKFLOW_BACKEND_CELERY_ENABLED", True):
            mock = mocker.patch("celery.current_app.send_task")
            # Return value intentionally ignored; only the DB side effect matters here.
            self._load_up_a_future_task_and_return_instance()
            assert mock.call_count == 0
            BackgroundProcessingService.do_process_future_tasks(99999999999999999)
            assert mock.call_count == 1
            future_tasks = FutureTaskModel.query.all()
            assert len(future_tasks) == 1
            assert future_tasks[0].archived_for_process_instance_status is False

    def test_do_process_future_tasks_with_unprocessable_future_task(
        self,
        app: Flask,
        mocker: MockerFixture,
        with_db_and_bpmn_file_cleanup: None,
    ) -> None:
        """A future task for a suspended instance is archived, not queued.

        Also verifies the task reappears as imminent once the instance resumes.
        """
        with self.app_config_mock(app, "SPIFFWORKFLOW_BACKEND_CELERY_ENABLED", True):
            mock = mocker.patch("celery.current_app.send_task")
            process_instance = self._load_up_a_future_task_and_return_instance()
            assert mock.call_count == 0
            process_instance.status = "suspended"
            db.session.add(process_instance)
            db.session.commit()
            future_tasks = BackgroundProcessingService.imminent_future_tasks(99999999999999999)
            assert len(future_tasks) == 1
            BackgroundProcessingService.do_process_future_tasks(99999999999999999)
            # should not process anything, so nothing goes to queue
            assert mock.call_count == 0
            future_tasks = FutureTaskModel.query.all()
            assert len(future_tasks) == 1
            assert future_tasks[0].archived_for_process_instance_status is True

            # the next time do_process_future_tasks runs, it will not consider this task, which is nice
            future_tasks = BackgroundProcessingService.imminent_future_tasks(99999999999999999)
            assert len(future_tasks) == 0
            processor = ProcessInstanceProcessor(process_instance)
            processor.resume()
            future_tasks = BackgroundProcessingService.imminent_future_tasks(99999999999999999)
            assert len(future_tasks) == 1

    def _load_up_a_future_task_and_return_instance(self) -> ProcessInstanceModel:
        """Run a timer-based user-task model far enough to create one future task."""
        process_model = load_test_spec(
            process_model_id="test_group/user-task-with-timer",
            process_model_source_directory="user-task-with-timer",
        )
        process_instance = self.create_process_instance_from_process_model(process_model=process_model)
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)

        assert process_instance.status == "user_input_required"

        future_tasks = FutureTaskModel.query.all()
        assert len(future_tasks) == 1
        return process_instance