jasyncq: an asynchronous task queue using MySQL.

- The dispatcher's `fetch_scheduled_tasks` and `fetch_pending_tasks` methods
  take scheduled jobs and concurrently update their status to WORK IN PROGRESS
  within the same transaction.
- Most tasks queued in jasyncq run exactly once, via `fetch_scheduled_tasks`.
  However, in some cases a job disappears because its worker shuts down while
  working. Such a job can be restored by `fetch_pending_tasks`, which checks
  how long a task has been left in the WIP state without being completed
  (completed rows are deleted).
# Example: create an aiomysql connection pool and wire up a jasyncq
# repository + dispatcher on top of it.
# NOTE(review): the bare `await` calls below only work inside an async
# function (e.g. run via `asyncio.run(main())`); they are shown at top
# level for brevity, as in the project README.
import asyncio
import logging
import aiomysql
loop = asyncio.get_event_loop()
pool = await aiomysql.create_pool(
host='127.0.0.1',
port=3306,
user='root',
db='test',
loop=loop,
autocommit=False,  # dispatcher updates task status transactionally, so no autocommit
)
from jasyncq.dispatcher.tasks import TasksDispatcher
from jasyncq.repository.tasks import TaskRepository
# topic_name selects/creates the backing table for this queue topic
repository = TaskRepository(pool=pool, topic_name='test_topic')
await repository.initialize()  # creates schema if needed — TODO confirm against repository docs
dispatcher = TasksDispatcher(repository=repository)
- Publish tasks
# Publish: enqueue a batch of TaskIn models onto the queue.
await dispatcher.apply_tasks(
tasks=[...list of jasyncq.dispatcher.model.task.TaskIn...],
)
- Consume tasks
# Consume: fetch newly scheduled tasks plus "pending" tasks — ones marked
# WIP longer than check_term_seconds without completing (e.g. their worker
# died mid-job) — then run them and mark them complete.
scheduled_tasks = await dispatcher.fetch_scheduled_tasks(queue_name='QUEUE_TEST', limit=10)
pending_tasks = await dispatcher.fetch_pending_tasks(
queue_name='QUEUE_TEST',
limit=10,
check_term_seconds=60,  # how long a WIP task is tolerated before being re-fetched
)
tasks = [*pending_tasks, *scheduled_tasks]
# ...RUN JOBS WITH tasks
task_ids = [str(task.uuid) for task in tasks]
await dispatcher.complete_tasks(task_ids=task_ids)
genesis = TaskIn(task={}, queue_name=queue_name)
dependent = TaskIn(task={}, queue_name=queue_name, depend_on=task.uuid)
# 'dependent' task might fetched after 'genesis' task is completed
await dispatcher.apply_tasks(tasks=[genesis, dependent])
scheduled_at = time.time() + 60
task = TaskIn(task={}, queue_name=queue_name, scheduled_at=scheduled_at)
# 'task' task might fetched after 60 seconds from now
await dispatcher.apply_tasks(tasks=[task])
# Priority: is_urgent=True lets a task jump ahead of normal tasks when the
# queue already has a backlog.
# NOTE(review): assumes `queue_name` is defined by the surrounding code.
normal = TaskIn(task={}, queue_name=queue_name)
urgent = TaskIn(task={}, queue_name=queue_name, is_urgent=True)
# 'urgent' task might be fetched earlier than 'normal' task if queue was already full
await dispatcher.apply_tasks(tasks=[normal, urgent])
# Ignoring dependencies: with ignore_dependency=True, tasks are fetched even
# if the task they depend on has not completed yet.
scheduled_tasks = await dispatcher.fetch_scheduled_tasks(
queue_name='QUEUE_TEST',
limit=10,
ignore_dependency=True,
)
pending_tasks = await dispatcher.fetch_pending_tasks(
queue_name='QUEUE_TEST',
limit=10,
check_term_seconds=60,  # how long a WIP task is tolerated before being re-fetched
ignore_dependency=True,
)
tasks = [*pending_tasks, *scheduled_tasks]
# ...RUN JOBS WITH tasks
- Consumer: /example/consumer.py
- Producer: /example/producer.py
$ docker run --name test_db -p 3306:3306 -e MYSQL_ALLOW_EMPTY_PASSWORD=true -d mysql:8.0.17
$ docker exec -it test_db bash -c 'mysql -u root -e "create database test;"'
$ python3 -m example.producer
$ python3 -m example.consumer
$ python3 setup.py sdist
$ python3 -m pip install ./dist/jasyncq-*
$ twine upload ./dist/jasyncq-{version}.tar.gz
$ docker run --name test_db -p 3306:3306 -e MYSQL_ALLOW_EMPTY_PASSWORD=true -d mysql:8.0.17
$ docker exec -it test_db bash -c 'mysql -u root -e "create database test;"'
$ python3 -m pip install pytest==6.2.3
$ pytest