Replicates a typical Kafka stack using Docker Compose.
Export the stack version and bring the containers up:

export CP_STACK_VERSION=4.0.0
docker-compose up

The following web UIs are then available; a quick reachability check follows the list:
- Kafka Topics UI: http://localhost:8000
- Kafka Schema Registry UI: http://localhost:8001
- Kafka Connect UI: http://localhost:8002
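If you want to confirm the UIs came up, a minimal check such as the one below works, assuming the default ports listed above (standard library only; adjust the URLs if you changed the port mappings):

```python
# Quick reachability check for the bundled web UIs (ports as listed above).
import urllib.request

UIS = {
    "Kafka Topics UI": "http://localhost:8000",
    "Kafka Schema Registry UI": "http://localhost:8001",
    "Kafka Connect UI": "http://localhost:8002",
}

for name, url in UIS.items():
    try:
        with urllib.request.urlopen(url, timeout=5) as resp:
            print(f"{name}: HTTP {resp.status}")
    except OSError as exc:
        print(f"{name}: not reachable ({exc})")
```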
To also run the Kafka REST Proxy (used in the examples below), include its compose file:

docker-compose -f docker-compose.yml -f docker-compose.rest-proxy.yml up
Install the Python dependencies:

pip install -r requirements.txt
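The serialize.py helper invoked in the produce step below is not reproduced here. As a purely hypothetical sketch of a script with the same command-line shape (schema path, then record path), it could validate the JSON record against the Avro schema and echo it as compact JSON, assuming an Avro library such as fastavro is among the installed requirements; the repository's actual script may work differently:

```python
# Hypothetical sketch only -- the repository's serialize.py may differ.
import json
import sys

from fastavro import parse_schema            # assumes fastavro is installed
from fastavro.validation import validate


def main() -> None:
    schema_path, record_path = sys.argv[1], sys.argv[2]
    with open(schema_path) as fh:
        schema = parse_schema(json.load(fh))
    with open(record_path) as fh:
        record = json.load(fh)
    # Raises ValidationError if the record does not match the schema.
    validate(record, schema)
    # Emit compact JSON so the output can be embedded in a REST Proxy payload.
    print(json.dumps(record))


if __name__ == "__main__":
    main()
```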
Produce an Avro-encoded message to the event topic through the REST Proxy (serialize.py renders the example record as JSON for the value, and jq picks out its event_id to use as the key):

curl -X POST -H "Content-Type: application/vnd.kafka.avro.v1+json" \
  --data "{
    \"key_schema_id\": 1,
    \"value_schema_id\": 2,
    \"records\": [
      {
        \"key\": $(python serialize.py ./avro/event.avsc examples/event.json | jq .event_id),
        \"value\": $(python serialize.py ./avro/event.avsc examples/event.json)
      }
    ]
  }" \
  "http://localhost:8082/topics/event"
# Create an instance in a new consumer group
curl -X POST -H "Content-Type: application/vnd.kafka.v1+json" \
--data '{"id": "my_avro_consumer_instance_1", "format": "avro", "auto.offset.reset": "smallest"}' \
http://localhost:8082/consumers/my_avro_consumer
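The same consumer-instance registration from Python (again assuming the requests package is installed):

```python
# Register an Avro consumer instance in the my_avro_consumer group.
import requests  # assumed to be installed

resp = requests.post(
    "http://localhost:8082/consumers/my_avro_consumer",
    headers={"Content-Type": "application/vnd.kafka.v1+json"},
    json={
        "id": "my_avro_consumer_instance_1",
        "format": "avro",
        "auto.offset.reset": "smallest",
    },
)
resp.raise_for_status()
print(resp.json())  # response includes the instance's base_uri
```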
# Consume messages with the created consumer instance
curl -X GET -H "Accept: application/vnd.kafka.avro.v1+json" \
http://localhost:8082/consumers/my_avro_consumer/instances/my_avro_consumer_instance_1/topics/event 2>/dev/null | jq .
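Reading records from that instance can likewise be done in Python (same assumptions as above):

```python
# Fetch records from the consumer instance created above.
import requests  # assumed to be installed

INSTANCE_URL = (
    "http://localhost:8082/consumers/my_avro_consumer"
    "/instances/my_avro_consumer_instance_1"
)

resp = requests.get(
    f"{INSTANCE_URL}/topics/event",
    headers={"Accept": "application/vnd.kafka.avro.v1+json"},
)
resp.raise_for_status()
for record in resp.json():
    print(record["key"], record["value"])
```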
For the Python consumer example, bring the stack up with the Python compose file included:

docker-compose -f docker-compose.yml -f docker-compose.python.yml up
Start the consumer:
cd consumer/ && make run
License: MIT