Skip to content

Commit

Permalink
Merge pull request #4 from Nuvindu/examples
Browse files Browse the repository at this point in the history
Add examples to the package
  • Loading branch information
Nuvindu committed Apr 10, 2024
2 parents 0f38a5b + ac79881 commit 2a509a1
Show file tree
Hide file tree
Showing 30 changed files with 540 additions and 2 deletions.
1 change: 1 addition & 0 deletions .github/workflows/dev-stg-release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,4 @@ jobs:
secrets: inherit
with:
environment: ${{ github.event.inputs.environment }}
additional-publish-flags: "-x :confluent.cavroserdes-examples:build"
4 changes: 2 additions & 2 deletions ballerina/Dependencies.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ dependencies = [
[[package]]
org = "ballerina"
name = "avro"
version = "0.1.0"
version = "0.1.1"
dependencies = [
{org = "ballerina", name = "jballerina.java"}
]
Expand Down Expand Up @@ -359,7 +359,7 @@ modules = [
[[package]]
org = "ballerinax"
name = "confluent.cregistry"
version = "0.1.0"
version = "0.1.1"
dependencies = [
{org = "ballerina", name = "jballerina.java"}
]
Expand Down
1 change: 1 addition & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ subprojects {
task build {
dependsOn(":confluent.cavroserdes-native:build")
dependsOn(":confluent.cavroserdes-ballerina:build")
dependsOn(":confluent.cavroserdes-examples:build")
}

def moduleVersion = project.version.replace("-SNAPSHOT", "")
Expand Down
63 changes: 63 additions & 0 deletions examples/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
## Examples

The Ballerina Avro Serializer/Deserializer connector for Confluent Schema Registry provides practical examples illustrating usage in various scenarios. Explore these [examples](https://github.com/ballerina-platform/module-ballerinax-confluent.cavroserdes/tree/main/examples).

1. [Kafka Avro producer](https://github.com/ballerina-platform/module-ballerinax-confluent.cavroserdes/tree/main/examples/kafka-avro-producer)
This example shows how to publish Avro serialized data to a Kafka topic.

2. [Kafka Avro consumer](https://github.com/ballerina-platform/module-ballerinax-confluent.cavroserdes/tree/main/examples/kafka-avro-consumer)
This example shows how to consume Avro serialized data from a Kafka topic and deserialize it using the corresponding Avro schema.

## Prerequisites

Create a `Config.toml` file with the base URL, schema capacity, subject, connection configurations and header values. Here's an example of how your `Config.toml` file should look:

```toml
baseUrl = "<BASE_URL>"
identityMapCapacity = <SCHEMA_MAP_CAPACITY>
subject = "<SCHEMA_REGISTRY_TOPIC>"

[originals]
"schema.registry.url" = "<SCHEMA_REGISTRY_ENDPOINT_URL>"
"basic.auth.credentials.source" = "USER_INFO"
"bootstrap.servers" = "<SERVER>:<PORT>"
"schema.registry.basic.auth.user.info" = "<KEY>:<SECRET>"

[headers]
```

## Running an Example

Follow these steps to run the examples.

## Step 01: Start a Kafka Server

Execute the following docker command to start the Kafka server.

```bash
docker-compose -f docker-compose.yaml up -d
```

## Step 02: Start the Kafka Producer

Go to the [kafka-avro-producer](https://github.com/ballerina-platform/module-ballerinax-confluent.cavroserdes/tree/main/examples/kafka-avro-producer) directory and start the Ballerina service.

```bash
bal run
```

## Step 03: Run the Kafka Consumer

Go to the [kafka-avro-consumer](https://github.com/ballerina-platform/module-ballerinax-confluent.cavroserdes/tree/main/examples/kafka-avro-consumer) directory and execute the following command.

```bash
bal run
```

## Step 04: Execute the cURL command

Execute the following curl command in a terminal.

```bash
curl http://localhost:9090/orders -H "Content-type:application/json" -d "{\"orderId\": 1, \"productName\": \"sport-shoes\"}"
```
76 changes: 76 additions & 0 deletions examples/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
/*
* Copyright (c) 2024, WSO2 Inc. (http://www.wso2.com).
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import org.apache.tools.ant.taskdefs.condition.Os

// Extra flag selected when the build runs with -PbalGraalVMTest.
// NOTE(review): this flag is assigned below but never appended to the
// build.sh command lines in this file — confirm whether it should be
// forwarded to `bal test`/build.sh.
def graalvmFlag = ""

// Runs every example package via build.sh against the locally built
// Ballerina package. Enabled only when the examples test task is in the
// task graph (see the toggle in buildExamples).
task testExamples {
    if (project.hasProperty("balGraalVMTest")) {
        graalvmFlag = "--graalvm"
    }
    doLast {
        try {
            exec {
                workingDir project.projectDir
                println("Working dir: ${workingDir}")
                if (Os.isFamily(Os.FAMILY_WINDOWS)) {
                    // `/c` and `%%ERRORLEVEL%%` are cmd.exe syntax, so the
                    // interpreter must be `cmd`, not `sh`. The script itself is
                    // still executed through `sh` (no chmod needed that way).
                    // NOTE(review): this assumes a POSIX sh (e.g. Git Bash) is
                    // on PATH on Windows CI agents — confirm.
                    commandLine 'cmd', "/c", "sh ./build.sh run && exit %%ERRORLEVEL%%"
                } else {
                    commandLine 'sh', "-c", "chmod +x ./build.sh && ./build.sh run"
                }
            }
        } catch (Exception e) {
            println("Example Build failed: " + e.message)
            throw e
        }
    }
}

// Builds every example package via build.sh. Exactly one of
// buildExamples/testExamples is enabled per invocation, depending on
// whether the examples test task is part of the requested task graph.
task buildExamples {
    gradle.taskGraph.whenReady { graph ->
        // Fixed module path: this repository is confluent.cavroserdes, not
        // confluent.cregistry — the old check could never match, so
        // testExamples was permanently disabled.
        if (graph.hasTask(":confluent.cavroserdes-examples:test")) {
            buildExamples.enabled = false
        } else {
            testExamples.enabled = false
        }
    }
    doLast {
        try {
            exec {
                workingDir project.projectDir
                println("Working dir: ${workingDir}")
                if (Os.isFamily(Os.FAMILY_WINDOWS)) {
                    // `/c` and `%%ERRORLEVEL%%` belong to cmd.exe, so invoke
                    // `cmd` (not `sh`) and run the script through `sh`.
                    // NOTE(review): assumes a POSIX sh (e.g. Git Bash) is on
                    // PATH on Windows — confirm.
                    commandLine 'cmd', "/c", "sh ./build.sh build && exit %%ERRORLEVEL%%"
                } else {
                    commandLine 'sh', "-c", "chmod +x ./build.sh && ./build.sh build"
                }
            }
        } catch (Exception e) {
            println("Example Build failed: " + e.message)
            throw e
        }
    }
}

// Aggregate entry point so `:confluent.cavroserdes-examples:build` builds
// all example packages.
task build {
    dependsOn buildExamples
}

// The examples need the Ballerina package packed and pushed locally first.
buildExamples.dependsOn ":confluent.cavroserdes-ballerina:build"
testExamples.dependsOn ":confluent.cavroserdes-ballerina:build"
61 changes: 61 additions & 0 deletions examples/build.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
#!/bin/bash

# Builds or runs every example package against the locally packed
# `ballerinax` Ballerina package.
#
# Usage: ./build.sh <build|run>

BAL_EXAMPLES_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BAL_CENTRAL_DIR="$HOME/.ballerina/repositories/central.ballerina.io/"
BAL_HOME_DIR="$BAL_EXAMPLES_DIR/../ballerina"

set -e

# Map the CLI argument onto the matching `bal` sub-command.
case "$1" in
    build)
        BAL_CMD="build"
        ;;
    run)
        BAL_CMD="run"
        ;;
    *)
        echo "Invalid command provided: '$1'. Please provide 'build' or 'run' as the command."
        exit 1
        ;;
esac

# Read the Ballerina package name from Ballerina.toml
BAL_PACKAGE_NAME=$(awk -F'"' '/^name/ {print $2}' "$BAL_HOME_DIR/Ballerina.toml")

# Pack the package and push it to the local repository
cd "$BAL_HOME_DIR" &&
bal pack &&
bal push --repository=local

# Remove the cache directories in the repositories.
# The glob must stay OUTSIDE the quotes — a quoted `cache-*` is taken
# literally and matches nothing, so the cache was never actually cleared.
rm -rf "$BAL_CENTRAL_DIR"/cache-*

# Mirror the locally pushed package into the central repository cache so
# the examples resolve it without contacting Ballerina Central.
BAL_DESTINATION_DIR="$HOME/.ballerina/repositories/central.ballerina.io/bala/ballerinax"
BAL_SOURCE_DIR="$HOME/.ballerina/repositories/local/bala/ballerinax/$BAL_PACKAGE_NAME"

mkdir -p "$BAL_DESTINATION_DIR"

# Use an explicit `if` — a bare `[ -d … ] && cp` would abort the script
# under `set -e` when the source directory does not exist.
if [ -d "$BAL_SOURCE_DIR" ]; then
    cp -r "$BAL_SOURCE_DIR" "$BAL_DESTINATION_DIR"
fi
echo "Successfully updated the local central repositories"

# Build/run each example; skip helper directories that are not
# Ballerina packages. A glob avoids word-splitting of `find` output on
# paths containing spaces.
cd "$BAL_EXAMPLES_DIR"
for dir in "$BAL_EXAMPLES_DIR"/*/; do
    case "$dir" in
        */build/|*/configs/|*/secrets/)
            continue
            ;;
    esac
    (cd "$dir" && bal "$BAL_CMD")
done

# Remove JAR files generated while packing the Ballerina package.
find "$BAL_HOME_DIR" -maxdepth 1 -type f -name "*.jar" -exec rm {} +
14 changes: 14 additions & 0 deletions examples/configs/kafka_server_jaas.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
KafkaServer {
org.apache.kafka.common.security.plain.PlainLoginModule required
serviceName="test-service"
username="admin"
password="password"
user_admin="password"
user_client="client-secret";
};

Client {
org.apache.zookeeper.server.auth.DigestLoginModule required
username="admin"
password="password";
};
5 changes: 5 additions & 0 deletions examples/configs/zookeeper_jaas.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
Server {
org.apache.zookeeper.server.auth.DigestLoginModule required
user_super="admin-secret"
user_admin="password";
};
53 changes: 53 additions & 0 deletions examples/docker-compose.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
version: '2'

services:
zookeeper:
image: 'confluentinc/cp-zookeeper:latest'
hostname: zookeeper
container_name: kafka-test-zookeeper
ports:
- '2181:2181'
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/confluentinc/kafka/config/zookeeper_jaas.conf'
volumes:
- ./configs/zookeeper_jaas.conf:/opt/confluentinc/kafka/config/zookeeper_jaas.conf
broker:
image: 'confluentinc/cp-server:latest'
hostname: broker
container_name: kafka-test-broker
depends_on:
- zookeeper
ports:
- '9092:9092'
- '9093:9093'
- '9094:9094'
- '9095:9095'
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
KAFKA_SECURITY_INTER_BROKER_PROTOCOL: PLAINTEXT
KAFKA_SSL_CLIENT_AUTH: "requested"
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,EX_PLAINTEXT://localhost:9092,EX_SASL_PLAINTEXT://localhost:9093,EX_SSL://localhost:9094,EX_SASL_SSL://localhost:9095
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,EX_PLAINTEXT:PLAINTEXT,EX_SASL_PLAINTEXT:SASL_PLAINTEXT,EX_SSL:SSL,EX_SASL_SSL:SASL_SSL
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_SSL_KEYSTORE_FILENAME: kafka.broker.keystore.jks
KAFKA_SSL_KEYSTORE_CREDENTIALS: broker_keystore_creds
KAFKA_SSL_KEY_CREDENTIALS: broker_sslkey_creds
KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.broker.truststore.jks
KAFKA_SSL_TRUSTSTORE_CREDENTIALS: broker_truststore_creds
KAFKA_OPTS: '-Djava.security.auth.login.config=/opt/confluentinc/kafka/config/kafka_server_jaas.conf'
volumes:
- ./secrets/trustoresandkeystores/broker_keystore_creds:/etc/kafka/secrets/broker_keystore_creds
- ./secrets/trustoresandkeystores/broker_sslkey_creds:/etc/kafka/secrets/broker_sslkey_creds
- ./secrets/trustoresandkeystores/broker_truststore_creds:/etc/kafka/secrets/broker_truststore_creds
- ./secrets/trustoresandkeystores/kafka.broker.keystore.jks:/etc/kafka/secrets/kafka.broker.keystore.jks
- ./secrets/trustoresandkeystores/kafka.broker.truststore.jks:/etc/kafka/secrets/kafka.broker.truststore.jks
- ./configs/kafka_server_jaas.conf:/opt/confluentinc/kafka/config/kafka_server_jaas.conf
1 change: 1 addition & 0 deletions examples/kafka-avro-consumer/.github/README.md
8 changes: 8 additions & 0 deletions examples/kafka-avro-consumer/Ballerina.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
[package]
org = "wso2"
name = "kafka_avro_consumer"
version = "0.1.0"
distribution = "2201.8.6"

[build-options]
observabilityIncluded = true
31 changes: 31 additions & 0 deletions examples/kafka-avro-consumer/kafka-avro-consumer.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# Consume Avro Serialized Data from a Kafka Topic

## Introduction

This guide demonstrates how to consume Avro serialized data from a Kafka topic and deserialize it using the corresponding Avro schema.

### Configuration

Configure the following values in the `Config.toml` file in the example directory.

```toml
baseUrl = "<BASE_URL>"
identityMapCapacity = <SCHEMA_MAP_CAPACITY>
subject = "<SCHEMA_REGISTRY_TOPIC>"

[originals]
"schema.registry.url" = "<SCHEMA_REGISTRY_ENDPOINT_URL>"
"basic.auth.credentials.source" = "USER_INFO"
"bootstrap.servers" = "<SERVER>:<PORT>"
"schema.registry.basic.auth.user.info" = "<KEY>:<SECRET>"

[headers]
```

## Run the example

Execute the following command to run the example.

```bash
bal run
```
Loading

0 comments on commit 2a509a1

Please sign in to comment.