feat: add example for java client #638

Open · wants to merge 1 commit into main
2 changes: 2 additions & 0 deletions code-examples/kafka-java-maven/.gitignore
@@ -0,0 +1,2 @@
.idea/
target/
65 changes: 65 additions & 0 deletions code-examples/kafka-java-maven/pom.xml
@@ -0,0 +1,65 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>kafka-java-maven</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>3.2.1</version>
        </dependency>

        <dependency>
            <groupId>com.fasterxml.jackson.dataformat</groupId>
            <artifactId>jackson-dataformat-xml</artifactId>
            <version>2.8.5</version>
        </dependency>
    </dependencies>

    <profiles>
        <profile>
            <id>producer</id>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.codehaus.mojo</groupId>
                        <artifactId>exec-maven-plugin</artifactId>
                        <version>3.0.0</version>
                        <configuration>
                            <mainClass>org.example.ProducerExample</mainClass>
                        </configuration>
                    </plugin>
                </plugins>
            </build>
        </profile>
        <profile>
            <id>consumer</id>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.codehaus.mojo</groupId>
                        <artifactId>exec-maven-plugin</artifactId>
                        <version>3.0.0</version>
                        <configuration>
                            <mainClass>org.example.ConsumerExample</mainClass>
                        </configuration>
                    </plugin>
                </plugins>
            </build>
        </profile>
    </profiles>

</project>
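With these two profiles in place, each example would typically be launched through the exec-maven-plugin, for instance:

    mvn compile exec:java -Pproducer
    mvn compile exec:java -Pconsumer

(The exact commands are an assumption based on the plugin configuration above; the PR itself does not include run instructions.)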
org/example/ConsumerExample.java
@@ -0,0 +1,47 @@
package org.example;

import java.util.Arrays;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import java.time.Duration;

public class ConsumerExample {

    public static void main(String[] args) {

        var properties = KafkaConfig.properties();
        int MAX_MESSAGES_CONSUMED = 1;
        int messagesCount = 0;

        if (args.length > 0) {
            MAX_MESSAGES_CONSUMED = Integer.parseInt(args[0]);
        }

        properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test_group_2");
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(properties);

        // Subscribing
        consumer.subscribe(Arrays.asList("prices"));

        // Polling
        while (true) {
            if (messagesCount >= MAX_MESSAGES_CONSUMED) {
                break;
            }
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("Key: " + record.key() + ", Value:" + record.value());
                System.out.println("Partition:" + record.partition() + ",Offset:" + record.offset());
                messagesCount++;
            }
        }
    }
}
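One possible refinement, sketched here as an assumption and not part of this commit: the consumer is never closed once the loop breaks. Since KafkaConsumer implements Closeable, the polling section could be wrapped in try-with-resources with the same properties and topic:

    // Hypothetical variant, not in this PR: the consumer is closed automatically on exit.
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
        consumer.subscribe(Arrays.asList("prices"));
        while (messagesCount < MAX_MESSAGES_CONSUMED) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("Key: " + record.key() + ", Value:" + record.value());
                messagesCount++;
            }
        }
    } // close() is called here even if poll() throws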
org/example/KafkaConfig.java
@@ -0,0 +1,30 @@
package org.example;

import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;

public class KafkaConfig {

    static Properties properties() {

        String kafkaHost = System.getenv("KAFKA_HOST");
        String rhoasClientID = System.getenv("RHOAS_SERVICE_ACCOUNT_CLIENT_ID");
        String rhoasClientSecret = System.getenv("RHOAS_SERVICE_ACCOUNT_CLIENT_SECRET");
        String rhoasOauthTokenUrl = System.getenv("RHOAS_SERVICE_ACCOUNT_OAUTH_TOKEN_URL");

        var properties = new Properties();

        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaHost);
        properties.setProperty("security.protocol", "SASL_SSL");
        properties.setProperty("sasl.mechanism", "OAUTHBEARER");

        properties.setProperty("sasl.jaas.config",
                "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required clientId=\"" + rhoasClientID
                        + "\" clientSecret=\"" + rhoasClientSecret
                        + "\" oauth.token.endpoint.uri=\"" + rhoasOauthTokenUrl + "\";");

        properties.setProperty("sasl.login.callback.handler.class",
                "org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerLoginCallbackHandler");
        properties.setProperty("sasl.oauthbearer.token.endpoint.url", rhoasOauthTokenUrl);
        properties.setProperty("sasl.oauthbearer.scope.claim.name", "api.iam.service_accounts");

        return properties;
    }
}
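KafkaConfig pulls every connection setting from the environment, so KAFKA_HOST, RHOAS_SERVICE_ACCOUNT_CLIENT_ID, RHOAS_SERVICE_ACCOUNT_CLIENT_SECRET, and RHOAS_SERVICE_ACCOUNT_OAUTH_TOKEN_URL all need to be exported before either example is run; with any of them unset the configuration ends up incomplete (a null bootstrap server or token URL even makes Properties.setProperty throw a NullPointerException). A fail-fast guard, purely a sketch and not part of this commit, could look like:

    // Hypothetical helper, not in this PR: report a missing variable instead of failing later.
    private static String requireEnv(String name) {
        String value = System.getenv(name);
        if (value == null || value.isBlank()) {
            throw new IllegalStateException("Missing required environment variable: " + name);
        }
        return value;
    }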
org/example/ProducerExample.java
@@ -0,0 +1,23 @@
package org.example;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.serialization.StringSerializer;

public class ProducerExample {

    public static void main(String[] args) {

        // Creating producer properties
        var properties = KafkaConfig.properties();
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(properties);
Comment on lines +13 to +17

Contributor: We already use var in this example, so I think using it on line 17 as well would reduce the clutter here.

Author: You mean something like var producer = ?

Contributor: Yes, exactly.


        producer.send(new ProducerRecord<>("prices", "Test Message"));
        producer.flush();
        producer.close();
    }
}
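Applied to the constructor call on line 17 that the review thread discusses, the suggested change would presumably read as follows, keeping the explicit type arguments so the inferred type stays KafkaProducer<String, String>:

    var producer = new KafkaProducer<String, String>(properties);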