diff --git a/spring-integration-kafka/.gitignore b/spring-integration-kafka/.gitignore
deleted file mode 100644
index 0c7fdb94..00000000
--- a/spring-integration-kafka/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-.classpath
-.project
-.settings/
-.gradle
-build
-target/
-bin/
-derby.log
diff --git a/spring-integration-kafka/README.md b/spring-integration-kafka/README.md
index 7e7cebcb..662edf3b 100644
--- a/spring-integration-kafka/README.md
+++ b/spring-integration-kafka/README.md
@@ -1,382 +1,4 @@
 Spring Integration Kafka Adapter
 =================================================
-Welcome to the *Spring Integration Kafka adapter*. Apache Kafka is a distributed publish-subscribe messaging system designed to handle terabytes of high-throughput data in constant time. For more information on Kafka and its design goals, please see the [Kafka main page](http://kafka.apache.org/).
-
-The Spring Integration Kafka adapters are built for Kafka 0.8, and since 0.8 is not backward compatible with any previous version, Spring Integration does not support Kafka versions prior to 0.8. As of this writing Kafka 0.8 is still a work in progress, but a beta release is available [here](http://kafka.apache.org/downloads.html).
-
-Checking out and building
------------------------------
-
-Currently the Spring Integration Kafka adapter is built against Kafka 0.8, which is backed by Scala 2.9.2.
-
-In order to build the project:
-
-    ./gradlew build
-
-In order to install this into your local maven cache:
-
-    ./gradlew install
-
-The Spring Integration Kafka project currently provides the two components listed below. Please keep in mind that the project is at a very early stage of development and does not yet make use of all the features Kafka provides.
-
-* Outbound Channel Adapter
-* Inbound Channel Adapter based on the High level consumer API
-
-Outbound Channel Adapter:
---------------------------------------------
-
-The Outbound channel adapter is used to send messages to Kafka. Messages are read from a Spring Integration channel; you specify this channel in the application context and then wire it into the application that sends messages to Kafka.
-
-Once a channel is configured, messages can be sent to Kafka through it. Spring Integration messages are sent to the adapter, which internally converts them to Kafka messages before sending. In the current version of the outbound adapter, you have to specify the message key and the topic as header values, and the message to send as the payload. Here is an example.
-
-```java
-final MessageChannel channel = ctx.getBean("inputToKafka", MessageChannel.class);
-
-channel.send(
-        MessageBuilder.withPayload(payload)
-                .setHeader("messageKey", "key")
-                .setHeader("topic", "test").build());
-```
-
-This creates a message with a payload and two header entries as key/value pairs: one for the message key and another for the topic that this message belongs to.
-
-Here is how the Kafka outbound channel adapter is configured:
-
-```xml
-<int-kafka:outbound-channel-adapter id="kafkaOutboundChannelAdapter"
-                                    kafka-producer-context-ref="kafkaProducerContext"
-                                    channel="inputToKafka">
-    <int:poller fixed-delay="1000" time-unit="MILLISECONDS"
-                receive-timeout="0" task-executor="taskExecutor"/>
-</int-kafka:outbound-channel-adapter>
-```
-
-The key aspect in this configuration is the producer-context-ref. The producer context contains the producer configurations for all the topics that this adapter is expected to handle. The channel on which messages arrive is configured on the adapter, so any message sent to that channel is handled by this adapter. You can also configure a poller, depending on the type of the channel used. For example, the configuration above uses a queue-based channel, and thus a poller is configured with a task executor. If no messages are available in the queue, the receive times out immediately because of the receive-timeout setting, and the adapter polls again after a delay of 1 second.
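As a companion to the adapter definition above, here is a minimal sketch of the channel and executor beans it references; the bean ids come from the example, while the executor sizing values are illustrative assumptions:

```xml
<!-- Queue-based channel the adapter reads from (id referenced by the example above) -->
<int:channel id="inputToKafka">
    <int:queue/>
</int:channel>

<!-- Task executor used by the poller; the sizing shown is an illustrative assumption -->
<task:executor id="taskExecutor" pool-size="5" keep-alive="120" queue-capacity="500"/>
```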
-
-The producer context is at the heart of the Kafka outbound adapter. Here is an example of how it is configured.
-
-```xml
-<int-kafka:producer-context id="kafkaProducerContext">
-    <int-kafka:producer-configurations>
-        <int-kafka:producer-configuration broker-list="localhost:9092"
-                                          topic="test1"
-                                          value-encoder="kafkaEncoder"
-                                          key-encoder="kafkaEncoder"
-                                          compression-codec="default"/>
-        <int-kafka:producer-configuration broker-list="localhost:9092"
-                                          topic="test2"
-                                          compression-codec="default"/>
-    </int-kafka:producer-configurations>
-</int-kafka:producer-context>
-```
-
-There are a few things going on here, so let's go one by one. First of all, the producer context is simply a holder of, as the name indicates, a context for the Kafka producer. It contains one or more producer configurations, and each producer configuration is ultimately translated into a native Kafka producer. Right now, each producer configuration is per topic. Going by the above example, two producers are generated from this configuration: one for the topic named test1 and another for test2. Each producer can take the following:
-
-    broker-list          List of comma separated brokers that this producer connects to
-    topic                Topic name or Java regex pattern of topic name
-    compression-codec    Compression method to be used. Default is no compression. Supported compression codecs are gzip and snappy;
-                         anything else results in no compression
-    value-encoder        Serializer to be used for encoding messages
-    key-encoder          Serializer to be used for encoding the partition key
-    key-class-type       Type of the key class. This is ignored if no key-encoder is provided
-    value-class-type     Type of the value class. This is ignored if no value-encoder is provided
-    partitioner          Custom implementation of the Kafka Partitioner interface
-    async                True/False, default is false. Setting this to true makes the Kafka producer use an async producer
-    batch-num-messages   Number of messages to batch at the producer. If async is false, this has no effect
-
-The value-encoder and key-encoder refer to other Spring beans; they are implementations of the Encoder interface provided by Kafka. Similarly, partitioner refers to a Spring bean which implements the Kafka Partitioner interface.
-
-Here is an example of configuring an encoder.
-
-```xml
-<bean id="kafkaEncoder"
-      class="org.springframework.integration.kafka.serializer.avro.AvroSpecificDatumBackedKafkaEncoder">
-    <constructor-arg value="com.company.AvroGeneratedSpecificRecord"/>
-</bean>
-```
-
-The Spring Integration Kafka adapter provides Apache Avro backed encoders out of the box, since Avro is a popular choice for serialization in the big data space. If no encoders are specified as beans, the default encoders provided by Kafka are used; and if an encoder is configured only for the message and not for the key, the same encoder is used for both. These are standard Kafka behaviors, which the Spring Integration Kafka adapter simply follows. The Kafka default encoder expects the data to come as byte arrays and is a no-op encoder, i.e. it takes the byte array as it is. When the default encoders are used, there are two ways a message can be sent: either the sender puts byte arrays on the channel as the message key and payload, or the key and value are sent as Java Serializable objects, in which case the Kafka adapter automatically converts them to byte arrays before sending to the Kafka broker. If the encoders are the defaults and the objects sent are not serializable, that causes an error. By providing explicit encoders it is entirely up to the developer to configure how the objects are serialized; in that case, the objects may or may not implement the Serializable interface.
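Where the stock encoders do not fit, a custom one is a small amount of code. The following is a minimal sketch against Kafka 0.8's `kafka.serializer.Encoder` interface; the class name is illustrative, not part of the adapter:

```java
import java.io.UnsupportedEncodingException;

import kafka.serializer.Encoder;

/**
 * Minimal sketch of a custom value encoder: Kafka 0.8's Encoder
 * interface declares a single method that converts an object to bytes.
 */
public class UtfStringEncoder implements Encoder<String> {

    @Override
    public byte[] toBytes(String payload) {
        try {
            // Serialize the payload with a fixed character set
            return payload.getBytes("UTF-8");
        }
        catch (UnsupportedEncodingException e) {
            throw new IllegalStateException("UTF-8 is not supported", e);
        }
    }

}
```

A bean of such a class can then be referenced from the value-encoder or key-encoder attributes shown above.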
-
-A bit more on the Avro support. There are two flavors of Avro encoders provided, one based on Avro's ReflectDatum and the other based on SpecificDatum. Encoding using reflection is fairly simple, as you only have to configure your POJO or other class type along with the XML. Here is an example.
-
-```xml
-<bean id="kafkaEncoder"
-      class="org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder">
-    <constructor-arg value="com.company.SomePojo"/>
-</bean>
-```
-
-Reflection based encoding may not be appropriate for large scale systems, and Avro's SpecificDatum based encoders can be a better fit. In this case, you generate a specific Avro object (a glorified POJO) from a schema definition, and the generated object stores the schema as well. You do need to generate the Avro object separately, though; there are both Maven and Gradle plugins available to do the code generation automatically, and you provide avdl or avsc files to specify your schema. Once you take care of these steps, you can simply configure a SpecificDatum based Avro encoder (see the first example above) and pass along the fully qualified class name of the generated Avro object whose instances you want to encode. The samples project has examples of using both of these encoders.
-
-Encoding Strings for keys and values is a very common use case, and Kafka provides a StringEncoder out of the box. Its constructor takes a Kafka-specific VerifiableProperties object, which wraps a regular java.util.Properties object. The StringEncoder is great when writing a direct Java client that talks to Kafka; however, when using the Spring Integration Kafka adapter, it introduces unnecessary steps to create these properties objects. Therefore, we provide a wrapper around the same StringEncoder as part of the Spring Integration Kafka support, which makes using it from Spring a bit easier: you can inject any properties into it the Spring way. Kafka's StringEncoder looks at a specific property for the encoding scheme to use; in the wrapper bean provided, this property can simply be injected as a value without constructing any other objects. The Spring Integration provided StringEncoder is available as org.springframework.integration.kafka.serializer.common.StringEncoder, and the Avro serialization support lives in the avro package under serializer.
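For illustration, a sketch of wiring that wrapper as a bean; the `encoding` property name is an assumption based on the description above:

```xml
<!-- Sketch of the Spring Integration StringEncoder wrapper; the 'encoding'
     property name is assumed from the description above -->
<bean id="stringEncoder"
      class="org.springframework.integration.kafka.serializer.common.StringEncoder">
    <property name="encoding" value="UTF-8"/>
</bean>
```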
-
-#### Tuning Producer Properties
-
-The Kafka Producer API provides several [Producer Configs](http://kafka.apache.org/documentation.html#producerconfigs) to fine-tune producers. To specify those properties, the `producer-context` element supports an optional `producer-properties` attribute that can reference a Spring properties bean. These properties are applied to all producer configurations within the producer context. For example:
-
-```xml
-<bean id="producerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
-    <property name="properties">
-        <props>
-            <prop key="topic.metadata.refresh.interval.ms">3600000</prop>
-            <prop key="message.send.max.retries">5</prop>
-            <prop key="send.buffer.bytes">5242880</prop>
-        </props>
-    </property>
-</bean>
-
-<int-kafka:producer-context id="producerContext" producer-properties="producerProperties">
-    <int-kafka:producer-configurations>
-        <int-kafka:producer-configuration ... />
-        <int-kafka:producer-configuration ... />
-        <int-kafka:producer-configuration ... />
-    </int-kafka:producer-configurations>
-</int-kafka:producer-context>
-```
-
-Inbound Channel Adapter:
---------------------------------------------
-
-The Inbound channel adapter is used to consume messages from Kafka. These messages are placed into a channel as Spring Integration messages. Kafka primarily provides two types of consumer APIs: the High Level Consumer and the Simple Consumer. The High Level Consumer is fairly complex internally; nonetheless, using the high level API from a client is straightforward. Although easy to use, the High Level Consumer does not provide any offset management, so if you want to rewind and re-fetch messages, it is not possible to do so using the High Level Consumer API; offsets are managed internally by Zookeeper. If your use case does not require offset management or re-reading messages from the same consumer, the High Level Consumer is a perfect fit. The Spring Integration Kafka inbound channel adapter currently supports only the High Level Consumer. Here are the details of configuring one.
-
-```xml
-<int-kafka:inbound-channel-adapter id="kafkaInboundChannelAdapter"
-                                   kafka-consumer-context-ref="consumerContext"
-                                   channel="inputFromKafka">
-    <int:poller fixed-delay="10" time-unit="MILLISECONDS" max-messages-per-poll="5"/>
-</int-kafka:inbound-channel-adapter>
-```
-
-Since this inbound channel adapter uses a polling channel under the hood, it must be configured with a poller. A notable difference between the poller configured with this inbound adapter and other pollers used in Spring Integration is that the receive-timeout specified on this poller does not have any effect. The reason lies in the way Kafka implements iterators on the consumer stream: it uses a BlockingQueue internally and would therefore wait indefinitely. Instead of interrupting the underlying thread, we leverage Kafka's own support for consumer timeouts, which is configured on the consumer context. Everything else is pretty much the same as in a regular inbound adapter: any message it receives is sent to the channel configured with it.
-
-The inbound Kafka adapter must specify a kafka-consumer-context-ref, and here is how a consumer context is configured:
-
-```xml
-<int-kafka:consumer-context id="consumerContext"
-                            consumer-timeout="4000"
-                            zookeeper-connect="zookeeperConnect">
-    <int-kafka:consumer-configurations>
-        <int-kafka:consumer-configuration group-id="default" max-messages="5000">
-            <int-kafka:topic id="test1" streams="4"/>
-            <int-kafka:topic id="test2" streams="4"/>
-        </int-kafka:consumer-configuration>
-    </int-kafka:consumer-configurations>
-</int-kafka:consumer-context>
-```
-
-`consumer-configuration` supports consuming from a specific topic using a `topic` child element, or from multiple topics matching a topic regex using the `topic-filter` child element. `topic-filter` supports both whitelist and blacklist filtering, based on the `exclude` attribute.
-
-The consumer context requires a reference to a zookeeper-connect, which dictates all the Zookeeper-specific configuration details. Here is how a zookeeper-connect is configured.
-
-```xml
-<int-kafka:zookeeper-connect id="zookeeperConnect"
-                             zk-connect="localhost:2181"
-                             zk-connection-timeout="6000"
-                             zk-session-timeout="6000"
-                             zk-sync-time="2000"/>
-```
-
-The zk-connect attribute is where you specify the Zookeeper connection; all the other attributes are translated into their Zookeeper counterparts by the consumer.
-
-On the consumer context you can also specify a consumer-timeout value, used to time out the consumer when no messages are available to consume. This timeout applies to all the streams (threads) in the consumer. The default value for this in Kafka is -1, which makes the consumer wait indefinitely; however, Spring Integration overrides it to 5 seconds by default, to make sure that no threads block indefinitely during the lifecycle of the application, thereby giving them a chance to free up any resources or locks that they hold. It is recommended to override this value to meet the requirements of your specific use case. By providing a reasonable consumer-timeout on the context and a fixed-delay value on the poller, this inbound adapter is capable of simulating message-driven behaviour.
-
-The consumer context takes consumer-configurations, which are at the core of the inbound adapter: a group of one or more consumer-configuration elements, each defining a consumer group dictated by its group-id. Each consumer-configuration can be configured with one or more kafka-topics.
-
-In the example provided above, a single consumer-configuration consumes messages from two topics, each with 4 streams. These streams are fundamentally equivalent to the number of partitions that a topic is configured with on the producer side. For instance, if you configure your topic with 4 partitions, then the maximum number of streams that you may have in the consumer is also 4; any more than this would be a no-op. If you have fewer streams than the available partitions, messages from multiple partitions are sent to the available streams. Therefore, it is a good practice to match the number of streams for a topic in the consumer configuration to the number of partitions configured for the topic. There may be situations in which a partition goes away at runtime; in that case the stream receiving data from that partition simply times out, and whenever the partition comes back, the stream starts reading data from it again.
-
-A consumer configuration can also be given optional decoders for the key and value. The default decoders provided by Kafka are no-ops and consume the data as byte arrays. If you provide an encoder for the key/value in the producer, it is recommended to provide the corresponding decoders. As discussed for the outbound adapter, the Spring Integration Kafka adapter provides Apache Avro based data serialization components out of the box; you can use any serialization component for this purpose as long as you implement the required encoder/decoder interfaces from Kafka. As with the Avro encoder support, the decoders provided also implement Reflection and SpecificDatum based de-serialization. Here is how you would configure Avro-backed Kafka decoder beans.
-
-Using Avro Specific support:
-
-```xml
-<bean id="kafkaDecoder"
-      class="org.springframework.integration.kafka.serializer.avro.AvroSpecificDatumBackedKafkaDecoder">
-    <constructor-arg value="com.company.AvroGeneratedSpecificRecord"/>
-</bean>
-```
-
-Using Reflection support:
-
-```xml
-<bean id="kafkaDecoder"
-      class="org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaDecoder">
-    <constructor-arg value="java.lang.String"/>
-</bean>
-```
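If neither Avro flavor fits, a decoder can be hand-written against Kafka 0.8's `kafka.serializer.Decoder` interface, mirroring the encoder sketch from the outbound section; the class name is illustrative:

```java
import java.io.UnsupportedEncodingException;

import kafka.serializer.Decoder;

/**
 * Minimal sketch of a custom value decoder: the mirror image of the
 * Encoder, turning the raw bytes consumed from the broker back into
 * an object.
 */
public class UtfStringDecoder implements Decoder<String> {

    @Override
    public String fromBytes(byte[] bytes) {
        try {
            return new String(bytes, "UTF-8");
        }
        catch (UnsupportedEncodingException e) {
            throw new IllegalStateException("UTF-8 is not supported", e);
        }
    }

}
```

A bean of such a class is then referenced from the value-decoder or key-decoder attributes of the consumer-configuration.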
-
-Another important attribute of the consumer-configuration is max-messages. Please note that this is different from the max-messages-per-poll configured on the inbound adapter element, which is the number of times the receive method is called on the adapter per poll. The max-messages on the consumer configuration is different: using Kafka for ingesting messages usually means a constant influx of a large amount of data, so each time a receive is invoked on the adapter, you basically get a collection of messages. The maximum number of messages to retrieve for a topic in each execution of receive is what is configured through the max-messages attribute on the consumer-configuration. So if the use case is to receive a constant stream of a large number of messages, simply specifying a consumer-timeout alone is not enough; you also need to specify the maximum number of messages to receive.
-
-The type of the payload of the Message returned by the adapter is the following:
-
-```java
-Map<String, Map<Integer, List<Object>>>
-```
-
-It is a java.util.Map that contains the consumed topic string as the key and another Map as the value. The inner map's key is the stream (partition) number and its value is a list of message payloads. The reason for this complex return type is the way Kafka orders messages: in the high level consumer, all the messages received in a single stream for a single partition are guaranteed to be in order. For example, if you have a topic named test configured with 4 partitions and 4 corresponding streams in the consumer, you receive data in all the consumer streams in the same order as it was put in the corresponding partitions. This is another reason to set the number of consumer streams for a topic equal to the number of broker partitions configured for that topic. Let's say the number of streams is less than the number of partitions. Then, normally, there is no ordering guarantee beyond the fact that a single stream contains messages from multiple partitions and the messages received from a given single partition are still kept contiguous; at that point there is no way to find out which set of messages came from which partition. By providing this complex map that carries the partition information for the topic, we make sure that the order sent by the producer is preserved even when the number of streams used is less than the number of broker partitions.
-
-A downstream component which receives the data from the inbound adapter can cast the SI payload to the above Map.
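To make the shape of that payload concrete, here is a minimal sketch of a downstream handler that walks the map; the unchecked cast and the process callback are illustrative:

```java
import java.util.List;
import java.util.Map;

import org.springframework.messaging.Message;

public class KafkaPayloadHandler {

    @SuppressWarnings("unchecked")
    public void handle(Message<?> message) {
        Map<String, Map<Integer, List<Object>>> payload =
                (Map<String, Map<Integer, List<Object>>>) message.getPayload();
        for (Map.Entry<String, Map<Integer, List<Object>>> topic : payload.entrySet()) {
            for (Map.Entry<Integer, List<Object>> stream : topic.getValue().entrySet()) {
                // Within a single stream, messages from one partition stay in order
                for (Object event : stream.getValue()) {
                    process(topic.getKey(), stream.getKey(), event);
                }
            }
        }
    }

    private void process(String topic, int stream, Object event) {
        // Application-specific handling goes here (illustrative placeholder)
    }

}
```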
-
-If your use case does not require ordering of messages during consumption, then you can easily pass this payload to a standard SI transformer and just get a flat dump of the actual payloads sent by Kafka.
-
-#### Tuning Consumer Properties
-
-The Kafka Consumer API provides several [Consumer Configs](http://kafka.apache.org/documentation.html#consumerconfigs) to fine-tune consumers. To specify those properties, the `consumer-context` element supports an optional `consumer-properties` attribute that can reference a Spring properties bean. These properties are applied to all consumer configurations within the consumer context. For example:
-
-```xml
-<bean id="consumerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
-    <property name="properties">
-        <props>
-            <prop key="auto.offset.reset">smallest</prop>
-            <prop key="socket.receive.buffer.bytes">10485760</prop>
-            <prop key="fetch.message.max.bytes">5242880</prop>
-            <prop key="auto.commit.interval.ms">1000</prop>
-        </props>
-    </property>
-</bean>
-
-<int-kafka:consumer-context id="consumerContext"
-                            consumer-timeout="4000"
-                            zookeeper-connect="zookeeperConnect"
-                            consumer-properties="consumerProperties">
-    <int-kafka:consumer-configurations>
-        <int-kafka:consumer-configuration ... >
-            ...
-        </int-kafka:consumer-configuration>
-    </int-kafka:consumer-configurations>
-</int-kafka:consumer-context>
-```
+The project is hosted on https://github.com/spring-projects/spring-integration-kafka
\ No newline at end of file
diff --git a/spring-integration-kafka/build.gradle b/spring-integration-kafka/build.gradle
deleted file mode 100644
index a7229848..00000000
--- a/spring-integration-kafka/build.gradle
+++ /dev/null
@@ -1,240 +0,0 @@
-description = 'Spring Integration Kafka Support'
-
-apply plugin: 'java'
-apply from: "${rootProject.projectDir}/publish-maven.gradle"
-apply plugin: 'eclipse'
-apply plugin: 'idea'
-
-group = 'org.springframework.integration'
-
-repositories {
-	maven {
-		url 'https://repository.apache.org/content/groups/public'
-	}
-	maven { url 'http://repo.spring.io/libs-milestone' }
-}
-
-sourceCompatibility = targetCompatibility = 1.6
-
-ext {
-	avroVersion = '1.7.6'
-	jacocoVersion = '0.7.0.201403182114'
-	kafkaVersion = '0.8.1.1'
-	metricsVersion = '2.2.0'
-	scalaVersion = '2.10'
-	springIntegrationVersion = '4.0.3.RELEASE'
-
-	idPrefix = 'kafka'
-
-	linkHomepage = 'https://github.com/spring-projects/spring-integration-extensions'
-	linkCi = 'https://build.spring.io/browse/INTEXT'
-	linkIssue = 'https://jira.spring.io/browse/INTEXT'
-	linkScmUrl = 'https://github.com/spring-projects/spring-integration-extensions'
-	linkScmConnection = 'https://github.com/spring-projects/spring-integration-extensions.git'
-	linkScmDevConnection = 'git@github.com:spring-projects/spring-integration-extensions.git'
-
-}
-
-eclipse.project.natures += 'org.springframework.ide.eclipse.core.springnature'
-
-sourceSets {
-	test {
-		resources {
-			srcDirs = ['src/test/resources', 'src/test/java']
-		}
-	}
-}
-
-// See http://www.gradle.org/docs/current/userguide/dependency_management.html#sub:configurations
-// and http://www.gradle.org/docs/current/dsl/org.gradle.api.artifacts.ConfigurationContainer.html
-configurations {
-	jacoco //Configuration Group used by Sonar to provide Code Coverage using JaCoCo
-}
-
-dependencies {
-	compile "org.springframework.integration:spring-integration-core:$springIntegrationVersion"
-	compile "org.apache.avro:avro:$avroVersion"
-	compile 
"org.apache.avro:avro-compiler:$avroVersion" - - runtime "com.yammer.metrics:metrics-core:$metricsVersion" - runtime "com.yammer.metrics:metrics-annotation:$metricsVersion" - - compile("org.apache.kafka:kafka_$scalaVersion:$kafkaVersion") { - exclude module: 'jms' - exclude module: 'jmxtools' - exclude module: 'jmxri' - } - - testCompile "org.springframework.integration:spring-integration-test:$springIntegrationVersion" - testCompile "org.springframework.integration:spring-integration-stream:$springIntegrationVersion" - - jacoco "org.jacoco:org.jacoco.agent:$jacocoVersion:runtime" -} - -// enable all compiler warnings; individual projects may customize further -[compileJava, compileTestJava]*.options*.compilerArgs = ['-Xlint:all,-options'] - -test { - // suppress all console output during testing unless running `gradle -i` - logging.captureStandardOutput(LogLevel.INFO) - jvmArgs "-javaagent:${configurations.jacoco.asPath}=destfile=${buildDir}/jacoco.exec,includes=*" -} - -task sourcesJar(type: Jar) { - classifier = 'sources' - from sourceSets.main.allJava -} - -task javadocJar(type: Jar) { - classifier = 'javadoc' - from javadoc -} - -artifacts { - archives sourcesJar - archives javadocJar -} - -apply plugin: 'sonar-runner' - -sonarRunner { - sonarProperties { - property "sonar.jacoco.reportPath", "${buildDir.name}/jacoco.exec" - property "sonar.links.homepage", linkHomepage - property "sonar.links.ci", linkCi - property "sonar.links.issue", linkIssue - property "sonar.links.scm", linkScmUrl - property "sonar.links.scm_dev", linkScmDevConnection - property "sonar.java.coveragePlugin", "jacoco" - } -} - -task api(type: Javadoc) { - group = 'Documentation' - description = 'Generates the Javadoc API documentation.' - title = "${rootProject.description} ${version} API" - options.memberLevel = org.gradle.external.javadoc.JavadocMemberLevel.PROTECTED - options.author = true - options.header = rootProject.description - options.overview = 'src/api/overview.html' - - source = sourceSets.main.allJava - classpath = project.sourceSets.main.compileClasspath - destinationDir = new File(buildDir, "api") -} - -task schemaZip(type: Zip) { - group = 'Distribution' - classifier = 'schema' - description = "Builds -${classifier} archive containing all " + - "XSDs for deployment at static.springframework.org/schema." - - def Properties schemas = new Properties(); - def shortName = idPrefix.replaceFirst("${idPrefix}-", '') - - project.sourceSets.main.resources.find { - it.path.endsWith('META-INF/spring.schemas') - }?.withInputStream { schemas.load(it) } - - for (def key : schemas.keySet()) { - File xsdFile = project.sourceSets.main.resources.find { - it.path.endsWith(schemas.get(key)) - } - assert xsdFile != null - into("integration/${shortName}") { - from xsdFile.path - } - } - -} - -task docsZip(type: Zip) { - group = 'Distribution' - classifier = 'docs' - description = "Builds -${classifier} archive containing api " + - "for deployment at static.spring.io/spring-integration/docs." - - from('src/dist') { - include 'changelog.txt' - } - - from(api) { - into 'api' - } -} - -task distZip(type: Zip, dependsOn: [docsZip, schemaZip]) { - group = 'Distribution' - classifier = 'dist' - description = "Builds -${classifier} archive, containing all jars and docs, " + - "suitable for community download page." 
- - ext.baseDir = "${project.name}-${project.version}"; - - from('src/dist') { - include 'readme.txt' - include 'license.txt' - include 'notice.txt' - into "${baseDir}" - } - - from(zipTree(docsZip.archivePath)) { - into "${baseDir}/docs" - } - - from(zipTree(schemaZip.archivePath)) { - into "${baseDir}/schema" - } - - into("${baseDir}/libs") { - from project.jar - from project.sourcesJar - from project.javadocJar - } -} - -// Create an optional "with dependencies" distribution. -// Not published by default; only for use when building from source. -task depsZip(type: Zip, dependsOn: distZip) { zipTask -> - group = 'Distribution' - classifier = 'dist-with-deps' - description = "Builds -${classifier} archive, containing everything " + - "in the -${distZip.classifier} archive plus all dependencies." - - from zipTree(distZip.archivePath) - - gradle.taskGraph.whenReady { taskGraph -> - if (taskGraph.hasTask(":${zipTask.name}")) { - def projectName = rootProject.name - def artifacts = new HashSet() - - rootProject.configurations.runtime.resolvedConfiguration.resolvedArtifacts.each { artifact -> - def dependency = artifact.moduleVersion.id - if (!projectName.equals(dependency.name)) { - artifacts << artifact.file - } - } - - zipTask.from(artifacts) { - into "${distZip.baseDir}/deps" - } - } - } -} - -artifacts { - archives distZip - archives docsZip - archives schemaZip -} - -task dist(dependsOn: assemble) { - group = 'Distribution' - description = 'Builds -dist, -docs and -schema distribution archives.' -} - -task wrapper(type: Wrapper) { - description = 'Generates gradlew[.bat] scripts' - gradleVersion = '1.12' - distributionUrl = "http://services.gradle.org/distributions/gradle-${gradleVersion}-all.zip" -} diff --git a/spring-integration-kafka/gradle.properties b/spring-integration-kafka/gradle.properties deleted file mode 100644 index bebfcbcf..00000000 --- a/spring-integration-kafka/gradle.properties +++ /dev/null @@ -1 +0,0 @@ -version=1.0.0.BUILD-SNAPSHOT diff --git a/spring-integration-kafka/gradle/wrapper/gradle-wrapper.jar b/spring-integration-kafka/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 0087cd3b..00000000 Binary files a/spring-integration-kafka/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/spring-integration-kafka/gradle/wrapper/gradle-wrapper.properties b/spring-integration-kafka/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index 4b427bb8..00000000 --- a/spring-integration-kafka/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Fri Jun 13 17:53:58 EEST 2014 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=http\://services.gradle.org/distributions/gradle-1.12-all.zip diff --git a/spring-integration-kafka/gradlew b/spring-integration-kafka/gradlew deleted file mode 100755 index 91a7e269..00000000 --- a/spring-integration-kafka/gradlew +++ /dev/null @@ -1,164 +0,0 @@ -#!/usr/bin/env bash - -############################################################################## -## -## Gradle start up script for UN*X -## -############################################################################## - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" - -APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` - -# Use the maximum available, or set MAX_FD != -1 to use that value. 
-MAX_FD="maximum" - -warn ( ) { - echo "$*" -} - -die ( ) { - echo - echo "$*" - echo - exit 1 -} - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; -esac - -# For Cygwin, ensure paths are in UNIX format before anything is touched. -if $cygwin ; then - [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` -fi - -# Attempt to set APP_HOME -# Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi -done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >&- -APP_HOME="`pwd -P`" -cd "$SAVED" >&- - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - -# Determine the Java command to use to start the JVM. -if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD="java" - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? 
-ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi -fi - -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi - -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi - # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" - fi - i=$((i+1)) - done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac -fi - -# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules -function splitJvmOpts() { - JVM_OPTS=("$@") -} -eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS -JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" - -exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/spring-integration-kafka/gradlew.bat b/spring-integration-kafka/gradlew.bat deleted file mode 100644 index aec99730..00000000 --- a/spring-integration-kafka/gradlew.bat +++ /dev/null @@ -1,90 +0,0 @@ -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. 
- -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto init - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:init -@rem Get command-line arguments, handling Windowz variants - -if not "%OS%" == "Windows_NT" goto win9xME_args -if "%@eval[2+2]" == "4" goto 4NT_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* -goto execute - -:4NT_args -@rem Get arguments from the 4NT Shell from JP Software -set CMD_LINE_ARGS=%$ - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/spring-integration-kafka/publish-maven.gradle b/spring-integration-kafka/publish-maven.gradle deleted file mode 100644 index 06781c91..00000000 --- a/spring-integration-kafka/publish-maven.gradle +++ /dev/null @@ -1,62 +0,0 @@ -apply plugin: 'maven' - -ext.optionalDeps = [] -ext.providedDeps = [] - -ext.optional = { optionalDeps << it } -ext.provided = { providedDeps << it } - -install { - repositories.mavenInstaller { - customizePom(pom, project) - } -} - -def customizePom(pom, gradleProject) { - pom.whenConfigured { generatedPom -> - // respect 'optional' and 'provided' dependencies - gradleProject.optionalDeps.each { dep -> - generatedPom.dependencies.find { it.artifactId == dep.name }?.optional = true - } - gradleProject.providedDeps.each { dep -> - generatedPom.dependencies.find { it.artifactId == dep.name }?.scope = 'provided' - } - - // eliminate test-scoped dependencies (no need in maven central poms) - generatedPom.dependencies.removeAll { dep -> - dep.scope == 'test' - } - - // add all items necessary for maven central publication - generatedPom.project { - name = gradleProject.description - description = gradleProject.description - url = linkHomepage - organization { - name = 'SpringSource' - url = 'http://spring.io' - } - licenses { - license { - name 'The Apache Software License, Version 2.0' - url 'http://www.apache.org/licenses/LICENSE-2.0.txt' - distribution 'repo' - } - } - - scm { - url = linkScmUrl - connection = 'scm:git:' + linkScmConnection - developerConnection = 'scm:git:' + linkScmDevConnection - } - - developers { - developer { - id = 'schacko' - name = 'Soby Chacko' - email = 'schacko@gopivotal.com' - } - } - } - } -} diff --git a/spring-integration-kafka/src/api/overview.html b/spring-integration-kafka/src/api/overview.html deleted file mode 100644 index d9e5faad..00000000 --- a/spring-integration-kafka/src/api/overview.html +++ /dev/null @@ -1,22 +0,0 @@ - - -This document is the API specification for Spring Integration Kafka Extension -
-
-

- For further API reference and developer documentation, see the - Spring - Integration reference documentation. - That documentation contains more detailed, developer-targeted - descriptions, with conceptual overviews, definitions of terms, - workarounds, and working code examples. -

- -

- If you are interested in commercial training, consultancy, and - support for Spring Integration, please visit - http://www.springsource.com -

-
-
-
-
diff --git a/spring-integration-kafka/src/dist/license.txt b/spring-integration-kafka/src/dist/license.txt
deleted file mode 100644
index c94ec852..00000000
--- a/spring-integration-kafka/src/dist/license.txt
+++ /dev/null
@@ -1,201 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!) The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/spring-integration-kafka/src/dist/notice.txt b/spring-integration-kafka/src/dist/notice.txt deleted file mode 100644 index f62045a2..00000000 --- a/spring-integration-kafka/src/dist/notice.txt +++ /dev/null @@ -1,21 +0,0 @@ - ======================================================================== - == NOTICE file corresponding to section 4 d of the Apache License, == - == Version 2.0, in this case for the Spring Integration distribution. == - ======================================================================== - - This product includes software developed by - the Apache Software Foundation (http://www.apache.org). - - The end-user documentation included with a redistribution, if any, - must include the following acknowledgement: - - "This product includes software developed by the Spring Framework - Project (http://www.springframework.org)." - - Alternatively, this acknowledgement may appear in the software itself, - if and wherever such third-party acknowledgements normally appear. - - The names "Spring", "Spring Framework", and "Spring Integration" must - not be used to endorse or promote products derived from this software - without prior written permission. For written permission, please contact - enquiries@springsource.com. diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParser.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParser.java deleted file mode 100644 index 7604d0d9..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParser.java +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */
-
-package org.springframework.integration.kafka.config.xml;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.w3c.dom.Element;
-
-import org.springframework.beans.BeanMetadataElement;
-import org.springframework.beans.factory.config.BeanDefinition;
-import org.springframework.beans.factory.support.AbstractBeanDefinition;
-import org.springframework.beans.factory.support.BeanDefinitionBuilder;
-import org.springframework.beans.factory.support.ManagedMap;
-import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser;
-import org.springframework.beans.factory.xml.ParserContext;
-import org.springframework.integration.config.xml.IntegrationNamespaceUtils;
-import org.springframework.integration.kafka.support.ConsumerConfigFactoryBean;
-import org.springframework.integration.kafka.support.ConsumerConfiguration;
-import org.springframework.integration.kafka.support.ConsumerConnectionProvider;
-import org.springframework.integration.kafka.support.ConsumerMetadata;
-import org.springframework.integration.kafka.support.KafkaConsumerContext;
-import org.springframework.integration.kafka.support.MessageLeftOverTracker;
-import org.springframework.integration.kafka.support.TopicFilterConfiguration;
-import org.springframework.util.StringUtils;
-import org.springframework.util.xml.DomUtils;
-
-/**
- * @author Soby Chacko
- * @author Rajasekar Elango
- * @author Artem Bilan
- * @author Ilayaperumal Gopinathan
- * @since 0.5
- */
-public class KafkaConsumerContextParser extends AbstractSingleBeanDefinitionParser {
-
-	@Override
-	protected Class<?> getBeanClass(final Element element) {
-		return KafkaConsumerContext.class;
-	}
-
-	@Override
-	protected void doParse(final Element element, final ParserContext parserContext, final BeanDefinitionBuilder builder) {
-		super.doParse(element, parserContext, builder);
-
-		final Element consumerConfigurations = DomUtils.getChildElementByTagName(element, "consumer-configurations");
-		parseConsumerConfigurations(consumerConfigurations, parserContext, builder, element);
-	}
-
-	private void parseConsumerConfigurations(final Element consumerConfigurations, final ParserContext parserContext,
-			final BeanDefinitionBuilder builder, final Element parentElem) {
-		Map<String, BeanMetadataElement> consumerConfigurationsMap = new ManagedMap<String, BeanMetadataElement>();
-		for (final Element consumerConfiguration : DomUtils.getChildElementsByTagName(consumerConfigurations, "consumer-configuration")) {
-			final BeanDefinitionBuilder consumerConfigurationBuilder =
-					BeanDefinitionBuilder.genericBeanDefinition(ConsumerConfiguration.class);
-			final BeanDefinitionBuilder consumerMetadataBuilder =
-					BeanDefinitionBuilder.genericBeanDefinition(ConsumerMetadata.class);
-
-			IntegrationNamespaceUtils.setValueIfAttributeDefined(consumerMetadataBuilder, consumerConfiguration,
-					"group-id");
-
-			IntegrationNamespaceUtils.setReferenceIfAttributeDefined(consumerMetadataBuilder, consumerConfiguration,
-					"value-decoder");
-			IntegrationNamespaceUtils.setReferenceIfAttributeDefined(consumerMetadataBuilder, consumerConfiguration,
-					"key-decoder");
-			IntegrationNamespaceUtils.setValueIfAttributeDefined(consumerMetadataBuilder, consumerConfiguration,
-					"key-class-type");
-			IntegrationNamespaceUtils.setValueIfAttributeDefined(consumerMetadataBuilder, consumerConfiguration,
-					"value-class-type");
-			IntegrationNamespaceUtils.setValueIfAttributeDefined(consumerConfigurationBuilder, consumerConfiguration,
-					"max-messages");
-
-			IntegrationNamespaceUtils.setValueIfAttributeDefined(consumerMetadataBuilder, parentElem,
-					"consumer-timeout");
-
-			final Map<String, String> topicStreamsMap = new HashMap<String, String>();
-
-			final List<Element> topicConfigurations = DomUtils.getChildElementsByTagName(consumerConfiguration, "topic");
-
-			if (topicConfigurations != null) {
-				for (final Element topicConfiguration : topicConfigurations) {
-					final String topic = topicConfiguration.getAttribute("id");
-					final String streams = topicConfiguration.getAttribute("streams");
-					topicStreamsMap.put(topic, streams);
-				}
-				consumerMetadataBuilder.addPropertyValue("topicStreamMap", topicStreamsMap);
-			}
-
-			final Element topicFilter = DomUtils.getChildElementByTagName(consumerConfiguration, "topic-filter");
-
-			if (topicFilter != null) {
-				BeanDefinition topicFilterConfigurationBeanDefinition =
-						BeanDefinitionBuilder.genericBeanDefinition(TopicFilterConfiguration.class)
-								.addConstructorArgValue(topicFilter.getAttribute("pattern"))
-								.addConstructorArgValue(topicFilter.getAttribute("streams"))
-								.addConstructorArgValue(topicFilter.getAttribute("exclude"))
-								.getBeanDefinition();
-				consumerMetadataBuilder.addPropertyValue("topicFilterConfiguration",
-						topicFilterConfigurationBeanDefinition);
-			}
-
-			final AbstractBeanDefinition consumerMetadataBeanDefinition = consumerMetadataBuilder.getBeanDefinition();
-
-			final String zookeeperConnectBean = parentElem.getAttribute("zookeeper-connect");
-			IntegrationNamespaceUtils.setReferenceIfAttributeDefined(builder, parentElem, zookeeperConnectBean);
-
-			final String consumerPropertiesBean = parentElem.getAttribute("consumer-properties");
-
-			final BeanDefinitionBuilder consumerConfigFactoryBuilder =
-					BeanDefinitionBuilder.genericBeanDefinition(ConsumerConfigFactoryBean.class);
-			consumerConfigFactoryBuilder.addConstructorArgValue(consumerMetadataBeanDefinition);
-
-			if (StringUtils.hasText(zookeeperConnectBean)) {
-				consumerConfigFactoryBuilder.addConstructorArgReference(zookeeperConnectBean);
-			}
-
-			if (StringUtils.hasText(consumerPropertiesBean)) {
-				consumerConfigFactoryBuilder.addConstructorArgReference(consumerPropertiesBean);
-			}
-
-			AbstractBeanDefinition consumerConfigFactoryBuilderBeanDefinition =
-					consumerConfigFactoryBuilder.getBeanDefinition();
-
-			BeanDefinitionBuilder consumerConnectionProviderBuilder =
-					BeanDefinitionBuilder.genericBeanDefinition(ConsumerConnectionProvider.class);
-			consumerConnectionProviderBuilder.addConstructorArgValue(consumerConfigFactoryBuilderBeanDefinition);
-
-			AbstractBeanDefinition consumerConnectionProviderBuilderBeanDefinition =
-					consumerConnectionProviderBuilder.getBeanDefinition();
-
-			BeanDefinitionBuilder messageLeftOverBeanDefinitionBuilder =
-					BeanDefinitionBuilder.genericBeanDefinition(MessageLeftOverTracker.class);
-			AbstractBeanDefinition messageLeftOverBeanDefinition =
-					messageLeftOverBeanDefinitionBuilder.getBeanDefinition();
-
-			consumerConfigurationBuilder.addConstructorArgValue(consumerMetadataBeanDefinition);
-			consumerConfigurationBuilder.addConstructorArgValue(consumerConnectionProviderBuilderBeanDefinition);
-			consumerConfigurationBuilder.addConstructorArgValue(messageLeftOverBeanDefinition);
-
-			AbstractBeanDefinition consumerConfigurationBeanDefinition =
-					consumerConfigurationBuilder.getBeanDefinition();
-			consumerConfigurationsMap.put(consumerConfiguration.getAttribute("group-id"),
-					consumerConfigurationBeanDefinition);
-		}
-		builder.addPropertyValue("consumerConfigurations", consumerConfigurationsMap);
-	}
-
-}
diff --git 
a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaInboundChannelAdapterParser.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaInboundChannelAdapterParser.java deleted file mode 100644 index 9b9735c6..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaInboundChannelAdapterParser.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.config.xml; - -import org.springframework.beans.BeanMetadataElement; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.integration.config.xml.AbstractPollingInboundChannelAdapterParser; -import org.springframework.integration.config.xml.IntegrationNamespaceUtils; -import org.springframework.integration.kafka.inbound.KafkaHighLevelConsumerMessageSource; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -/** - * The Kafka Inbound Channel adapter parser - * - * @author Soby Chacko - * @since 0.5 - * - */ -public class KafkaInboundChannelAdapterParser extends AbstractPollingInboundChannelAdapterParser { - @Override - protected BeanMetadataElement parseSource(final Element element, final ParserContext parserContext) { - final BeanDefinitionBuilder highLevelConsumerMessageSourceBuilder = - BeanDefinitionBuilder.genericBeanDefinition(KafkaHighLevelConsumerMessageSource.class); - - IntegrationNamespaceUtils.setReferenceIfAttributeDefined(highLevelConsumerMessageSourceBuilder, element, "kafka-decoder"); - - final String kafkaConsumerContext = element.getAttribute("kafka-consumer-context-ref"); - - if (StringUtils.hasText(kafkaConsumerContext)) { - highLevelConsumerMessageSourceBuilder.addConstructorArgReference(kafkaConsumerContext); - } - - return highLevelConsumerMessageSourceBuilder.getBeanDefinition(); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaNamespaceHandler.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaNamespaceHandler.java deleted file mode 100644 index 3cc177b7..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaNamespaceHandler.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.config.xml; - -import org.springframework.integration.config.xml.AbstractIntegrationNamespaceHandler; - -/** - * The namespace handler for the Kafka namespace - * - * @author Soby Chacko - * @since 0.5 - * - */ -public class KafkaNamespaceHandler extends AbstractIntegrationNamespaceHandler { - /* (non-Javadoc) - * @see org.springframework.beans.factory.xml.NamespaceHandler#init() - */ - @Override - public void init() { - registerBeanDefinitionParser("zookeeper-connect", new ZookeeperConnectParser()); - registerBeanDefinitionParser("inbound-channel-adapter", new KafkaInboundChannelAdapterParser()); - registerBeanDefinitionParser("outbound-channel-adapter", new KafkaOutboundChannelAdapterParser()); - registerBeanDefinitionParser("producer-context", new KafkaProducerContextParser()); - registerBeanDefinitionParser("consumer-context", new KafkaConsumerContextParser()); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaOutboundChannelAdapterParser.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaOutboundChannelAdapterParser.java deleted file mode 100644 index 2df108fc..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaOutboundChannelAdapterParser.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.config.xml; - -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.integration.config.xml.AbstractOutboundChannelAdapterParser; -import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -/** - * - * @author Soby Chacko - * @since 0.5 - * - */ -public class KafkaOutboundChannelAdapterParser extends AbstractOutboundChannelAdapterParser { - @Override - protected AbstractBeanDefinition parseConsumer(final Element element, final ParserContext parserContext) { - final BeanDefinitionBuilder kafkaProducerMessageHandlerBuilder = - BeanDefinitionBuilder.genericBeanDefinition(KafkaProducerMessageHandler.class); - - final String kafkaServerBeanName = element.getAttribute("kafka-producer-context-ref"); - - if (StringUtils.hasText(kafkaServerBeanName)) { - kafkaProducerMessageHandlerBuilder.addConstructorArgReference(kafkaServerBeanName); - } - - return kafkaProducerMessageHandlerBuilder.getBeanDefinition(); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParser.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParser.java deleted file mode 100644 index cfb4c3c6..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParser.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.springframework.integration.kafka.config.xml; - -import java.util.Map; - -import org.w3c.dom.Element; - -import org.springframework.beans.BeanMetadataElement; -import org.springframework.beans.factory.support.AbstractBeanDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.support.ManagedMap; -import org.springframework.beans.factory.xml.AbstractSimpleBeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.integration.config.xml.IntegrationNamespaceUtils; -import org.springframework.integration.kafka.support.KafkaProducerContext; -import org.springframework.integration.kafka.support.ProducerConfiguration; -import org.springframework.integration.kafka.support.ProducerFactoryBean; -import org.springframework.integration.kafka.support.ProducerMetadata; -import org.springframework.util.StringUtils; -import org.springframework.util.xml.DomUtils; - -/** - * @author Soby Chacko - * @author Ilayaperumal Gopinathan - * @since 0.5 - */ -public class KafkaProducerContextParser extends AbstractSimpleBeanDefinitionParser { - - @Override - protected Class getBeanClass(final Element element) { - return KafkaProducerContext.class; - } - - @Override - protected void doParse(final Element element, final ParserContext parserContext, final BeanDefinitionBuilder builder) { - super.doParse(element, parserContext, builder); - - final Element topics = DomUtils.getChildElementByTagName(element, "producer-configurations"); - parseProducerConfigurations(topics, parserContext, builder, element); - } - - private void parseProducerConfigurations(Element topics, ParserContext parserContext, - BeanDefinitionBuilder builder, Element parentElem) { - Map producerConfigurationsMap = new ManagedMap(); - - for (Element producerConfiguration : DomUtils.getChildElementsByTagName(topics, "producer-configuration")) { - BeanDefinitionBuilder producerConfigurationBuilder = - BeanDefinitionBuilder.genericBeanDefinition(ProducerConfiguration.class); - - BeanDefinitionBuilder producerMetadataBuilder = - BeanDefinitionBuilder.genericBeanDefinition(ProducerMetadata.class); - producerMetadataBuilder.addConstructorArgValue(producerConfiguration.getAttribute("topic")); - IntegrationNamespaceUtils.setReferenceIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "value-encoder"); - IntegrationNamespaceUtils.setReferenceIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "key-encoder"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "key-class-type"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "value-class-type"); - IntegrationNamespaceUtils.setReferenceIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "partitioner"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "compression-codec"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "async"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(producerMetadataBuilder, producerConfiguration, - "batch-num-messages"); - AbstractBeanDefinition producerMetadataBeanDefinition = producerMetadataBuilder.getBeanDefinition(); - - String producerPropertiesBean = parentElem.getAttribute("producer-properties"); - - BeanDefinitionBuilder producerFactoryBuilder = - 
BeanDefinitionBuilder.genericBeanDefinition(ProducerFactoryBean.class); - producerFactoryBuilder.addConstructorArgValue(producerMetadataBeanDefinition); - - final String brokerList = producerConfiguration.getAttribute("broker-list"); - if (StringUtils.hasText(brokerList)) { - producerFactoryBuilder.addConstructorArgValue(producerConfiguration.getAttribute("broker-list")); - } - - if (StringUtils.hasText(producerPropertiesBean)) { - producerFactoryBuilder.addConstructorArgReference(producerPropertiesBean); - } - - AbstractBeanDefinition producerFactoryBeanDefinition = producerFactoryBuilder.getBeanDefinition(); - - producerConfigurationBuilder.addConstructorArgValue(producerMetadataBeanDefinition); - producerConfigurationBuilder.addConstructorArgValue(producerFactoryBeanDefinition); - - AbstractBeanDefinition producerConfigurationBeanDefinition = - producerConfigurationBuilder.getBeanDefinition(); - producerConfigurationsMap.put(producerConfiguration.getAttribute("topic"), - producerConfigurationBeanDefinition); - } - - builder.addPropertyValue("producerConfigurations", producerConfigurationsMap); - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParser.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParser.java deleted file mode 100644 index 3cda6c3b..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParser.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.config.xml; - -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.AbstractSimpleBeanDefinitionParser; -import org.springframework.beans.factory.xml.BeanDefinitionParserDelegate; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.integration.config.xml.IntegrationNamespaceUtils; -import org.springframework.integration.kafka.support.ZookeeperConnect; -import org.w3c.dom.Element; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class ZookeeperConnectParser extends AbstractSimpleBeanDefinitionParser { - @Override - protected Class getBeanClass(final Element element) { - return ZookeeperConnect.class; - } - - @Override - protected void doParse(final Element element, final ParserContext parserContext, final BeanDefinitionBuilder builder) { - super.doParse(element, parserContext, builder); - - IntegrationNamespaceUtils.setValueIfAttributeDefined(builder, element, - BeanDefinitionParserDelegate.SCOPE_ATTRIBUTE); - IntegrationNamespaceUtils.setValueIfAttributeDefined(builder, element, "zk-connect"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(builder, element, "zk-connection-timeout"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(builder, element, "zk-session-timeout"); - IntegrationNamespaceUtils.setValueIfAttributeDefined(builder, element, "zk-sync-time"); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/package-info.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/package-info.java deleted file mode 100644 index 3289f407..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/config/xml/package-info.java +++ /dev/null @@ -1,4 +0,0 @@ -/** - * Provides parser classes to provide Xml namespace support for the Kafka components. - */ -package org.springframework.integration.kafka.config.xml; diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/KafkaConsumerDefaults.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/KafkaConsumerDefaults.java deleted file mode 100644 index e258f90c..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/KafkaConsumerDefaults.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.core; - -/** - * Kafka adapter specific message headers. 
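- * (More precisely: this final class holds default property values for the Kafka
- * high-level consumer, such as the group id, socket and consumer timeouts, and
- * auto-commit settings.)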
- * - * @author Soby Chacko - * @since 0.5 - */ -public final class KafkaConsumerDefaults { - //High level consumer - public static final String GROUP_ID = "groupid"; - public static final String SOCKET_TIMEOUT = "30000"; - public static final String SOCKET_BUFFER_SIZE = "64*1024"; - public static final String FETCH_SIZE = "300 * 1024"; - public static final String BACKOFF_INCREMENT = "1000"; - public static final String QUEUED_CHUNKS_MAX = "100"; - public static final String AUTO_COMMIT_ENABLE = "true"; - public static final String AUTO_COMMIT_INTERVAL = "10000"; - public static final String AUTO_OFFSET_RESET = "smallest"; - //Overriding the default value of -1, which will make the consumer to wait indefinitely - public static final String CONSUMER_TIMEOUT = "5000"; - public static final String REBALANCE_RETRIES_MAX = "4"; - - private KafkaConsumerDefaults() { - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/ZookeeperConnectDefaults.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/ZookeeperConnectDefaults.java deleted file mode 100644 index 03428d75..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/ZookeeperConnectDefaults.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.core; - -/** - * - * @author Soby Chacko - * @since 0.5 - * - */ -public final class ZookeeperConnectDefaults { - public static final String ZK_CONNECT = "localhost:2181"; - public static final String ZK_CONNECTION_TIMEOUT = "6000"; - public static final String ZK_SESSION_TIMEOUT = "6000"; - public static final String ZK_SYNC_TIME = "2000"; - - private ZookeeperConnectDefaults() { - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/package-info.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/package-info.java deleted file mode 100644 index f404ec83..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/core/package-info.java +++ /dev/null @@ -1,4 +0,0 @@ -/** - * Provides core classes of the Kafka module. - */ -package org.springframework.integration.kafka.core; diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/inbound/KafkaHighLevelConsumerMessageSource.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/inbound/KafkaHighLevelConsumerMessageSource.java deleted file mode 100644 index 449a8ebe..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/inbound/KafkaHighLevelConsumerMessageSource.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.inbound; - -import org.springframework.integration.context.IntegrationObjectSupport; -import org.springframework.integration.core.MessageSource; -import org.springframework.integration.kafka.support.KafkaConsumerContext; -import org.springframework.messaging.Message; - -import java.util.List; -import java.util.Map; - -/** - * @author Soby Chacko - * @since 0.5 - * - */ -public class KafkaHighLevelConsumerMessageSource extends IntegrationObjectSupport implements MessageSource>>> { - - private final KafkaConsumerContext kafkaConsumerContext; - - public KafkaHighLevelConsumerMessageSource(final KafkaConsumerContext kafkaConsumerContext) { - this.kafkaConsumerContext = kafkaConsumerContext; - } - - @Override - public Message>>> receive() { - return kafkaConsumerContext.receive(); - } - - @Override - public String getComponentType() { - return "kafka:inbound-channel-adapter"; - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/inbound/package-info.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/inbound/package-info.java deleted file mode 100644 index 2688d7b6..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/inbound/package-info.java +++ /dev/null @@ -1,4 +0,0 @@ -/** - * Provides inbound Spring Integration Kafka components. - */ -package org.springframework.integration.kafka.inbound; diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/outbound/KafkaProducerMessageHandler.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/outbound/KafkaProducerMessageHandler.java deleted file mode 100644 index a9062e7b..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/outbound/KafkaProducerMessageHandler.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.outbound; - -import org.springframework.integration.handler.AbstractMessageHandler; -import org.springframework.integration.kafka.support.KafkaProducerContext; -import org.springframework.messaging.Message; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class KafkaProducerMessageHandler extends AbstractMessageHandler { - - private final KafkaProducerContext kafkaProducerContext; - - public KafkaProducerMessageHandler(final KafkaProducerContext kafkaProducerContext) { - this.kafkaProducerContext = kafkaProducerContext; - } - - public KafkaProducerContext getKafkaProducerContext() { - return kafkaProducerContext; - } - - @Override - protected void handleMessageInternal(final Message message) throws Exception { - kafkaProducerContext.send(message); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/package-info.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/package-info.java deleted file mode 100644 index 48475904..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/package-info.java +++ /dev/null @@ -1,4 +0,0 @@ -/** - * Root package of the Kafka Module. - */ -package org.springframework.integration.kafka; diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroDatumSupport.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroDatumSupport.java deleted file mode 100644 index 808c5a0b..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroDatumSupport.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.springframework.integration.kafka.serializer.avro; - -import org.apache.avro.io.DatumReader; -import org.apache.avro.io.DatumWriter; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import java.io.IOException; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public abstract class AvroDatumSupport { - - private static final Log LOG = LogFactory.getLog(AvroDatumSupport.class); - - private final AvroSerializer avroSerializer; - - protected AvroDatumSupport() { - this.avroSerializer = new AvroSerializer(); - } - - public byte[] toBytes(final T source, final DatumWriter writer) { - try { - return avroSerializer.serialize(source, writer); - } catch (IOException e) { - LOG.error("Failed to encode source: " + e); - } - return null; - } - - public T fromBytes(final byte[] bytes, final DatumReader reader) { - try { - return avroSerializer.deserialize(bytes, reader); - } catch (IOException e) { - LOG.error("Failed to decode byte array: " + e); - } - return null; - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroReflectDatumBackedKafkaDecoder.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroReflectDatumBackedKafkaDecoder.java deleted file mode 100644 index 68a6663c..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroReflectDatumBackedKafkaDecoder.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.serializer.avro; - -import kafka.serializer.Decoder; - -import org.apache.avro.io.DatumReader; -import org.apache.avro.reflect.ReflectDatumReader; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class AvroReflectDatumBackedKafkaDecoder extends AvroDatumSupport implements Decoder { - - private final DatumReader reader; - - public AvroReflectDatumBackedKafkaDecoder(final Class clazz) { - this.reader = new ReflectDatumReader(clazz); - } - - @Override - public T fromBytes(final byte[] bytes) { - return fromBytes(bytes, reader); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroReflectDatumBackedKafkaEncoder.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroReflectDatumBackedKafkaEncoder.java deleted file mode 100644 index 4d51d096..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroReflectDatumBackedKafkaEncoder.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.serializer.avro; - -import kafka.serializer.Encoder; - -import org.apache.avro.io.DatumWriter; -import org.apache.avro.reflect.ReflectDatumWriter; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class AvroReflectDatumBackedKafkaEncoder extends AvroDatumSupport implements Encoder { - - private final DatumWriter writer; - - public AvroReflectDatumBackedKafkaEncoder(final Class clazz) { - this.writer = new ReflectDatumWriter(clazz); - } - - @Override - public byte[] toBytes(final T source) { - return toBytes(source, writer); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSerializer.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSerializer.java deleted file mode 100644 index 2a98894c..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSerializer.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.serializer.avro; - -import org.apache.avro.io.DatumReader; -import org.apache.avro.io.DatumWriter; -import org.apache.avro.io.Decoder; -import org.apache.avro.io.DecoderFactory; -import org.apache.avro.io.Encoder; -import org.apache.avro.io.EncoderFactory; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class AvroSerializer { - - public T deserialize(final byte[] bytes, final DatumReader reader) throws IOException { - final Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null); - return reader.read(null, decoder); - } - - public byte[] serialize(final T input, final DatumWriter writer) throws IOException { - final ByteArrayOutputStream stream = new ByteArrayOutputStream(); - - final Encoder encoder = EncoderFactory.get().binaryEncoder(stream, null); - writer.write(input, encoder); - encoder.flush(); - - return stream.toByteArray(); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSpecificDatumBackedKafkaDecoder.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSpecificDatumBackedKafkaDecoder.java deleted file mode 100644 index 9ad7ee6b..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSpecificDatumBackedKafkaDecoder.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.springframework.integration.kafka.serializer.avro; - -import kafka.serializer.Decoder; - -import org.apache.avro.io.DatumReader; -import org.apache.avro.specific.SpecificDatumReader; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class AvroSpecificDatumBackedKafkaDecoder extends AvroDatumSupport implements Decoder { - - private final DatumReader reader; - - public AvroSpecificDatumBackedKafkaDecoder(final Class specificRecordBase) { - this.reader = new SpecificDatumReader(specificRecordBase); - } - - @Override - public T fromBytes(final byte[] bytes) { - return fromBytes(bytes, reader); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSpecificDatumBackedKafkaEncoder.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSpecificDatumBackedKafkaEncoder.java deleted file mode 100644 index 92fb4acd..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/avro/AvroSpecificDatumBackedKafkaEncoder.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.springframework.integration.kafka.serializer.avro; - -import kafka.serializer.Encoder; - -import org.apache.avro.io.DatumWriter; -import org.apache.avro.specific.SpecificDatumWriter; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class AvroSpecificDatumBackedKafkaEncoder extends AvroDatumSupport implements Encoder { - - private final DatumWriter writer; - - public AvroSpecificDatumBackedKafkaEncoder(final Class specificRecordClazz) { - this.writer = new 
SpecificDatumWriter(specificRecordClazz); - } - - @Override - public byte[] toBytes(final T source) { - return toBytes(source, writer); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/common/StringDecoder.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/common/StringDecoder.java deleted file mode 100644 index d5b3e200..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/common/StringDecoder.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.serializer.common; - -import java.util.Properties; - -import kafka.serializer.Decoder; -import kafka.utils.VerifiableProperties; - - -/** - * String Decoder for Kafka message key/value decoding. - * The Default decoder returns the same byte array it takes in. - * - * @author Soby Chacko - * @author Ilayaperumal Gopinathan - */ -public class StringDecoder implements Decoder { - - private final kafka.serializer.StringDecoder stringDecoder; - - public StringDecoder() { - this("UTF8"); - } - - public StringDecoder(final String encoding) { - final Properties props = new Properties(); - props.put("serializer.encoding", encoding); - this.stringDecoder = new kafka.serializer.StringDecoder(new VerifiableProperties(props)); - } - - @Override - public String fromBytes(byte[] bytes) { - return this.stringDecoder.fromBytes(bytes); - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/common/StringEncoder.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/common/StringEncoder.java deleted file mode 100644 index 1131c400..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/serializer/common/StringEncoder.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.serializer.common; - -import kafka.serializer.Encoder; -import kafka.utils.VerifiableProperties; - -import java.util.Properties; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class StringEncoder implements Encoder { - private String encoding = "UTF8"; - - public void setEncoding(final String encoding){ - this.encoding = encoding; - } - - @Override - public byte[] toBytes(final Object o) { - final Properties props = new Properties(); - props.put("serializer.encoding", encoding); - - final VerifiableProperties verifiableProperties = new VerifiableProperties(props); - return new kafka.serializer.StringEncoder(verifiableProperties).toBytes((String)o); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConfigFactoryBean.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConfigFactoryBean.java deleted file mode 100644 index cbb81aa6..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConfigFactoryBean.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.support; - -import java.util.Properties; - -import kafka.consumer.ConsumerConfig; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.springframework.beans.factory.FactoryBean; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class ConsumerConfigFactoryBean implements FactoryBean { - - private static final Log LOGGER = LogFactory.getLog(ConsumerConfigFactoryBean.class); - private final ConsumerMetadata consumerMetadata; - private final ZookeeperConnect zookeeperConnect; - private Properties consumerProperties = new Properties(); - - public ConsumerConfigFactoryBean(final ConsumerMetadata consumerMetadata, - final ZookeeperConnect zookeeperConnect, final Properties consumerProperties) { - this.consumerMetadata = consumerMetadata; - this.zookeeperConnect = zookeeperConnect; - if (consumerProperties != null) { - this.consumerProperties = consumerProperties; - } - } - - public ConsumerConfigFactoryBean(final ConsumerMetadata consumerMetadata, - final ZookeeperConnect zookeeperConnect) { - this(consumerMetadata, zookeeperConnect, null); - } - - @Override - public ConsumerConfig getObject() throws Exception { - final Properties properties = new Properties(); - properties.putAll(consumerProperties); - properties.put("zookeeper.connect", zookeeperConnect.getZkConnect()); - properties.put("zookeeper.session.timeout.ms", zookeeperConnect.getZkSessionTimeout()); - properties.put("zookeeper.sync.time.ms", zookeeperConnect.getZkSyncTime()); - - // Overriding the default value of -1, which will make the consumer to - // wait indefinitely - if (!properties.containsKey("consumer.timeout.ms")) { - properties.put("consumer.timeout.ms", consumerMetadata.getConsumerTimeout()); - } - - properties.put("group.id", consumerMetadata.getGroupId()); - - LOGGER.info("Using consumer properties => " + properties); - - return new ConsumerConfig(properties); - } - - @Override - public Class getObjectType() { - return ConsumerConfig.class; - } - - @Override - public boolean isSingleton() { - return true; - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConfiguration.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConfiguration.java deleted file mode 100644 index 2514a086..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConfiguration.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, - * either express or implied. See the License for the specific language governing permissions and limitations under the - * License. 
- */ -package org.springframework.integration.kafka.support; - -import java.util.*; -import java.util.concurrent.*; - -import kafka.consumer.ConsumerTimeoutException; -import kafka.consumer.KafkaStream; -import kafka.javaapi.consumer.ConsumerConnector; -import kafka.message.MessageAndMetadata; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.messaging.MessagingException; - -/** - * @author Soby Chacko - * @author Rajasekar Elango - * @since 0.5 - */ -public class ConsumerConfiguration { - private static final Log LOGGER = LogFactory.getLog(ConsumerConfiguration.class); - - private final ConsumerMetadata consumerMetadata; - private final ConsumerConnectionProvider consumerConnectionProvider; - private final MessageLeftOverTracker messageLeftOverTracker; - private ConsumerConnector consumerConnector; - private volatile int count = 0; - private int maxMessages = 1; - private Collection>> consumerMessageStreams; - - private final ExecutorService executorService = Executors.newCachedThreadPool(); - - public ConsumerConfiguration(final ConsumerMetadata consumerMetadata, - final ConsumerConnectionProvider consumerConnectionProvider, - final MessageLeftOverTracker messageLeftOverTracker) { - this.consumerMetadata = consumerMetadata; - this.consumerConnectionProvider = consumerConnectionProvider; - this.messageLeftOverTracker = messageLeftOverTracker; - } - - public ConsumerMetadata getConsumerMetadata() { - return consumerMetadata; - } - - public Map>> receive() { - count = messageLeftOverTracker.getCurrentCount(); - final Object lock = new Object(); - - final List>>> tasks = new LinkedList>>>(); - - for (final List> streams : createConsumerMessageStreams()) { - for (final KafkaStream stream : streams) { - tasks.add(new Callable>>() { - @Override - public List> call() throws Exception { - final List> rawMessages = new ArrayList>(); - try { - while (count < maxMessages) { - final MessageAndMetadata messageAndMetadata = stream.iterator().next(); - synchronized (lock) { - if (count < maxMessages) { - rawMessages.add(messageAndMetadata); - count++; - } - else { - messageLeftOverTracker.addMessageAndMetadata(messageAndMetadata); - } - } - } - } catch (ConsumerTimeoutException cte) { - LOGGER.debug("Consumer timed out"); - } - return rawMessages; - } - }); - } - } - return executeTasks(tasks); - } - - private Map>> executeTasks( - final List>>> tasks) { - - final Map>> messages = new ConcurrentHashMap>>(); - messages.putAll(getLeftOverMessageMap()); - - try { - for (final Future>> result : executorService.invokeAll(tasks)) { - if (!result.get().isEmpty()) { - final String topic = result.get().get(0).topic(); - if (!messages.containsKey(topic)) { - messages.put(topic, getPayload(result.get())); - } - else { - - final Map> existingPayloadMap = messages.get(topic); - getPayload(result.get(), existingPayloadMap); - } - } - } - } catch (Exception e) { - throw new MessagingException("Consuming from Kafka failed", e); - } - - if (messages.isEmpty()) { - return null; - } - - return messages; - } - - private Map>> getLeftOverMessageMap() { - - final Map>> messages = new ConcurrentHashMap>>(); - - for (final MessageAndMetadata mamd : messageLeftOverTracker.getMessageLeftOverFromPreviousPoll()) { - final String topic = mamd.topic(); - - if (!messages.containsKey(topic)) { - final List> l = new ArrayList>(); - l.add(mamd); - messages.put(topic, getPayload(l)); - } - else { - final Map> existingPayloadMap = messages.get(topic); - final List> l = new 
ArrayList>(); - l.add(mamd); - getPayload(l, existingPayloadMap); - } - } - messageLeftOverTracker.clearMessagesLeftOver(); - return messages; - } - - private Map> getPayload(final List> messageAndMetadatas) { - final Map> payloadMap = new ConcurrentHashMap>(); - - for (final MessageAndMetadata messageAndMetadata : messageAndMetadatas) { - if (!payloadMap.containsKey(messageAndMetadata.partition())) { - final List payload = new ArrayList(); - payload.add(messageAndMetadata.message()); - payloadMap.put(messageAndMetadata.partition(), payload); - } - else { - final List payload = payloadMap.get(messageAndMetadata.partition()); - payload.add(messageAndMetadata.message()); - } - - } - - return payloadMap; - } - - private void getPayload(final List> messageAndMetadatas, - final Map> existingPayloadMap) { - for (final MessageAndMetadata messageAndMetadata : messageAndMetadatas) { - if (!existingPayloadMap.containsKey(messageAndMetadata.partition())) { - final List payload = new ArrayList(); - payload.add(messageAndMetadata.message()); - existingPayloadMap.put(messageAndMetadata.partition(), payload); - } - else { - final List payload = existingPayloadMap.get(messageAndMetadata.partition()); - payload.add(messageAndMetadata.message()); - } - } - } - - private Collection>> createConsumerMessageStreams() { - if (consumerMessageStreams == null) { - if (!(consumerMetadata.getTopicStreamMap() == null || consumerMetadata.getTopicStreamMap().isEmpty())) { - consumerMessageStreams = createMessageStreamsForTopic().values(); - } - else { - consumerMessageStreams = new ArrayList>>(); - consumerMessageStreams.add(createMessageStreamsForTopicFilter()); - } - } - return consumerMessageStreams; - } - - public Map>> createMessageStreamsForTopic() { - return getConsumerConnector().createMessageStreams(consumerMetadata.getTopicStreamMap(), - consumerMetadata.getKeyDecoder(), consumerMetadata.getValueDecoder()); - } - - public List> createMessageStreamsForTopicFilter() { - List> messageStream = new ArrayList>(); - TopicFilterConfiguration topicFilterConfiguration = consumerMetadata.getTopicFilterConfiguration(); - if (topicFilterConfiguration != null) { - messageStream = getConsumerConnector().createMessageStreamsByFilter( - topicFilterConfiguration.getTopicFilter(), topicFilterConfiguration.getNumberOfStreams(), - consumerMetadata.getKeyDecoder(), consumerMetadata.getValueDecoder()); - } - else { - LOGGER.warn("No Topic Filter Configuration defined"); - } - - return messageStream; - } - - public int getMaxMessages() { - return maxMessages; - } - - public void setMaxMessages(final int maxMessages) { - this.maxMessages = maxMessages; - } - - public ConsumerConnector getConsumerConnector() { - if (consumerConnector == null) { - consumerConnector = consumerConnectionProvider.getConsumerConnector(); - } - return consumerConnector; - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConnectionProvider.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConnectionProvider.java deleted file mode 100644 index 6b542a45..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerConnectionProvider.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.support; - -import kafka.consumer.ConsumerConfig; -import kafka.javaapi.consumer.ConsumerConnector; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class ConsumerConnectionProvider { - - private final ConsumerConfig consumerConfig; - - public ConsumerConnectionProvider(final ConsumerConfig consumerConfig) { - this.consumerConfig = consumerConfig; - } - - public ConsumerConnector getConsumerConnector() { - return kafka.consumer.Consumer.createJavaConsumerConnector(consumerConfig); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerMetadata.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerMetadata.java deleted file mode 100644 index 55bb6d5e..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ConsumerMetadata.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.support; - -import java.util.Map; - -import kafka.serializer.Decoder; -import kafka.serializer.DefaultDecoder; -import org.springframework.beans.factory.InitializingBean; -import org.springframework.integration.kafka.core.KafkaConsumerDefaults; - -/** - * @author Soby Chacko - * @author Rajasekar Elango - * @since 0.5 - */ -public class ConsumerMetadata implements InitializingBean { - - //High level consumer defaults - private String groupId = KafkaConsumerDefaults.GROUP_ID; - private String socketTimeout = KafkaConsumerDefaults.SOCKET_TIMEOUT; - private String socketBufferSize = KafkaConsumerDefaults.SOCKET_BUFFER_SIZE; - private String fetchSize = KafkaConsumerDefaults.FETCH_SIZE; - private String backoffIncrement = KafkaConsumerDefaults.BACKOFF_INCREMENT; - private String queuedChunksMax = KafkaConsumerDefaults.QUEUED_CHUNKS_MAX; - private String autoCommitEnable = KafkaConsumerDefaults.AUTO_COMMIT_ENABLE; - private String autoCommitInterval = KafkaConsumerDefaults.AUTO_COMMIT_INTERVAL; - private String autoOffsetReset = KafkaConsumerDefaults.AUTO_OFFSET_RESET; - private String rebalanceRetriesMax = KafkaConsumerDefaults.REBALANCE_RETRIES_MAX; - private String consumerTimeout = KafkaConsumerDefaults.CONSUMER_TIMEOUT; - - private String topic; - private int streams; - private Decoder valueDecoder; - private Decoder keyDecoder; - private Map topicStreamMap; - private TopicFilterConfiguration topicFilterConfiguration; - - public String getGroupId() { - return groupId; - } - - public void setGroupId(final String groupId) { - this.groupId = groupId; - } - - public String getSocketTimeout() { - return socketTimeout; - } - - public void setSocketTimeout(final String socketTimeout) { - this.socketTimeout = socketTimeout; - } - - public String getSocketBufferSize() { - return socketBufferSize; - } - - public void setSocketBufferSize(final String socketBufferSize) { - this.socketBufferSize = socketBufferSize; - } - - public String getFetchSize() { - return fetchSize; - } - - public void setFetchSize(final String fetchSize) { - this.fetchSize = fetchSize; - } - - public String getBackoffIncrement() { - return backoffIncrement; - } - - public void setBackoffIncrement(final String backoffIncrement) { - this.backoffIncrement = backoffIncrement; - } - - public String getQueuedChunksMax() { - return queuedChunksMax; - } - - public void setQueuedChunksMax(final String queuedChunksMax) { - this.queuedChunksMax = queuedChunksMax; - } - - public String getAutoCommitEnable() { - return autoCommitEnable; - } - - public void setAutoCommitEnable(final String autoCommitEnable) { - this.autoCommitEnable = autoCommitEnable; - } - - public String getAutoCommitInterval() { - return autoCommitInterval; - } - - public void setAutoCommitInterval(final String autoCommitInterval) { - this.autoCommitInterval = autoCommitInterval; - } - - public String getAutoOffsetReset() { - return autoOffsetReset; - } - - public void setAutoOffsetReset(final String autoOffsetReset) { - this.autoOffsetReset = autoOffsetReset; - } - - public String getRebalanceRetriesMax() { - return rebalanceRetriesMax; - } - - public void setRebalanceRetriesMax(final String rebalanceRetriesMax) { - this.rebalanceRetriesMax = rebalanceRetriesMax; - } - - public String getConsumerTimeout() { - return consumerTimeout; - } - - public void setConsumerTimeout(final String consumerTimeout) { - this.consumerTimeout = consumerTimeout; - } - - public String getTopic() { - return topic; - } - - public void 
setTopic(final String topic) { - this.topic = topic; - } - - public int getStreams() { - return streams; - } - - public void setStreams(final int streams) { - this.streams = streams; - } - - public Decoder getValueDecoder() { - return valueDecoder; - } - - public void setValueDecoder(final Decoder valueDecoder) { - this.valueDecoder = valueDecoder; - } - - public Decoder getKeyDecoder() { - return keyDecoder; - } - - public void setKeyDecoder(final Decoder keyDecoder) { - this.keyDecoder = keyDecoder; - } - - public Map getTopicStreamMap() { - return topicStreamMap; - } - - public void setTopicStreamMap(final Map topicStreamMap) { - this.topicStreamMap = topicStreamMap; - } - - @Override - @SuppressWarnings("unchecked") - public void afterPropertiesSet() throws Exception { - if (valueDecoder == null) { - setValueDecoder((Decoder) new DefaultDecoder(null)); - } - - if (keyDecoder == null) { - setKeyDecoder((Decoder) getValueDecoder()); - } - } - - public TopicFilterConfiguration getTopicFilterConfiguration() { - return topicFilterConfiguration; - } - - public void setTopicFilterConfiguration( - TopicFilterConfiguration topicFilterConfiguration) { - this.topicFilterConfiguration = topicFilterConfiguration; - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/DefaultPartitioner.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/DefaultPartitioner.java deleted file mode 100644 index 007c116b..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/DefaultPartitioner.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.support; - -import kafka.producer.Partitioner; -import kafka.utils.Utils; - -/** - * @author Soby Chacko - * @since 0.5 - * - * This class is for internal use only and therefore is at default access level - */ -class DefaultPartitioner implements Partitioner { - /** - * Uses the key to calculate a partition bucket id for routing - * the data to the appropriate broker partition - * @return an integer between 0 and numPartitions-1 - */ - @Override - public int partition(final Object key, final int numPartitions) { - return Utils.abs(key.hashCode()) % numPartitions; - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/KafkaConsumerContext.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/KafkaConsumerContext.java deleted file mode 100644 index 650717c3..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/KafkaConsumerContext.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.integration.kafka.support; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.springframework.beans.factory.DisposableBean; -import org.springframework.integration.kafka.core.KafkaConsumerDefaults; -import org.springframework.integration.support.MessageBuilder; -import org.springframework.messaging.Message; -import org.springframework.util.CollectionUtils; - -/** - * @author Soby Chacko - * @author Ilayaperumal Gopinathan - * @since 0.5 - */ -public class KafkaConsumerContext implements DisposableBean { - private Map> consumerConfigurations; - - private String consumerTimeout = KafkaConsumerDefaults.CONSUMER_TIMEOUT; - - private ZookeeperConnect zookeeperConnect; - - public String getConsumerTimeout() { - return this.consumerTimeout; - } - - public void setConsumerTimeout(final String consumerTimeout) { - this.consumerTimeout = consumerTimeout; - } - - public ZookeeperConnect getZookeeperConnect() { - return this.zookeeperConnect; - } - - public void setZookeeperConnect(final ZookeeperConnect zookeeperConnect) { - this.zookeeperConnect = zookeeperConnect; - } - - public void setConsumerConfigurations(Map> consumerConfigurations) { - this.consumerConfigurations = consumerConfigurations; - } - - public Map> getConsumerConfigurations() { - return this.consumerConfigurations; - } - - public ConsumerConfiguration getConsumerConfiguration(String groupId) { - return this.consumerConfigurations.get(groupId); - } - - public Message>>> receive() { - final Map>> consumedData = new HashMap>>(); - - for (final ConsumerConfiguration consumerConfiguration : getConsumerConfigurations().values()) { - final Map>> messages = consumerConfiguration.receive(); - - if (!CollectionUtils.isEmpty(messages)) { - consumedData.putAll(messages); - } - } - return consumedData.isEmpty() ? null : MessageBuilder.withPayload(consumedData).build(); - } - - @Override - public void destroy() throws Exception { - for (ConsumerConfiguration config : this.consumerConfigurations.values()) { - config.getConsumerConnector().shutdown(); - } - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/KafkaProducerContext.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/KafkaProducerContext.java deleted file mode 100644 index 87b5678b..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/KafkaProducerContext.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
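`KafkaConsumerContext.receive()`, just removed, aggregates every consumer configuration into a single payload keyed first by topic, then by partition, then the message list. The generic parameters were lost in extraction; the nested-map shape below is restored by assumption from the `HashMap` construction inside `receive()`. A sketch of walking that payload on the consuming side:

```java
import java.util.List;
import java.util.Map;

import org.springframework.messaging.Message;

public final class ConsumedPayloadWalker {

	@SuppressWarnings("unchecked")
	public static int countMessages(final Message<?> received) {
		// payload shape: topic -> partition -> messages
		final Map<String, Map<Integer, List<Object>>> payload =
				(Map<String, Map<Integer, List<Object>>>) received.getPayload();
		int total = 0;
		for (final Map.Entry<String, Map<Integer, List<Object>>> topic : payload.entrySet()) {
			for (final List<Object> messages : topic.getValue().values()) {
				total += messages.size();
			}
		}
		return total;
	}

}
```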
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.integration.kafka.support; - -import java.util.Collection; -import java.util.Map; -import java.util.Properties; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -import org.springframework.messaging.Message; - -/** - * @author Soby Chacko - * @author Rajasekar Elango - * @author Ilayaperumal Gopinathan - * @since 0.5 - */ -public class KafkaProducerContext { - - private static final Log LOGGER = LogFactory.getLog(KafkaProducerContext.class); - - private volatile Map> producerConfigurations; - - private volatile ProducerConfiguration theProducerConfiguration; - - private Properties producerProperties; - - public void send(final Message message) throws Exception { - if (message.getHeaders().containsKey("topic")) { - ProducerConfiguration producerConfiguration = - getTopicConfiguration(message.getHeaders().get("topic", String.class)); - if (producerConfiguration != null) { - producerConfiguration.send(message); - } - } - // if there is a single producer configuration then use that config to send message. - else if (this.theProducerConfiguration != null) { - this.theProducerConfiguration.send(message); - } - else { - throw new IllegalStateException("Could not send messages as there are multiple producer configurations " + - "with no topic information found from the message header."); - } - } - - public ProducerConfiguration getTopicConfiguration(final String topic) { - if (this.theProducerConfiguration != null) { - if (topic.matches(this.theProducerConfiguration.getProducerMetadata().getTopic())) { - return this.theProducerConfiguration; - } - } - - Collection> topics = this.producerConfigurations.values(); - - for (final ProducerConfiguration producerConfiguration : topics) { - if (topic.matches(producerConfiguration.getProducerMetadata().getTopic())) { - return producerConfiguration; - } - } - LOGGER.error("No producer-configuration defined for topic " + topic + ". Cannot send message"); - return null; - } - - public Map> getProducerConfigurations() { - return this.producerConfigurations; - } - - public void setProducerConfigurations(Map> producerConfigurations) { - this.producerConfigurations = producerConfigurations; - if (this.producerConfigurations.size() == 1) { - this.theProducerConfiguration = this.producerConfigurations.values().iterator().next(); - } - } - - /** - * @param producerProperties - * The producerProperties to set. - */ - public void setProducerProperties(Properties producerProperties) { - this.producerProperties = producerProperties; - } - - /** - * @return Returns the producerProperties. - */ - public Properties getProducerProperties() { - return this.producerProperties; - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/MessageLeftOverTracker.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/MessageLeftOverTracker.java deleted file mode 100644 index bdeb0448..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/MessageLeftOverTracker.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
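`KafkaProducerContext.send()`, shown above, resolves the target producer from the `topic` header, falls back to the single configured producer when exactly one exists, and otherwise throws an `IllegalStateException`. From the sending side that contract looks roughly like this sketch; the generic parameters on the context are an assumption, since extraction stripped them above:

```java
import org.springframework.integration.kafka.support.KafkaProducerContext;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.Message;

public final class ProducerContextUsage {

	public static void send(final KafkaProducerContext<String, String> context) throws Exception {
		final Message<String> message = MessageBuilder.withPayload("hello")
				.setHeader("topic", "test1")     // selects the matching producer configuration
				.setHeader("messageKey", "key1") // optional; enables keyed partitioning
				.build();
		// with a topic header, an unmatched topic is only logged, not thrown;
		// the IllegalStateException fires when the header is absent and several
		// configurations exist
		context.send(message);
	}

}
```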
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.support; - -import kafka.message.MessageAndMetadata; - -import java.util.ArrayList; -import java.util.List; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class MessageLeftOverTracker { - private final List> messageLeftOverFromPreviousPoll = new ArrayList>(); - - public void addMessageAndMetadata(final MessageAndMetadata messageAndMetadata){ - messageLeftOverFromPreviousPoll.add(messageAndMetadata); - } - - public List> getMessageLeftOverFromPreviousPoll(){ - return messageLeftOverFromPreviousPoll; - } - - public void clearMessagesLeftOver(){ - messageLeftOverFromPreviousPoll.clear(); - } - - public int getCurrentCount() { - return messageLeftOverFromPreviousPoll.size(); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerConfiguration.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerConfiguration.java deleted file mode 100644 index cab8925b..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerConfiguration.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.integration.kafka.support; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.ObjectOutputStream; - -import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.lang.builder.HashCodeBuilder; - -import org.springframework.messaging.Message; -import org.springframework.messaging.MessageHandlingException; - -import kafka.javaapi.producer.Producer; -import kafka.producer.KeyedMessage; -import kafka.serializer.DefaultEncoder; - -/** - * @author Soby Chacko - * @author Rajasekar Elango - * @author Ilayaperumal Gopinathan - * @since 0.5 - */ -public class ProducerConfiguration { - - private final Producer producer; - - private final ProducerMetadata producerMetadata; - - public ProducerConfiguration(final ProducerMetadata producerMetadata, final Producer producer) { - this.producerMetadata = producerMetadata; - this.producer = producer; - } - - public ProducerMetadata getProducerMetadata() { - return this.producerMetadata; - } - - public Producer getProducer() { - return this.producer; - } - - public void send(final Message message) throws Exception { - final V v = getPayload(message); - - String topic = message.getHeaders().containsKey("topic") - ? 
message.getHeaders().get("topic", String.class) - : this.producerMetadata.getTopic(); - - if (message.getHeaders().containsKey("messageKey")) { - this.producer.send(new KeyedMessage(topic, getKey(message), v)); - } - else { - this.producer.send(new KeyedMessage(topic, v)); - } - } - - @SuppressWarnings("unchecked") - private V getPayload(final Message message) throws Exception { - if (this.producerMetadata.getValueEncoder() instanceof DefaultEncoder) { - return (V) getByteStream(message.getPayload()); - } - else if (producerMetadata.getValueClassType().isAssignableFrom(message.getPayload().getClass())) { - return producerMetadata.getValueClassType().cast(message.getPayload()); - } - throw new MessageHandlingException(message, "Message payload type is not matching with what is configured"); - } - - @SuppressWarnings("unchecked") - private K getKey(final Message message) throws Exception { - final Object key = message.getHeaders().get("messageKey"); - - if (this.producerMetadata.getKeyEncoder() instanceof DefaultEncoder) { - return (K) getByteStream(key); - } - return message.getHeaders().get("messageKey", this.producerMetadata.getKeyClassType()); - } - - private static boolean isRawByteArray(final Object obj) { - return obj instanceof byte[]; - } - - private static byte[] getByteStream(final Object obj) throws IOException { - if (isRawByteArray(obj)) { - return (byte[]) obj; - } - final ByteArrayOutputStream out = new ByteArrayOutputStream(); - final ObjectOutputStream os = new ObjectOutputStream(out); - os.writeObject(obj); - return out.toByteArray(); - } - - @Override - public boolean equals(final Object obj) { - return EqualsBuilder.reflectionEquals(this, obj); - } - - @Override - public int hashCode() { - return HashCodeBuilder.reflectionHashCode(this); - } - - @Override - public String toString() { - return "ProducerConfiguration [producerMetadata=" + this.producerMetadata + "]"; - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerFactoryBean.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerFactoryBean.java deleted file mode 100644 index 2dfbe6cb..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerFactoryBean.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
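The `getByteStream` path of the removed ProducerConfiguration is worth pinning down: with Kafka's DefaultEncoder in play, a `byte[]` passes through untouched and anything else is Java-serialized. A self-contained sketch of that conversion follows; note the added `flush()`, which the removed code skipped before reading the buffer, a potential source of truncated streams:

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;

public final class DefaultEncoderBytes {

	public static byte[] toBytes(final Object value) throws IOException {
		if (value instanceof byte[]) {
			return (byte[]) value; // no-op path: DefaultEncoder expects raw bytes
		}
		final ByteArrayOutputStream out = new ByteArrayOutputStream();
		final ObjectOutputStream os = new ObjectOutputStream(out);
		os.writeObject(value); // NotSerializableException for non-Serializable values
		os.flush();
		return out.toByteArray();
	}

}
```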
- */ -package org.springframework.integration.kafka.support; - -import kafka.javaapi.producer.Producer; -import kafka.producer.ProducerConfig; -import kafka.producer.ProducerPool; -import kafka.producer.async.DefaultEventHandler; -import kafka.producer.async.EventHandler; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.beans.factory.FactoryBean; - -import scala.collection.mutable.HashMap; - -import java.util.Properties; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class ProducerFactoryBean implements FactoryBean> { - - private static final Log LOGGER = LogFactory.getLog(ProducerFactoryBean.class); - - private final String brokerList; - private final ProducerMetadata producerMetadata; - private Properties producerProperties = new Properties(); - - public ProducerFactoryBean(final ProducerMetadata producerMetadata, final String brokerList, - final Properties producerProperties) { - this.producerMetadata = producerMetadata; - this.brokerList = brokerList; - if (producerProperties != null) { - this.producerProperties = producerProperties; - } - } - - public ProducerFactoryBean(final ProducerMetadata producerMetadata, final String brokerList) { - this(producerMetadata, brokerList, null); - } - - @Override - public Producer getObject() throws Exception { - final Properties props = new Properties(); - props.putAll(producerProperties); - props.put("metadata.broker.list", brokerList); - props.put("compression.codec", producerMetadata.getCompressionCodec()); - - if (producerMetadata.isAsync()){ - props.put("producer.type", "async"); - if (producerMetadata.getBatchNumMessages() != null){ - props.put("batch.num.messages", producerMetadata.getBatchNumMessages()); - } - } - - LOGGER.info("Using producer properties => " + props); - final ProducerConfig config = new ProducerConfig(props); - final EventHandler eventHandler = new DefaultEventHandler(config, - producerMetadata.getPartitioner() == null ? new DefaultPartitioner() : producerMetadata.getPartitioner(), - producerMetadata.getValueEncoder(), producerMetadata.getKeyEncoder(), - new ProducerPool(config), new HashMap()); - - final kafka.producer.Producer prod = new kafka.producer.Producer(config, - eventHandler); - return new Producer(prod); - } - - @Override - public Class getObjectType() { - return Producer.class; - } - - @Override - public boolean isSingleton() { - return true; - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerMetadata.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerMetadata.java deleted file mode 100644 index fe6ba9a4..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ProducerMetadata.java +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
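ProducerFactoryBean, above, assembles `metadata.broker.list`, `compression.codec` and the async settings into a ProducerConfig before wiring the event handler. Stripped of the factory machinery, an equivalent hand-built Kafka 0.8 producer looks like this sketch; it uses the plain `Producer(ProducerConfig)` constructor rather than the event-handler constructor the factory bean calls, and the broker address and topic are placeholders:

```java
import java.util.Properties;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public final class PlainProducerSketch {

	public static void main(final String[] args) {
		final Properties props = new Properties();
		props.put("metadata.broker.list", "localhost:9092"); // placeholder broker
		props.put("compression.codec", "0");                 // "0"=none, "1"=gzip, "2"=snappy

		final Producer<byte[], byte[]> producer =
				new Producer<byte[], byte[]>(new ProducerConfig(props));
		producer.send(new KeyedMessage<byte[], byte[]>("test1", "hello".getBytes()));
		producer.close();
	}

}
```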
- */ -package org.springframework.integration.kafka.support; - -import kafka.producer.Partitioner; -import kafka.serializer.DefaultEncoder; -import kafka.serializer.Encoder; - -import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.lang.builder.HashCodeBuilder; -import org.springframework.beans.factory.InitializingBean; - -/** - * @author Soby Chacko - * @author Rajasekar Elango - * @since 0.5 - */ -public class ProducerMetadata implements InitializingBean { - private Encoder keyEncoder; - private Encoder valueEncoder; - private Class keyClassType; - private Class valueClassType; - private final String topic; - private String compressionCodec = "default"; - private Partitioner partitioner; - private boolean async = false; - private String batchNumMessages; - - public ProducerMetadata(final String topic) { - this.topic = topic; - } - - public String getTopic() { - return topic; - } - - public Encoder getKeyEncoder() { - return keyEncoder; - } - - public void setKeyEncoder(final Encoder keyEncoder) { - this.keyEncoder = keyEncoder; - } - - public Encoder getValueEncoder() { - return valueEncoder; - } - - public void setValueEncoder(final Encoder valueEncoder) { - this.valueEncoder = valueEncoder; - } - - public Class getKeyClassType() { - return keyClassType; - } - - public void setKeyClassType(final Class keyClassType) { - this.keyClassType = keyClassType; - } - - public Class getValueClassType() { - return valueClassType; - } - - public void setValueClassType(final Class valueClassType) { - this.valueClassType = valueClassType; - } - - //TODO: Use an enum - public String getCompressionCodec() { - if (compressionCodec.equalsIgnoreCase("gzip")) { - return "1"; - } else if (compressionCodec.equalsIgnoreCase("snappy")) { - return "2"; - } - - return "0"; - } - - public void setCompressionCodec(final String compressionCodec) { - this.compressionCodec = compressionCodec; - } - - public Partitioner getPartitioner() { - return partitioner; - } - - public void setPartitioner(final Partitioner partitioner) { - this.partitioner = partitioner; - } - - @Override - @SuppressWarnings("unchecked") - public void afterPropertiesSet() throws Exception { - if (valueEncoder == null) { - setValueEncoder((Encoder) new DefaultEncoder(null)); - } - - if (keyEncoder == null) { - setKeyEncoder((Encoder) getValueEncoder()); - } - } - - public boolean isAsync() { - return async; - } - - public void setAsync(final boolean async) { - this.async = async; - } - - public String getBatchNumMessages() { - return batchNumMessages; - } - - public void setBatchNumMessages(final String batchNumMessages) { - this.batchNumMessages = batchNumMessages; - } - - @Override - public boolean equals(final Object obj){ - return EqualsBuilder.reflectionEquals(this, obj); - } - - @Override - public int hashCode() { - return HashCodeBuilder.reflectionHashCode(this); - } - - @Override - public String toString() { - StringBuilder builder = new StringBuilder(); - builder.append("ProducerMetadata [keyEncoder=").append(keyEncoder).append(", valueEncoder=") - .append(valueEncoder).append(", topic=").append(topic).append(", compressionCodec=") - .append(compressionCodec).append(", partitioner=").append(partitioner).append(", async=").append(async) - .append(", batchNumMessages=").append(batchNumMessages).append("]"); - return builder.toString(); - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/TopicFilterConfiguration.java 
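The removed `getCompressionCodec()` maps codec names to Kafka's numeric ids by string comparison and carries a `TODO: Use an enum`. One shape that TODO could take, sketched here rather than taken from the module:

```java
public enum CompressionCodecKind {

	NONE("0"), GZIP("1"), SNAPPY("2");

	private final String id;

	CompressionCodecKind(final String id) {
		this.id = id;
	}

	public String id() {
		return this.id;
	}

	/** Unknown names fall back to NONE, matching the removed string-based mapping. */
	public static CompressionCodecKind fromName(final String name) {
		if ("gzip".equalsIgnoreCase(name)) {
			return GZIP;
		}
		if ("snappy".equalsIgnoreCase(name)) {
			return SNAPPY;
		}
		return NONE;
	}

}
```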
b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/TopicFilterConfiguration.java deleted file mode 100644 index 576fa0b7..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/TopicFilterConfiguration.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.integration.kafka.support; - -import kafka.consumer.Blacklist; -import kafka.consumer.TopicFilter; -import kafka.consumer.Whitelist; - -/** - * @author Rajasekar Elango - * @author Artem Bilan - * @since 0.5 - */ -public class TopicFilterConfiguration { - - private final int numberOfStreams; - - private final TopicFilter topicFilter; - - public TopicFilterConfiguration(final String pattern, final int numberOfStreams, final boolean exclude) { - this.numberOfStreams = numberOfStreams; - if (exclude) { - this.topicFilter = new Blacklist(pattern); - } - else { - this.topicFilter = new Whitelist(pattern); - } - } - - public TopicFilter getTopicFilter() { - return this.topicFilter; - } - - public int getNumberOfStreams() { - return this.numberOfStreams; - } - - @Override - public String toString() { - return this.topicFilter.toString() + " : " + this.numberOfStreams; - } - -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ZookeeperConnect.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ZookeeperConnect.java deleted file mode 100644 index 99c304b3..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/ZookeeperConnect.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
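TopicFilterConfiguration, just removed, wraps the pattern in a Kafka `Blacklist` when `exclude` is true and a `Whitelist` otherwise, and renders as `<filter> : <streams>`; the parser test further down asserts exactly `foo : 10`. A usage sketch:

```java
import org.springframework.integration.kafka.support.TopicFilterConfiguration;

public final class TopicFilterDemo {

	public static void main(final String[] args) {
		final TopicFilterConfiguration whitelist = new TopicFilterConfiguration("test.*", 4, false);
		final TopicFilterConfiguration blacklist = new TopicFilterConfiguration("foo", 10, true);
		System.out.println(whitelist); // expected along the lines of "test.* : 4"
		System.out.println(blacklist); // "foo : 10", as the parser test asserts
	}

}
```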
- */ -package org.springframework.integration.kafka.support; - -import org.springframework.integration.kafka.core.ZookeeperConnectDefaults; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class ZookeeperConnect { - private String zkConnect = ZookeeperConnectDefaults.ZK_CONNECT; - private String zkConnectionTimeout = ZookeeperConnectDefaults.ZK_CONNECTION_TIMEOUT; - private String zkSessionTimeout = ZookeeperConnectDefaults.ZK_SESSION_TIMEOUT; - private String zkSyncTime = ZookeeperConnectDefaults.ZK_SYNC_TIME; - - public String getZkConnect() { - return zkConnect; - } - - public void setZkConnect(final String zkConnect) { - this.zkConnect = zkConnect; - } - - public String getZkConnectionTimeout() { - return zkConnectionTimeout; - } - - public void setZkConnectionTimeout(final String zkConnectionTimeout) { - this.zkConnectionTimeout = zkConnectionTimeout; - } - - public String getZkSessionTimeout() { - return zkSessionTimeout; - } - - public void setZkSessionTimeout(final String zkSessionTimeout) { - this.zkSessionTimeout = zkSessionTimeout; - } - - public String getZkSyncTime() { - return zkSyncTime; - } - - public void setZkSyncTime(final String zkSyncTime) { - this.zkSyncTime = zkSyncTime; - } -} diff --git a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/package-info.java b/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/package-info.java deleted file mode 100644 index 0ec591de..00000000 --- a/spring-integration-kafka/src/main/java/org/springframework/integration/kafka/support/package-info.java +++ /dev/null @@ -1,4 +0,0 @@ -/** - * Provides various support classes used across Spring Integration Kafka Components. - */ -package org.springframework.integration.kafka.support; diff --git a/spring-integration-kafka/src/main/resources/META-INF/spring.handlers b/spring-integration-kafka/src/main/resources/META-INF/spring.handlers deleted file mode 100644 index 1bee2b0d..00000000 --- a/spring-integration-kafka/src/main/resources/META-INF/spring.handlers +++ /dev/null @@ -1 +0,0 @@ -http\://www.springframework.org/schema/integration/kafka=org.springframework.integration.kafka.config.xml.KafkaNamespaceHandler diff --git a/spring-integration-kafka/src/main/resources/META-INF/spring.schemas b/spring-integration-kafka/src/main/resources/META-INF/spring.schemas deleted file mode 100644 index b5240f84..00000000 --- a/spring-integration-kafka/src/main/resources/META-INF/spring.schemas +++ /dev/null @@ -1,2 +0,0 @@ -http\://www.springframework.org/schema/integration/kafka/spring-integration-kafka-1.0.xsd=org/springframework/integration/config/xml/spring-integration-kafka-1.0.xsd -http\://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd=org/springframework/integration/config/xml/spring-integration-kafka-1.0.xsd diff --git a/spring-integration-kafka/src/main/resources/META-INF/spring.tooling b/spring-integration-kafka/src/main/resources/META-INF/spring.tooling deleted file mode 100644 index d4079816..00000000 --- a/spring-integration-kafka/src/main/resources/META-INF/spring.tooling +++ /dev/null @@ -1,4 +0,0 @@ -# Tooling related information for the integration Kafka namespace -http\://www.springframework.org/schema/integration/kafka@name=integration Kafka Namespace -http\://www.springframework.org/schema/integration/kafka@prefix=int-kafka -http\://www.springframework.org/schema/integration/kafka@icon=org/springframework/integration/config/xml/spring-integration-kafka.gif diff --git 
a/spring-integration-kafka/src/main/resources/org/springframework/integration/config/xml/spring-integration-kafka-1.0.xsd b/spring-integration-kafka/src/main/resources/org/springframework/integration/config/xml/spring-integration-kafka-1.0.xsd deleted file mode 100644 index 66c65d11..00000000 --- a/spring-integration-kafka/src/main/resources/org/springframework/integration/config/xml/spring-integration-kafka-1.0.xsd +++ /dev/null @@ -1,456 +0,0 @@
[456-line XML schema deleted; the markup did not survive extraction. The recoverable element documentation reads: "The topic configured by this configuration." / "Custom implementation of a Kafka Encoder for encoding message values." / "Custom implementation of a Kafka Encoder for encoding message keys." / "Class type used for the key" / "Class type used for the value" / "Custom Kafka key partitioner." / "Indicates if this producer is async or not." / "number of messages to batch at this producer." / "Kafka producer properties to use for all producers" / "Kafka Server Bean Name" / "Kafka consumer properties to use for all consumers" / "The definition for the Spring Integration Kafka Inbound Channel Adapter." / "Defines kafka outbound channel adapter that writes the contents of the Message to kafka broker." / "Kafka producer context reference." / "Specifies the order for invocation when this endpoint is connected as a subscriber to a SubscribableChannel."]
diff --git a/spring-integration-kafka/src/main/resources/org/springframework/integration/config/xml/spring-integration-kafka.gif b/spring-integration-kafka/src/main/resources/org/springframework/integration/config/xml/spring-integration-kafka.gif deleted file mode 100644 index 210e0764..00000000 Binary files a/spring-integration-kafka/src/main/resources/org/springframework/integration/config/xml/spring-integration-kafka.gif and /dev/null differ diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParserTests-context.xml b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParserTests-context.xml deleted file mode 100644 index d787a2dc..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParserTests-context.xml +++ /dev/null @@ -1,54 +0,0 @@
[54-line XML test context deleted; the markup did not survive extraction. Surviving values: consumer property values largest, 10485760, 5242880 and 1000 (their keys are lost) and a topic-filter with pattern "foo", 10 streams and exclude="true", matching the assertions in the test below.]
diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParserTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParserTests.java deleted file mode 100644 index 5d43b735..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaConsumerContextParserTests.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -package org.springframework.integration.kafka.config.xml; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; - -import kafka.consumer.Blacklist; -import org.hamcrest.Matchers; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.integration.kafka.support.ConsumerConfiguration; -import org.springframework.integration.kafka.support.ConsumerMetadata; -import org.springframework.integration.kafka.support.KafkaConsumerContext; -import org.springframework.integration.kafka.support.TopicFilterConfiguration; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Soby Chacko - * @author Artem Bilan - * @since 0.5 - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class KafkaConsumerContextParserTests { - - @Autowired - private ApplicationContext appContext; - - @Test - @SuppressWarnings("unchecked") - public void testConsumerContextConfiguration() { - final KafkaConsumerContext consumerContext = appContext.getBean("consumerContext", - KafkaConsumerContext.class); - Assert.assertNotNull(consumerContext); - ConsumerConfiguration cc = consumerContext.getConsumerConfiguration("default1"); - ConsumerMetadata cm = cc.getConsumerMetadata(); - assertNotNull(cm); - TopicFilterConfiguration topicFilterConfiguration = cm.getTopicFilterConfiguration(); - assertEquals("foo : 10", topicFilterConfiguration.toString()); - assertThat(topicFilterConfiguration.getTopicFilter(), Matchers.instanceOf(Blacklist.class)); - } - -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaInboundAdapterParserTests-context.xml b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaInboundAdapterParserTests-context.xml deleted file mode 100644 index b1ac3cfa..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaInboundAdapterParserTests-context.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaInboundAdapterParserTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaInboundAdapterParserTests.java deleted file mode 100644 index f5e9486e..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaInboundAdapterParserTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.config.xml; - -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.integration.endpoint.SourcePollingChannelAdapter; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Soby Chacko - * @since 0.5 - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class KafkaInboundAdapterParserTests { - - @Autowired - private ApplicationContext appContext; - - /** - * Test method for {@link org.springframework.integration.kafka.config.xml.KafkaInboundChannelAdapterParser#parseSource(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext)}. - */ - @Test - public void testParseSourceElementParserContext() throws Exception { - final SourcePollingChannelAdapter adapter = appContext.getBean("kafkaInboundChannelAdapter", - SourcePollingChannelAdapter.class); - - Assert.assertNotNull(adapter); - Assert.assertFalse(adapter.isAutoStartup()); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaMultiConsumerContextParserTests-context.xml b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaMultiConsumerContextParserTests-context.xml deleted file mode 100644 index a428f6f2..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaMultiConsumerContextParserTests-context.xml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - - - - - - - - - - largest - 10485760 - - 5242880 - 1000 - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaMultiConsumerContextParserTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaMultiConsumerContextParserTests.java deleted file mode 100644 index 84f9b958..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaMultiConsumerContextParserTests.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright 2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
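Since the adapter in the test above is declared with auto-startup disabled (`Assert.assertFalse(adapter.isAutoStartup())`), something has to start it explicitly. A sketch, assuming the same bean name as the test context:

```java
import org.springframework.context.ApplicationContext;
import org.springframework.integration.endpoint.SourcePollingChannelAdapter;

public final class ManualAdapterStart {

	public static void startInboundAdapter(final ApplicationContext ctx) {
		final SourcePollingChannelAdapter adapter =
				ctx.getBean("kafkaInboundChannelAdapter", SourcePollingChannelAdapter.class);
		if (!adapter.isRunning()) {
			adapter.start(); // begins polling the consumer context for messages
		}
	}

}
```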
- */ -package org.springframework.integration.kafka.config.xml; - -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.integration.kafka.support.ConsumerConfiguration; -import org.springframework.integration.kafka.support.ConsumerMetadata; -import org.springframework.integration.kafka.support.KafkaConsumerContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Ilayaperumal Gopinathan - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class KafkaMultiConsumerContextParserTests { - - @Autowired - private ApplicationContext appContext; - - @SuppressWarnings("unchecked") - @Test - public void testMultiConsumerContexts() { - final KafkaConsumerContext consumerContext1 = appContext.getBean("consumerContext1", KafkaConsumerContext.class); - Assert.assertNotNull(consumerContext1); - final KafkaConsumerContext consumerContext2 = appContext.getBean("consumerContext2", KafkaConsumerContext.class); - Assert.assertNotNull(consumerContext2); - } - - @SuppressWarnings("unchecked") - @Test - public void testConsumerContextConfigurations() { - final KafkaConsumerContext consumerContext = appContext.getBean("consumerContext1", KafkaConsumerContext.class); - Assert.assertNotNull(consumerContext); - final ConsumerConfiguration cc = consumerContext.getConsumerConfiguration("default1"); - final ConsumerMetadata cm = cc.getConsumerMetadata(); - Assert.assertTrue(cm.getTopicStreamMap().get("test1") == 3); - Assert.assertTrue(cm.getTopicStreamMap().get("test2") == 4); - Assert.assertNotNull(cm); - final ConsumerConfiguration cc2 = consumerContext.getConsumerConfiguration("default2"); - final ConsumerMetadata cm2 = cc2.getConsumerMetadata(); - Assert.assertTrue(cm2.getTopicStreamMap().get("test3") == 1); - Assert.assertNotNull(cm2); - final KafkaConsumerContext consumerContext2 = appContext.getBean("consumerContext2", KafkaConsumerContext.class); - Assert.assertNotNull(consumerContext2); - final ConsumerConfiguration otherCC = consumerContext2.getConsumerConfiguration("default1"); - final ConsumerMetadata otherCM = otherCC.getConsumerMetadata(); - Assert.assertTrue(otherCM.getTopicStreamMap().get("test4") == 3); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaOutboundAdapterParserTests-context.xml b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaOutboundAdapterParserTests-context.xml deleted file mode 100644 index d1cdd054..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaOutboundAdapterParserTests-context.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaOutboundAdapterParserTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaOutboundAdapterParserTests.java deleted file mode 100644 index 19487ee6..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaOutboundAdapterParserTests.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. 
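The multi-context test above pins stream counts per topic (`test1` gets 3 streams, `test2` gets 4). Built programmatically instead of via XML, the same map feeds `ConsumerMetadata.setTopicStreamMap(..)`. A sketch, with the `Map<String, Integer>` generics restored by assumption (extraction stripped them) and assuming the no-argument constructor that the setter-based XML wiring implies:

```java
import java.util.HashMap;
import java.util.Map;

import org.springframework.integration.kafka.support.ConsumerMetadata;

public final class TopicStreamMapSetup {

	public static ConsumerMetadata<byte[], byte[]> consumerMetadata() throws Exception {
		final Map<String, Integer> topicStreamMap = new HashMap<String, Integer>();
		topicStreamMap.put("test1", 3); // three consumer streams for test1
		topicStreamMap.put("test2", 4); // four for test2

		final ConsumerMetadata<byte[], byte[]> metadata = new ConsumerMetadata<byte[], byte[]>();
		metadata.setTopicStreamMap(topicStreamMap);
		metadata.afterPropertiesSet(); // installs Kafka's DefaultDecoder when none is set
		return metadata;
	}

}
```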
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.config.xml; - -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.integration.endpoint.PollingConsumer; -import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler; -import org.springframework.integration.kafka.support.KafkaProducerContext; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Soby Chacko - * @since 0.5 - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class KafkaOutboundAdapterParserTests { - - @Autowired - private ApplicationContext appContext; - - @Test - @SuppressWarnings("unchecked") - public void testOutboundAdapterConfiguration() { - final PollingConsumer pollingConsumer = - appContext.getBean("kafkaOutboundChannelAdapter", PollingConsumer.class); - final KafkaProducerMessageHandler messageHandler = appContext.getBean(KafkaProducerMessageHandler.class); - Assert.assertNotNull(pollingConsumer); - Assert.assertNotNull(messageHandler); - Assert.assertEquals(messageHandler.getOrder(), 3); - final KafkaProducerContext producerContext = messageHandler.getKafkaProducerContext(); - Assert.assertNotNull(producerContext); - Assert.assertEquals(producerContext.getProducerConfigurations().size(), 2); - } - -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParserTests-context.xml b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParserTests-context.xml deleted file mode 100644 index 1c0945a4..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParserTests-context.xml +++ /dev/null @@ -1,48 +0,0 @@ - - - - - - - - 3600000 - 5 - 5242880 - - - - - - localhost:9092 - localhost:9091 - test1 - test2 - - - - - - - - - - - - - - - diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParserTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParserTests.java deleted file mode 100644 index 54792be0..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/KafkaProducerContextParserTests.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
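The outbound parser test above reaches the producer context through the message handler; the same accessors work anywhere the handler bean is visible, for instance in a smoke check. The generics on the context are an assumption, as elsewhere:

```java
import org.springframework.context.ApplicationContext;
import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler;
import org.springframework.integration.kafka.support.KafkaProducerContext;

public final class ProducerContextSmokeCheck {

	public static int configuredProducerCount(final ApplicationContext ctx) {
		final KafkaProducerMessageHandler handler = ctx.getBean(KafkaProducerMessageHandler.class);
		final KafkaProducerContext<?, ?> producerContext = handler.getKafkaProducerContext();
		return producerContext.getProducerConfigurations().size(); // 2 in the test context
	}

}
```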
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.config.xml; - -import org.junit.Assert; -import kafka.javaapi.producer.Producer; -import kafka.serializer.Encoder; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.integration.kafka.support.KafkaProducerContext; -import org.springframework.integration.kafka.support.ProducerConfiguration; -import org.springframework.integration.kafka.support.ProducerMetadata; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -import java.util.Map; - -/** - * @author Soby Chacko - * @since 0.5 - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class KafkaProducerContextParserTests { - - @Autowired - private ApplicationContext appContext; - - @Test - @SuppressWarnings("unchecked") - public void testProducerContextConfiguration(){ - final KafkaProducerContext producerContext = appContext.getBean("producerContext", KafkaProducerContext.class); - Assert.assertNotNull(producerContext); - - final Map> producerConfigurations = producerContext.getProducerConfigurations(); - Assert.assertEquals(producerConfigurations.size(), 2); - - final ProducerConfiguration producerConfigurationTest1 = producerConfigurations.get("test1"); - Assert.assertNotNull(producerConfigurationTest1); - final ProducerMetadata producerMetadataTest1 = producerConfigurationTest1.getProducerMetadata(); - Assert.assertEquals(producerMetadataTest1.getTopic(), "test1"); - Assert.assertEquals(producerMetadataTest1.getCompressionCodec(), "0"); - Assert.assertEquals(producerMetadataTest1.getKeyClassType(), java.lang.String.class); - Assert.assertEquals(producerMetadataTest1.getValueClassType(), java.lang.String.class); - - final Encoder valueEncoder = appContext.getBean("valueEncoder", Encoder.class); - Assert.assertEquals(producerMetadataTest1.getValueEncoder(), valueEncoder); - Assert.assertEquals(producerMetadataTest1.getKeyEncoder(), valueEncoder); - - final Producer producerTest1 = producerConfigurationTest1.getProducer(); - Assert.assertEquals(producerConfigurationTest1, new ProducerConfiguration(producerMetadataTest1, producerTest1)); - - final ProducerConfiguration producerConfigurationTest2 = producerConfigurations.get("test2"); - Assert.assertNotNull(producerConfigurationTest2); - final ProducerMetadata producerMetadataTest2 = producerConfigurationTest2.getProducerMetadata(); - Assert.assertEquals(producerMetadataTest2.getTopic(), "test2"); - Assert.assertEquals(producerMetadataTest2.getCompressionCodec(), "0"); - - final Producer producerTest2 = producerConfigurationTest2.getProducer(); - Assert.assertEquals(producerConfigurationTest2, new ProducerConfiguration(producerMetadataTest2, producerTest2)); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParserTests-context.xml 
b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParserTests-context.xml deleted file mode 100644 index f5d07846..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParserTests-context.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParserTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParserTests.java deleted file mode 100644 index f520fbb9..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/ZookeeperConnectParserTests.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.config.xml; - -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.ApplicationContext; -import org.springframework.integration.kafka.core.ZookeeperConnectDefaults; -import org.springframework.integration.kafka.support.ZookeeperConnect; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - -/** - * @author Soby Chacko - * @since 0.5 - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration -public class ZookeeperConnectParserTests { - - @Autowired - private ApplicationContext appContext; - - @Test - public void testCustomKafkaBrokerConfiguration() { - final ZookeeperConnect broker = appContext.getBean("zookeeperConnect", ZookeeperConnect.class); - - Assert.assertEquals("localhost:2181", broker.getZkConnect()); - Assert.assertEquals("10000", broker.getZkConnectionTimeout()); - Assert.assertEquals("10000", broker.getZkSessionTimeout()); - Assert.assertEquals("200", broker.getZkSyncTime()); - } - - @Test - public void testDefaultKafkaBrokerConfiguration() { - final ZookeeperConnect broker = appContext.getBean("defaultZookeeperConnect", ZookeeperConnect.class); - - Assert.assertEquals(ZookeeperConnectDefaults.ZK_CONNECT, broker.getZkConnect()); - Assert.assertEquals(ZookeeperConnectDefaults.ZK_CONNECTION_TIMEOUT, broker.getZkConnectionTimeout()); - Assert.assertEquals(ZookeeperConnectDefaults.ZK_SESSION_TIMEOUT, broker.getZkSessionTimeout()); - Assert.assertEquals(ZookeeperConnectDefaults.ZK_SYNC_TIME, broker.getZkSyncTime()); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/kafkaInboundAdapterCommon-context.xml b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/kafkaInboundAdapterCommon-context.xml deleted file mode 100644 index 42e907ef..00000000 --- 
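ZookeeperConnect (deleted further up) is a plain holder over the values in `ZookeeperConnectDefaults`; the test above checks both the defaults and an overridden bean. A programmatic equivalent of the custom bean, as a sketch using the values the test asserts:

```java
import org.springframework.integration.kafka.support.ZookeeperConnect;

public final class ZookeeperConnectSetup {

	public static ZookeeperConnect customConnect() {
		final ZookeeperConnect zookeeperConnect = new ZookeeperConnect();
		zookeeperConnect.setZkConnect("localhost:2181");
		zookeeperConnect.setZkConnectionTimeout("10000");
		zookeeperConnect.setZkSessionTimeout("10000");
		zookeeperConnect.setZkSyncTime("200");
		return zookeeperConnect;
	}

}
```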
a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/config/xml/kafkaInboundAdapterCommon-context.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/serializer/AvroReflectDatumBackedKafkaSerializerTest.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/serializer/AvroReflectDatumBackedKafkaSerializerTest.java deleted file mode 100644 index b374d923..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/serializer/AvroReflectDatumBackedKafkaSerializerTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.serializer; - -import org.junit.Assert; -import org.junit.Test; -import org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaDecoder; -import org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder; -import org.springframework.integration.kafka.test.utils.TestObject; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class AvroReflectDatumBackedKafkaSerializerTest { - - @Test - public void testDecodePlainSchema() { - final AvroReflectDatumBackedKafkaEncoder avroBackedKafkaEncoder = new AvroReflectDatumBackedKafkaEncoder(TestObject.class); - - final TestObject testObject = new TestObject(); - testObject.setTestData1("\"Test Data1\""); - testObject.setTestData2(1); - - final byte[] data = avroBackedKafkaEncoder.toBytes(testObject); - - final AvroReflectDatumBackedKafkaDecoder avroReflectDatumBackedKafkaDecoder = new AvroReflectDatumBackedKafkaDecoder(TestObject.class); - final TestObject decodedFbu = avroReflectDatumBackedKafkaDecoder.fromBytes(data); - - Assert.assertEquals(testObject.getTestData1(), decodedFbu.getTestData1()); - Assert.assertEquals(testObject.getTestData2(), decodedFbu.getTestData2()); - } - - @Test - public void anotherTest() { - final AvroReflectDatumBackedKafkaEncoder avroBackedKafkaEncoder = new AvroReflectDatumBackedKafkaEncoder(java.lang.String.class); - final String testString = "Testing Avro"; - final byte[] data = avroBackedKafkaEncoder.toBytes(testString); - - final AvroReflectDatumBackedKafkaDecoder avroReflectDatumBackedKafkaDecoder = new AvroReflectDatumBackedKafkaDecoder(java.lang.String.class); - final String decodedS = avroReflectDatumBackedKafkaDecoder.fromBytes(data); - - Assert.assertEquals(testString, decodedS); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/serializer/AvroSpecificDatumBackedKafkaSerializerTest.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/serializer/AvroSpecificDatumBackedKafkaSerializerTest.java deleted file mode 100644 index 3850a5ab..00000000 --- 
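The reflect-datum codec exercised in the test above can also serve as the value-encoder of a producer configuration instead of being driven directly. A sketch follows; the generic parameters on the Avro classes and on ProducerMetadata are assumptions (the tests use the raw forms, and extraction stripped any type arguments), and it presumes the encoder implements Kafka's `Encoder` as the XML `value-encoder` wiring implies:

```java
import org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder;
import org.springframework.integration.kafka.support.ProducerMetadata;
import org.springframework.integration.kafka.test.utils.TestObject;

public final class AvroEncoderWiring {

	public static ProducerMetadata<String, TestObject> avroBackedMetadata() throws Exception {
		final ProducerMetadata<String, TestObject> metadata =
				new ProducerMetadata<String, TestObject>("test1");
		metadata.setValueEncoder(new AvroReflectDatumBackedKafkaEncoder<TestObject>(TestObject.class));
		metadata.setValueClassType(TestObject.class);
		metadata.afterPropertiesSet(); // the key encoder falls back to the value encoder
		return metadata;
	}

}
```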
a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/serializer/AvroSpecificDatumBackedKafkaSerializerTest.java +++ /dev/null @@ -1,29 +0,0 @@ -package org.springframework.integration.kafka.serializer; - -import org.junit.Assert; -import org.junit.Test; -import org.springframework.integration.kafka.serializer.avro.AvroSpecificDatumBackedKafkaDecoder; -import org.springframework.integration.kafka.serializer.avro.AvroSpecificDatumBackedKafkaEncoder; -import org.springframework.integration.kafka.test.utils.User; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class AvroSpecificDatumBackedKafkaSerializerTest { - - @Test - public void testEncodeDecodeFromSpecificDatumSchema() { - final AvroSpecificDatumBackedKafkaEncoder avroBackedKafkaEncoder = new AvroSpecificDatumBackedKafkaEncoder(User.class); - - final User user = new User("First", "Last"); - - final byte[] data = avroBackedKafkaEncoder.toBytes(user); - - final AvroSpecificDatumBackedKafkaDecoder avroSpecificDatumBackedKafkaDecoder = new AvroSpecificDatumBackedKafkaDecoder(User.class); - final User decodedUser = avroSpecificDatumBackedKafkaDecoder.fromBytes(data); - - Assert.assertEquals(user.getFirstName(), decodedUser.getFirstName().toString()); - Assert.assertEquals(user.getLastName(), decodedUser.getLastName().toString()); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ConsumerConfigurationTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ConsumerConfigurationTests.java deleted file mode 100644 index 1dab1758..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ConsumerConfigurationTests.java +++ /dev/null @@ -1,527 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.support; - -import static org.junit.Assert.assertNull; -import static org.mockito.Mockito.*; - -import java.util.*; - -import kafka.consumer.ConsumerIterator; -import kafka.consumer.KafkaStream; -import kafka.javaapi.consumer.ConsumerConnector; -import kafka.message.MessageAndMetadata; -import kafka.serializer.Decoder; - -import org.junit.Assert; -import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; - -/** - * @author Soby Chacko - * @author Rajasekar Elango - * @since 0.5 - */ -public class ConsumerConfigurationTests { - - @Test - @SuppressWarnings("unchecked") - public void testReceiveMessageForSingleTopicFromSingleStream() { - final ConsumerMetadata consumerMetadata = mock(ConsumerMetadata.class); - final ConsumerConnectionProvider consumerConnectionProvider = - mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker messageLeftOverTracker = mock(MessageLeftOverTracker.class); - final ConsumerConnector consumerConnector = mock(ConsumerConnector.class); - - Map topicStreamMap = new HashMap(); - topicStreamMap.put("topic1", 1); - when(consumerMetadata.getTopicStreamMap()).thenReturn(topicStreamMap); - - when(consumerConnectionProvider.getConsumerConnector()).thenReturn(consumerConnector); - - final ConsumerConfiguration consumerConfiguration = new ConsumerConfiguration(consumerMetadata, - consumerConnectionProvider, messageLeftOverTracker); - consumerConfiguration.setMaxMessages(1); - - final KafkaStream stream = mock(KafkaStream.class); - final List> streams = new ArrayList>(); - streams.add(stream); - final Map>> messageStreams = new HashMap>>(); - messageStreams.put("topic", streams); - - when(consumerConfiguration.createMessageStreamsForTopic()).thenReturn(messageStreams); - final ConsumerIterator iterator = mock(ConsumerIterator.class); - when(stream.iterator()).thenReturn(iterator); - final MessageAndMetadata messageAndMetadata = mock(MessageAndMetadata.class); - when(iterator.next()).thenReturn(messageAndMetadata); - when(messageAndMetadata.message()).thenReturn((V) "got message"); - when(messageAndMetadata.topic()).thenReturn("topic"); - when(messageAndMetadata.partition()).thenReturn(1); - - final Map>> messages = consumerConfiguration.receive(); - Assert.assertEquals(1, messages.size()); - Assert.assertEquals(1, messages.get("topic").size()); - Assert.assertEquals("got message", messages.get("topic").get(1).get(0)); - - verify(stream, times(1)).iterator(); - verify(iterator, times(1)).next(); - verify(messageAndMetadata, times(1)).message(); - verify(messageAndMetadata, times(1)).topic(); - } - - @Test - @SuppressWarnings("unchecked") - public void testReceiveMessageForSingleTopicFromMultipleStreams() { - final ConsumerMetadata consumerMetadata = mock(ConsumerMetadata.class); - final ConsumerConnectionProvider consumerConnectionProvider = - mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker messageLeftOverTracker = mock(MessageLeftOverTracker.class); - - Map topicStreamMap = new HashMap(); - topicStreamMap.put("topic1", 1); - when(consumerMetadata.getTopicStreamMap()).thenReturn(topicStreamMap); - - final ConsumerConnector consumerConnector = mock(ConsumerConnector.class); - - when(consumerConnectionProvider.getConsumerConnector()).thenReturn(consumerConnector); - - final ConsumerConfiguration consumerConfiguration = new ConsumerConfiguration(consumerMetadata, - consumerConnectionProvider, messageLeftOverTracker); - 
consumerConfiguration.setMaxMessages(3); - - final KafkaStream stream1 = mock(KafkaStream.class); - final KafkaStream stream2 = mock(KafkaStream.class); - final KafkaStream stream3 = mock(KafkaStream.class); - final List> streams = new ArrayList>(); - streams.add(stream1); - streams.add(stream2); - streams.add(stream3); - final Map>> messageStreams = new HashMap>>(); - messageStreams.put("topic", streams); - - when(consumerConfiguration.createMessageStreamsForTopic()).thenReturn(messageStreams); - final ConsumerIterator iterator1 = mock(ConsumerIterator.class); - final ConsumerIterator iterator2 = mock(ConsumerIterator.class); - final ConsumerIterator iterator3 = mock(ConsumerIterator.class); - - when(stream1.iterator()).thenReturn(iterator1); - when(stream2.iterator()).thenReturn(iterator2); - when(stream3.iterator()).thenReturn(iterator3); - final MessageAndMetadata messageAndMetadata1 = mock(MessageAndMetadata.class); - final MessageAndMetadata messageAndMetadata2 = mock(MessageAndMetadata.class); - final MessageAndMetadata messageAndMetadata3 = mock(MessageAndMetadata.class); - - when(iterator1.next()).thenReturn(messageAndMetadata1); - when(iterator2.next()).thenReturn(messageAndMetadata2); - when(iterator3.next()).thenReturn(messageAndMetadata3); - - when(messageAndMetadata1.message()).thenReturn((V)"got message"); - when(messageAndMetadata1.topic()).thenReturn("topic"); - when(messageAndMetadata1.partition()).thenReturn(1); - - when(messageAndMetadata2.message()).thenReturn((V)"got message"); - when(messageAndMetadata2.topic()).thenReturn("topic"); - when(messageAndMetadata2.partition()).thenReturn(2); - - when(messageAndMetadata3.message()).thenReturn((V)"got message"); - when(messageAndMetadata3.topic()).thenReturn("topic"); - when(messageAndMetadata3.partition()).thenReturn(3); - - final Map>> messages = consumerConfiguration.receive(); - Assert.assertEquals(messages.size(), 1); - int sum = 0; - - final Map> values = messages.get("topic"); - - for (final List l : values.values()) { - sum += l.size(); - } - - Assert.assertEquals(3, sum); - } - - @Test - @SuppressWarnings("unchecked") - public void testReceiveMessageForMultipleTopicsFromMultipleStreams() { - final ConsumerMetadata consumerMetadata = mock(ConsumerMetadata.class); - final ConsumerConnectionProvider consumerConnectionProvider = - mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker messageLeftOverTracker = mock(MessageLeftOverTracker.class); - - Map topicStreamMap = new HashMap(); - topicStreamMap.put("topic1", 1); - when(consumerMetadata.getTopicStreamMap()).thenReturn(topicStreamMap); - - - final ConsumerConnector consumerConnector = mock(ConsumerConnector.class); - - when(consumerConnectionProvider.getConsumerConnector()).thenReturn(consumerConnector); - - final ConsumerConfiguration consumerConfiguration = new ConsumerConfiguration(consumerMetadata, - consumerConnectionProvider, messageLeftOverTracker); - consumerConfiguration.setMaxMessages(9); - - final KafkaStream stream1 = mock(KafkaStream.class); - final KafkaStream stream2 = mock(KafkaStream.class); - final KafkaStream stream3 = mock(KafkaStream.class); - final List> streams = new ArrayList>(); - streams.add(stream1); - streams.add(stream2); - streams.add(stream3); - final Map>> messageStreams = new HashMap>>(); - messageStreams.put("topic1", streams); - messageStreams.put("topic2", streams); - messageStreams.put("topic3", streams); - - when(consumerConfiguration.createMessageStreamsForTopic()).thenReturn(messageStreams); - final 
ConsumerIterator iterator1 = mock(ConsumerIterator.class); - final ConsumerIterator iterator2 = mock(ConsumerIterator.class); - final ConsumerIterator iterator3 = mock(ConsumerIterator.class); - - when(stream1.iterator()).thenReturn(iterator1); - when(stream2.iterator()).thenReturn(iterator2); - when(stream3.iterator()).thenReturn(iterator3); - final MessageAndMetadata messageAndMetadata1 = mock(MessageAndMetadata.class); - final MessageAndMetadata messageAndMetadata2 = mock(MessageAndMetadata.class); - final MessageAndMetadata messageAndMetadata3 = mock(MessageAndMetadata.class); - - when(iterator1.next()).thenReturn(messageAndMetadata1); - when(iterator2.next()).thenReturn(messageAndMetadata2); - when(iterator3.next()).thenReturn(messageAndMetadata3); - - when(messageAndMetadata1.message()).thenReturn((V)"got message1"); - when(messageAndMetadata1.topic()).thenReturn("topic1"); - when(messageAndMetadata1.partition()).thenAnswer(getAnswer()); - - when(messageAndMetadata2.message()).thenReturn((V)"got message2"); - when(messageAndMetadata2.topic()).thenReturn("topic2"); - when(messageAndMetadata2.partition()).thenAnswer(getAnswer()); - - when(messageAndMetadata3.message()).thenReturn((V)"got message3"); - when(messageAndMetadata3.topic()).thenReturn("topic3"); - when(messageAndMetadata3.partition()).thenAnswer(getAnswer()); - - final Map>> messages = consumerConfiguration.receive(); - - int sum = 0; - - final Collection>> values = messages.values(); - - for (final Map> m : values) { - for (final List l : m.values()) { - sum += l.size(); - } - } - - Assert.assertEquals(9, sum); - } - - private Answer getAnswer() { - return new Answer() { - private int count = 0; - - @Override - public Object answer(final InvocationOnMock invocation) throws Throwable { - count++; - if (count == 1) { - return 1; - } else if (count == 2) { - return 2; - } - - return 3; - } - }; - } - - @Test - @SuppressWarnings("unchecked") - public void testReceiveMessageAndVerifyMessageLeftoverFromPreviousPollAreTakenFirst() { - final ConsumerMetadata consumerMetadata = mock(ConsumerMetadata.class); - final ConsumerConnectionProvider consumerConnectionProvider = - mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker messageLeftOverTracker = mock(MessageLeftOverTracker.class); - final ConsumerConnector consumerConnector = mock(ConsumerConnector.class); - - Map topicStreamMap = new HashMap(); - topicStreamMap.put("topic1", 1); - when(consumerMetadata.getTopicStreamMap()).thenReturn(topicStreamMap); - when(messageLeftOverTracker.getCurrentCount()).thenReturn(3); - - final MessageAndMetadata m1 = mock(MessageAndMetadata.class); - final MessageAndMetadata m2 = mock(MessageAndMetadata.class); - final MessageAndMetadata m3 = mock(MessageAndMetadata.class); - - when(m1.key()).thenReturn("key1"); - when(m1.message()).thenReturn("value1"); - when(m1.topic()).thenReturn("topic1"); - when(m1.partition()).thenReturn(1); - - when(m2.key()).thenReturn("key2"); - when(m2.message()).thenReturn("value2"); - when(m2.topic()).thenReturn("topic2"); - when(m2.partition()).thenReturn(1); - - when(m3.key()).thenReturn("key1"); - when(m3.message()).thenReturn("value3"); - when(m3.topic()).thenReturn("topic3"); - when(m3.partition()).thenReturn(1); - - final List> mList = new ArrayList>(); - mList.add(m1); - mList.add(m2); - mList.add(m3); - - when((List>) (Object) messageLeftOverTracker.getMessageLeftOverFromPreviousPoll()).thenReturn(mList); - - when(consumerConnectionProvider.getConsumerConnector()).thenReturn(consumerConnector); -
- final ConsumerConfiguration consumerConfiguration = new ConsumerConfiguration(consumerMetadata, - consumerConnectionProvider, messageLeftOverTracker); - consumerConfiguration.setMaxMessages(5); - - final KafkaStream stream = mock(KafkaStream.class); - final List> streams = new ArrayList>(); - streams.add(stream); - final Map>> messageStreams = new HashMap>>(); - messageStreams.put("topic1", streams); - when(consumerConfiguration.createMessageStreamsForTopic()).thenReturn(messageStreams); - final ConsumerIterator iterator = mock(ConsumerIterator.class); - when(stream.iterator()).thenReturn(iterator); - final MessageAndMetadata messageAndMetadata = mock(MessageAndMetadata.class); - when(iterator.next()).thenReturn(messageAndMetadata); - when(messageAndMetadata.message()).thenReturn((V) "got message"); - when(messageAndMetadata.topic()).thenReturn("topic1"); - when(messageAndMetadata.partition()).thenReturn(1); - - final Map>> messages = consumerConfiguration.receive(); - int sum = 0; - - final Collection>> values = messages.values(); - - for (final Map> m : values) { - for (final List l : m.values()) { - sum += l.size(); - } - - } - Assert.assertEquals(5, sum); - - Assert.assertTrue(messages.containsKey("topic1")); - Assert.assertTrue(messages.containsKey("topic2")); - Assert.assertTrue(messages.containsKey("topic3")); - - Assert.assertTrue(valueFound(messages.get("topic1").get(1), "value1")); - Assert.assertTrue(valueFound(messages.get("topic2").get(1), "value2")); - Assert.assertTrue(valueFound(messages.get("topic3").get(1), "value3")); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetConsumerMapWithMessageStreamsWithNullDecoders() { - - final ConsumerMetadata mockedConsumerMetadata = mock(ConsumerMetadata.class); - - assertNull(mockedConsumerMetadata.getKeyDecoder()); - assertNull(mockedConsumerMetadata.getValueDecoder()); - - final Map topicsStreamMap = new HashMap(); - when(mockedConsumerMetadata.getTopicStreamMap()).thenReturn(topicsStreamMap); - - final ConsumerConnectionProvider mockedConsumerConnectionProvider = mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker mockedMessageLeftOverTracker = mock(MessageLeftOverTracker.class); - final ConsumerConnector mockedConsumerConnector = mock(ConsumerConnector.class); - - when(mockedConsumerConnectionProvider.getConsumerConnector()).thenReturn(mockedConsumerConnector); - - final Map>> messageStreams = new HashMap>>(); - when((Map>>) - (Object) mockedConsumerConnector.createMessageStreams(topicsStreamMap)).thenReturn(messageStreams); - - final ConsumerConfiguration consumerConfiguration = new ConsumerConfiguration(mockedConsumerMetadata, - mockedConsumerConnectionProvider, mockedMessageLeftOverTracker); - - consumerConfiguration.createMessageStreamsForTopic(); - - verify(mockedConsumerMetadata, atLeast(1)).getTopicStreamMap(); - verify(mockedConsumerConnector, atLeast(1)).createMessageStreams(topicsStreamMap, null, null); - //verify(mockedConsumerConnector, atMost(0)).createMessageStreams(topicsStreamMap, null, null); - } - - @Test - @SuppressWarnings("unchecked") - public void testGetConsumerMapWithMessageStreamsWithDecoders() { - - @SuppressWarnings("unchecked") - final ConsumerMetadata mockedConsumerMetadata = mock(ConsumerMetadata.class); - - final Map topicsStreamMap = new HashMap(); - when(mockedConsumerMetadata.getTopicStreamMap()).thenReturn(topicsStreamMap); - - @SuppressWarnings("unchecked") - final Decoder mockedKeyDecoder = mock(Decoder.class); - - @SuppressWarnings("unchecked") - final 
Decoder mockedValueDecoder = mock(Decoder.class); - - when(mockedConsumerMetadata.getKeyDecoder()).thenReturn(mockedKeyDecoder); - when(mockedConsumerMetadata.getValueDecoder()).thenReturn(mockedValueDecoder); - - final ConsumerConnectionProvider mockedConsumerConnectionProvider = - mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker mockedMessageLeftOverTracker = mock(MessageLeftOverTracker.class); - final ConsumerConnector mockedConsumerConnector = mock(ConsumerConnector.class); - - when(mockedConsumerConnectionProvider.getConsumerConnector()).thenReturn(mockedConsumerConnector); - - final Map>> messageStreams = new HashMap>>(); - when(mockedConsumerConnector.createMessageStreams(topicsStreamMap)).thenReturn(messageStreams); - - final ConsumerConfiguration consumerConfiguration = - new ConsumerConfiguration(mockedConsumerMetadata, mockedConsumerConnectionProvider, - mockedMessageLeftOverTracker); - - consumerConfiguration.createMessageStreamsForTopic(); - - verify(mockedConsumerMetadata, atLeast(1)).getTopicStreamMap(); - verify(mockedConsumerConnector, atMost(0)).createMessageStreams(topicsStreamMap); - verify(mockedConsumerConnector, atLeast(1)) - .createMessageStreams(topicsStreamMap, mockedKeyDecoder, mockedValueDecoder); - } - - - @Test - @SuppressWarnings("unchecked") - public void testReceiveMessageForTopicFilterFromSingleStream() { - final ConsumerMetadata consumerMetadata = mock(ConsumerMetadata.class); - final ConsumerConnectionProvider consumerConnectionProvider = - mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker messageLeftOverTracker = mock(MessageLeftOverTracker.class); - final ConsumerConnector consumerConnector = mock(ConsumerConnector.class); - - when(consumerMetadata.getTopicFilterConfiguration()).thenReturn(new TopicFilterConfiguration(".*", 1, false)); - - when(consumerConnectionProvider.getConsumerConnector()).thenReturn(consumerConnector); - - final ConsumerConfiguration consumerConfiguration = - new ConsumerConfiguration(consumerMetadata, consumerConnectionProvider, - messageLeftOverTracker); - consumerConfiguration.setMaxMessages(1); - - final KafkaStream stream = mock(KafkaStream.class); - final List> streams = new ArrayList>(); - streams.add(stream); - - when(consumerConfiguration.createMessageStreamsForTopicFilter()).thenReturn(streams); - final ConsumerIterator iterator = mock(ConsumerIterator.class); - when(stream.iterator()).thenReturn(iterator); - final MessageAndMetadata messageAndMetadata = mock(MessageAndMetadata.class); - when(iterator.next()).thenReturn(messageAndMetadata); - when(messageAndMetadata.message()).thenReturn("got message"); - when(messageAndMetadata.topic()).thenReturn("topic"); - when(messageAndMetadata.partition()).thenReturn(1); - - final Map>> messages = consumerConfiguration.receive(); - Assert.assertEquals(1, messages.size()); - Assert.assertEquals(1, messages.get("topic").size()); - Assert.assertEquals("got message", messages.get("topic").get(1).get(0)); - - verify(stream, times(1)).iterator(); - verify(iterator, times(1)).next(); - verify(messageAndMetadata, times(1)).message(); - verify(messageAndMetadata, times(1)).topic(); - } - - @Test - @SuppressWarnings("unchecked") - public void testReceiveMessageForTopicFilterFromMultipleStreams() { - final ConsumerMetadata consumerMetadata = mock(ConsumerMetadata.class); - final ConsumerConnectionProvider consumerConnectionProvider = - mock(ConsumerConnectionProvider.class); - final MessageLeftOverTracker messageLeftOverTracker = 
mock(MessageLeftOverTracker.class); - - when(consumerMetadata.getTopicFilterConfiguration()).thenReturn(new TopicFilterConfiguration(".*", 1, false)); - - final ConsumerConnector consumerConnector = mock(ConsumerConnector.class); - - when(consumerConnectionProvider.getConsumerConnector()).thenReturn(consumerConnector); - - final ConsumerConfiguration consumerConfiguration = - new ConsumerConfiguration(consumerMetadata, consumerConnectionProvider, - messageLeftOverTracker); - consumerConfiguration.setMaxMessages(3); - - final KafkaStream stream1 = mock(KafkaStream.class); - final KafkaStream stream2 = mock(KafkaStream.class); - final KafkaStream stream3 = mock(KafkaStream.class); - final List> streams = new ArrayList>(); - streams.add(stream1); - streams.add(stream2); - streams.add(stream3); - - when(consumerConfiguration.createMessageStreamsForTopicFilter()).thenReturn(streams); - final ConsumerIterator iterator1 = mock(ConsumerIterator.class); - final ConsumerIterator iterator2 = mock(ConsumerIterator.class); - final ConsumerIterator iterator3 = mock(ConsumerIterator.class); - - when(stream1.iterator()).thenReturn(iterator1); - when(stream2.iterator()).thenReturn(iterator2); - when(stream3.iterator()).thenReturn(iterator3); - final MessageAndMetadata messageAndMetadata1 = mock(MessageAndMetadata.class); - final MessageAndMetadata messageAndMetadata2 = mock(MessageAndMetadata.class); - final MessageAndMetadata messageAndMetadata3 = mock(MessageAndMetadata.class); - - when(iterator1.next()).thenReturn(messageAndMetadata1); - when(iterator2.next()).thenReturn(messageAndMetadata2); - when(iterator3.next()).thenReturn(messageAndMetadata3); - - when(messageAndMetadata1.message()).thenReturn("got message".getBytes()); - when(messageAndMetadata1.topic()).thenReturn("topic"); - when(messageAndMetadata1.partition()).thenReturn(1); - - when(messageAndMetadata2.message()).thenReturn("got message".getBytes()); - when(messageAndMetadata2.topic()).thenReturn("topic"); - when(messageAndMetadata2.partition()).thenReturn(2); - - when(messageAndMetadata3.message()).thenReturn("got message".getBytes()); - when(messageAndMetadata3.topic()).thenReturn("topic"); - when(messageAndMetadata3.partition()).thenReturn(3); - - final Map>> messages = consumerConfiguration.receive(); - Assert.assertEquals(1, messages.size()); - int sum = 0; - - final Map> values = messages.get("topic"); - - for (final List l : values.values()) { - sum += l.size(); - } - - Assert.assertEquals(3, sum); - } - - private boolean valueFound(final List l, final String value){ - for (final Object o : l){ - if (value.equals(o)){ - return true; - } - } - - return false; - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/KafkaConsumerContextTest.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/KafkaConsumerContextTest.java deleted file mode 100644 index 709ab632..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/KafkaConsumerContextTest.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2002-2014 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.integration.kafka.support; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.Assert; -import org.junit.Test; -import org.mockito.Mockito; - -import org.springframework.beans.factory.ListableBeanFactory; -import org.springframework.messaging.Message; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class KafkaConsumerContextTest { - - @Test - @SuppressWarnings("unchecked") - public void testMergeResultsFromMultipleConsumerConfiguration() { - final KafkaConsumerContext kafkaConsumerContext = new KafkaConsumerContext(); - final ListableBeanFactory beanFactory = Mockito.mock(ListableBeanFactory.class); - final ConsumerConfiguration consumerConfiguration1 = Mockito.mock(ConsumerConfiguration.class); - final ConsumerConfiguration consumerConfiguration2 = Mockito.mock(ConsumerConfiguration.class); - - final Map> map = new HashMap>(); - map.put("config1", consumerConfiguration1); - map.put("config2", consumerConfiguration2); - - kafkaConsumerContext.setConsumerConfigurations(map); - - final Map>> result1 = new HashMap>>(); - final List l1 = new ArrayList(); - l1.add("got message1 - l1"); - l1.add("got message2 - l1"); - final Map> innerMap1 = new HashMap>(); - innerMap1.put(1, l1); - result1.put("topic1", innerMap1); - - final Map>> result2 = new HashMap>>(); - final List l2 = new ArrayList(); - l2.add("got message1 - l2"); - l2.add("got message2 - l2"); - l2.add("got message3 - l2"); - - final Map> innerMap2 = new HashMap>(); - innerMap2.put(1, l2); - result2.put("topic2", innerMap2); - - Mockito.when(consumerConfiguration1.receive()).thenReturn(result1); - Mockito.when(consumerConfiguration2.receive()).thenReturn(result2); - - final Message>>> messages = kafkaConsumerContext.receive(); - Assert.assertEquals(2, messages.getPayload().size()); - Assert.assertEquals(1, messages.getPayload().get("topic1").size()); - Assert.assertEquals("got message1 - l1", messages.getPayload().get("topic1").get(1).get(0)); - Assert.assertEquals("got message2 - l1", messages.getPayload().get("topic1").get(1).get(1)); - - Assert.assertEquals(1, messages.getPayload().get("topic2").size()); - Assert.assertEquals("got message1 - l2", messages.getPayload().get("topic2").get(1).get(0)); - Assert.assertEquals("got message2 - l2", messages.getPayload().get("topic2").get(1).get(1)); - Assert.assertEquals("got message3 - l2", messages.getPayload().get("topic2").get(1).get(2)); - } - -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/KafkaProducerContextTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/KafkaProducerContextTests.java deleted file mode 100644 index 5e8e3931..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/KafkaProducerContextTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors.
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.support; - -import java.util.HashMap; -import java.util.Map; - -import org.junit.Assert; - -import kafka.javaapi.producer.Producer; - -import org.junit.Test; -import org.mockito.Mockito; -import org.springframework.beans.factory.ListableBeanFactory; - -/** - * @author Rajasekar Elango - * @since 0.5 - */ -public class KafkaProducerContextTests { - - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Test - public void testTopicRegexForProducerConfiguration(){ - - final KafkaProducerContext kafkaProducerContext = new KafkaProducerContext(); - - final ProducerMetadata producerMetadata = Mockito.mock(ProducerMetadata.class); - - String testRegex = "test.*"; - - Mockito.when(producerMetadata.getTopic()).thenReturn(testRegex); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration producerConfiguration = new ProducerConfiguration(producerMetadata, producer); - - final Map topicConfigurations = new HashMap(); - topicConfigurations.put(testRegex, producerConfiguration); - kafkaProducerContext.setProducerConfigurations(topicConfigurations); - - Assert.assertNotNull(kafkaProducerContext.getTopicConfiguration("test1")); - Assert.assertNotNull(kafkaProducerContext.getTopicConfiguration("test2")); - Assert.assertNotNull(kafkaProducerContext.getTopicConfiguration("testabc")); - Assert.assertNull(kafkaProducerContext.getTopicConfiguration("dontmatch_testRegex")); - - } - -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ProducerConfigurationTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ProducerConfigurationTests.java deleted file mode 100644 index cbdad2c7..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ProducerConfigurationTests.java +++ /dev/null @@ -1,317 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.support; - -import kafka.javaapi.producer.Producer; -import kafka.producer.KeyedMessage; -import kafka.serializer.DefaultEncoder; -import kafka.serializer.StringEncoder; -import org.junit.Assert; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder; -import org.springframework.integration.kafka.test.utils.NonSerializableTestKey; -import org.springframework.integration.kafka.test.utils.NonSerializableTestPayload; -import org.springframework.integration.kafka.test.utils.TestKey; -import org.springframework.integration.kafka.test.utils.TestPayload; -import org.springframework.integration.support.MessageBuilder; -import org.springframework.messaging.Message; - -import java.io.ByteArrayInputStream; -import java.io.NotSerializableException; -import java.io.ObjectInputStream; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class ProducerConfigurationTests { - @Test - @SuppressWarnings("unchecked") - public void testSendMessageWithNonDefaultKeyAndValueEncoders() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - producerMetadata.setValueEncoder(new StringEncoder(null)); - producerMetadata.setKeyEncoder(new StringEncoder(null)); - producerMetadata.setKeyClassType(String.class); - producerMetadata.setValueClassType(String.class); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - - final Message message = MessageBuilder.withPayload("test message") - .setHeader("messageKey", "key") - .setHeader("topic", "test") - .build(); - - configuration.send(message); - - Mockito.verify(producer, Mockito.times(1)).send(Mockito.any(KeyedMessage.class)); - - final ArgumentCaptor> argument = - (ArgumentCaptor>) (Object) - ArgumentCaptor.forClass(KeyedMessage.class); - Mockito.verify(producer).send(argument.capture()); - - final KeyedMessage capturedKeyMessage = argument.getValue(); - - Assert.assertEquals("key", capturedKeyMessage.key()); - Assert.assertEquals("test message", capturedKeyMessage.message()); - Assert.assertEquals("test", capturedKeyMessage.topic()); - } - - /** - * User does not set explicit key/value encoders, but sends a serializable object for both key and value - */ - @Test - @SuppressWarnings("unchecked") - public void testSendMessageWithDefaultKeyAndValueEncodersAndCustomSerializableKeyAndPayloadObject() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - producerMetadata.setValueEncoder(new DefaultEncoder(null)); - producerMetadata.setKeyEncoder(new DefaultEncoder(null)); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - - final Message message = MessageBuilder.withPayload(new TestPayload("part1", "part2")) - .setHeader("messageKey", new TestKey("compositePart1", "compositePart2")) - .setHeader("topic", "test") - .build(); - - configuration.send(message); - - Mockito.verify(producer, Mockito.times(1)).send(Mockito.any(KeyedMessage.class)); - - final ArgumentCaptor> argument = - (ArgumentCaptor>) (Object) - ArgumentCaptor.forClass(KeyedMessage.class); - Mockito.verify(producer).send(argument.capture()); - - final KeyedMessage capturedKeyMessage = argument.getValue(); - - final byte[] keyBytes =
capturedKeyMessage.key(); - - final ByteArrayInputStream keyInputStream = new ByteArrayInputStream(keyBytes); - final ObjectInputStream keyObjectInputStream = new ObjectInputStream(keyInputStream); - final Object keyObj = keyObjectInputStream.readObject(); - - final TestKey tk = (TestKey) keyObj; - - Assert.assertEquals("compositePart1", tk.getKeyPart1()); - Assert.assertEquals("compositePart2", tk.getKeyPart2()); - - final byte[] messageBytes = capturedKeyMessage.message(); - - final ByteArrayInputStream messageInputStream = new ByteArrayInputStream(messageBytes); - final ObjectInputStream messageObjectInputStream = new ObjectInputStream(messageInputStream); - final Object messageObj = messageObjectInputStream.readObject(); - - final TestPayload tp = (TestPayload) messageObj; - - Assert.assertEquals("part1", tp.getPart1()); - Assert.assertEquals("part2", tp.getPart2()); - - Assert.assertEquals("test", capturedKeyMessage.topic()); - } - - /** - * User does not set an explicit key encoder, but sets a value encoder, and sends the corresponding data - */ - @Test - @SuppressWarnings("unchecked") - public void testSendMessageWithDefaultKeyEncoderAndNonDefaultValueEncoderAndCorrespondingData() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - final AvroReflectDatumBackedKafkaEncoder encoder = new AvroReflectDatumBackedKafkaEncoder(TestPayload.class); - producerMetadata.setValueEncoder(encoder); - producerMetadata.setKeyEncoder(new DefaultEncoder(null)); - producerMetadata.setValueClassType(TestPayload.class); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - final TestPayload tp = new TestPayload("part1", "part2"); - final Message message = MessageBuilder.withPayload(tp) - .setHeader("messageKey", "key") - .setHeader("topic", "test") - .build(); - - configuration.send(message); - - Mockito.verify(producer, Mockito.times(1)).send(Mockito.any(KeyedMessage.class)); - - final ArgumentCaptor> argument = - (ArgumentCaptor>) (Object) - ArgumentCaptor.forClass(KeyedMessage.class); - Mockito.verify(producer).send(argument.capture()); - - final KeyedMessage capturedKeyMessage = argument.getValue(); - - final byte[] keyBytes = capturedKeyMessage.key(); - - final ByteArrayInputStream keyInputStream = new ByteArrayInputStream(keyBytes); - final ObjectInputStream keyObjectInputStream = new ObjectInputStream(keyInputStream); - final Object keyObj = keyObjectInputStream.readObject(); - - Assert.assertEquals("key", keyObj); - Assert.assertEquals(tp, capturedKeyMessage.message()); - - Assert.assertEquals("test", capturedKeyMessage.topic()); - } - - /** - * User sets an explicit key encoder, but not a value encoder, and sends the corresponding data - */ - @Test - @SuppressWarnings("unchecked") - public void testSendMessageWithNonDefaultKeyEncoderAndDefaultValueEncoderAndCorrespondingData() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - final AvroReflectDatumBackedKafkaEncoder encoder = new AvroReflectDatumBackedKafkaEncoder(TestKey.class); - producerMetadata.setKeyEncoder(encoder); - producerMetadata.setValueEncoder(new DefaultEncoder(null)); - producerMetadata.setKeyClassType(TestKey.class); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - final TestKey tk = new TestKey("part1", "part2"); - final
Message message = MessageBuilder.withPayload("test message"). - setHeader("messageKey", tk) - .setHeader("topic", "test").build(); - - configuration.send(message); - - Mockito.verify(producer, Mockito.times(1)).send(Mockito.any(KeyedMessage.class)); - - final ArgumentCaptor> argument = - (ArgumentCaptor>) (Object) - ArgumentCaptor.forClass(KeyedMessage.class); - Mockito.verify(producer).send(argument.capture()); - - final KeyedMessage capturedKeyMessage = argument.getValue(); - - Assert.assertEquals(tk, capturedKeyMessage.key()); - - final byte[] payloadBytes = capturedKeyMessage.message(); - - final ByteArrayInputStream payloadBis = new ByteArrayInputStream(payloadBytes); - final ObjectInputStream payloadOis = new ObjectInputStream(payloadBis); - final Object payloadObj = payloadOis.readObject(); - - Assert.assertEquals("test message", payloadObj); - - Assert.assertEquals("test", capturedKeyMessage.topic()); - } - - /** - * User does not set explicit key/value encoders, but sends a serializable String key/value pair - */ - @Test - @SuppressWarnings("unchecked") - public void testSendMessageWithDefaultKeyAndValueEncodersAndStringKeyAndValue() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - producerMetadata.setValueEncoder(new DefaultEncoder(null)); - producerMetadata.setKeyEncoder(new DefaultEncoder(null)); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - - final Message message = MessageBuilder.withPayload("test message"). - setHeader("messageKey", "key") - .setHeader("topic", "test").build(); - - configuration.send(message); - - Mockito.verify(producer, Mockito.times(1)).send(Mockito.any(KeyedMessage.class)); - - final ArgumentCaptor> argument = - (ArgumentCaptor>) (Object) - ArgumentCaptor.forClass(KeyedMessage.class); - Mockito.verify(producer).send(argument.capture()); - - final KeyedMessage capturedKeyMessage = argument.getValue(); - final byte[] keyBytes = capturedKeyMessage.key(); - - final ByteArrayInputStream keyBis = new ByteArrayInputStream(keyBytes); - final ObjectInputStream keyOis = new ObjectInputStream(keyBis); - final Object keyObj = keyOis.readObject(); - - Assert.assertEquals("key", keyObj); - - final byte[] payloadBytes = capturedKeyMessage.message(); - - final ByteArrayInputStream payloadBis = new ByteArrayInputStream(payloadBytes); - final ObjectInputStream payloadOis = new ObjectInputStream(payloadBis); - final Object payloadObj = payloadOis.readObject(); - - Assert.assertEquals("test message", payloadObj); - Assert.assertEquals("test", capturedKeyMessage.topic()); - } - - /** - * User does not set explicit key/value encoders, but sends a non-serializable object for both key and value - */ - @Test(expected = NotSerializableException.class) - @SuppressWarnings("unchecked") - public void testSendMessageWithDefaultKeyAndValueEncodersButNonSerializableKeyAndValue() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - producerMetadata.setValueEncoder(new DefaultEncoder(null)); - producerMetadata.setKeyEncoder(new DefaultEncoder(null)); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - - final Message message = MessageBuilder.withPayload(new NonSerializableTestPayload("part1", "part2")).
- setHeader("messageKey", new NonSerializableTestKey("compositePart1", "compositePart2")) - .setHeader("topic", "test").build(); - configuration.send(message); - } - - /** - * User does not set an explicit key/value encoder, but send non-serializable key and serializable value - */ - @Test(expected = NotSerializableException.class) - @SuppressWarnings("unchecked") - public void testSendMessageWithDefaultKeyAndValueEncodersButNonSerializableKeyAndSerializableValue() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - producerMetadata.setValueEncoder(new DefaultEncoder(null)); - producerMetadata.setKeyEncoder(new DefaultEncoder(null)); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - - final Message message = MessageBuilder.withPayload(new TestPayload("part1", "part2")). - setHeader("messageKey", new NonSerializableTestKey("compositePart1", "compositePart2")) - .setHeader("topic", "test").build(); - configuration.send(message); - } - - /** - * User does not set an explicit key/value encoder, but send serializable key and non-serializable value - */ - @Test(expected = NotSerializableException.class) - @SuppressWarnings("unchecked") - public void testSendMessageWithDefaultKeyAndValueEncodersButSerializableKeyAndNonSerializableValue() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - producerMetadata.setValueEncoder(new DefaultEncoder(null)); - producerMetadata.setKeyEncoder(new DefaultEncoder(null)); - final Producer producer = Mockito.mock(Producer.class); - - final ProducerConfiguration configuration = new ProducerConfiguration(producerMetadata, producer); - - final Message message = MessageBuilder.withPayload(new NonSerializableTestPayload("part1", "part2")). - setHeader("messageKey", new TestKey("compositePart1", "compositePart2")) - .setHeader("topic", "test").build(); - configuration.send(message); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ProducerFactoryBeanTests.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ProducerFactoryBeanTests.java deleted file mode 100644 index e1dc411f..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/support/ProducerFactoryBeanTests.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.support; - -import org.junit.Assert; -import kafka.javaapi.producer.Producer; -import org.junit.Test; -import org.mockito.Mockito; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class ProducerFactoryBeanTests { - - @Test - public void createProducerWithDefaultMetadata() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - final ProducerMetadata tm = Mockito.spy(producerMetadata); - final ProducerFactoryBean producerFactoryBean = new ProducerFactoryBean(tm, "localhost:9092"); - final Producer producer = producerFactoryBean.getObject(); - - Assert.assertTrue(producer != null); - - Mockito.verify(tm, Mockito.times(1)).getPartitioner(); - Mockito.verify(tm, Mockito.times(1)).getCompressionCodec(); - Mockito.verify(tm, Mockito.times(1)).getValueEncoder(); - Mockito.verify(tm, Mockito.times(1)).getKeyEncoder(); - Mockito.verify(tm, Mockito.times(1)).isAsync(); - Mockito.verify(tm, Mockito.times(0)).getBatchNumMessages(); - } - - @Test - public void createProducerWithAsyncFeatures() throws Exception { - final ProducerMetadata producerMetadata = new ProducerMetadata("test"); - producerMetadata.setAsync(true); - producerMetadata.setBatchNumMessages("300"); - final ProducerMetadata tm = Mockito.spy(producerMetadata); - final ProducerFactoryBean producerFactoryBean = new ProducerFactoryBean(tm, "localhost:9092"); - final Producer producer = producerFactoryBean.getObject(); - - Assert.assertTrue(producer != null); - - Mockito.verify(tm, Mockito.times(1)).getPartitioner(); - Mockito.verify(tm, Mockito.times(1)).getCompressionCodec(); - Mockito.verify(tm, Mockito.times(1)).getValueEncoder(); - Mockito.verify(tm, Mockito.times(1)).getKeyEncoder(); - Mockito.verify(tm, Mockito.times(1)).isAsync(); - Mockito.verify(tm, Mockito.times(2)).getBatchNumMessages(); - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/NonSerializableTestKey.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/NonSerializableTestKey.java deleted file mode 100644 index fab58fdb..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/NonSerializableTestKey.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.test.utils; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class NonSerializableTestKey { - private final String keyPart1; - private final String keyPart2; - - public NonSerializableTestKey(final String keyPart1, final String keyPart2) { - this.keyPart1 = keyPart1; - this.keyPart2 = keyPart2; - } - - public String getKeyPart1() { - return keyPart1; - } - - public String getKeyPart2() { - return keyPart2; - } - -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/NonSerializableTestPayload.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/NonSerializableTestPayload.java deleted file mode 100644 index f18a10fe..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/NonSerializableTestPayload.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.test.utils; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class NonSerializableTestPayload { - private final String part1; - private final String part2; - - public NonSerializableTestPayload(final String part1, final String part2) { - this.part1 = part1; - this.part2 = part2; - } - - public String getPart1() { - return part1; - } - - public String getPart2() { - return part2; - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestKey.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestKey.java deleted file mode 100644 index ee4314ef..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestKey.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.test.utils; - -import java.io.Serializable; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class TestKey implements Serializable { - private static final long serialVersionUID = -6415387283545560656L; - - private final String keyPart1; - private final String keyPart2; - - public TestKey(final String keyPart1, final String keyPart2) { - this.keyPart1 = keyPart1; - this.keyPart2 = keyPart2; - } - - public String getKeyPart1() { - return keyPart1; - } - - public String getKeyPart2() { - return keyPart2; - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestObject.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestObject.java deleted file mode 100644 index 4cb5302b..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestObject.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.integration.kafka.test.utils; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class TestObject { - public String testData1; - public int testData2; - - public String getTestData1() { - return testData1; - } - - public void setTestData1(final String testData1) { - this.testData1 = testData1; - } - - public int getTestData2() { - return testData2; - } - - public void setTestData2(final int testData2) { - this.testData2 = testData2; - } - -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestPayload.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestPayload.java deleted file mode 100644 index 73e9bf4e..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/TestPayload.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright 2002-2013 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.integration.kafka.test.utils; - -import java.io.Serializable; - -/** - * @author Soby Chacko - * @since 0.5 - */ -public class TestPayload implements Serializable { - private static final long serialVersionUID = -8560378224929007403L; - - private final String part1; - private final String part2; - - public TestPayload(final String part1, final String part2){ - this.part1 = part1; - this.part2 = part2; - } - - public String getPart1() { - return part1; - } - - public String getPart2() { - return part2; - } -} diff --git a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/User.java b/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/User.java deleted file mode 100644 index 8ceada80..00000000 --- a/spring-integration-kafka/src/test/java/org/springframework/integration/kafka/test/utils/User.java +++ /dev/null @@ -1,93 +0,0 @@ -package org.springframework.integration.kafka.test.utils; - -import org.apache.avro.specific.SpecificRecord; - -/** - * @author Soby Chacko - * @since 0.5 - *

- * This class is partially copied from an Avro-generated class, as needed for testing. - * Please use caution when modifying it. - */ -public class User extends org.apache.avro.specific.SpecificRecordBase implements SpecificRecord { - - public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"User\",\"namespace\":\"org.springframework.integration.samples.kafka.user\",\"fields\":[{\"name\":\"firstName\",\"type\":\"string\"},{\"name\":\"lastName\",\"type\":\"string\"}]}"); - public java.lang.CharSequence firstName; - public java.lang.CharSequence lastName; - - /** - * Default constructor. - */ - public User() { - } - - /** - * All-args constructor. - */ - public User(java.lang.CharSequence firstName, java.lang.CharSequence lastName) { - this.firstName = firstName; - this.lastName = lastName; - } - - public org.apache.avro.Schema getSchema() { - return SCHEMA$; - } - - // Used by DatumWriter. Applications should not call. - public java.lang.Object get(int field$) { - switch (field$) { - case 0: - return firstName; - case 1: - return lastName; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } - } - - // Used by DatumReader. Applications should not call. - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: - firstName = (java.lang.CharSequence) value$; - break; - case 1: - lastName = (java.lang.CharSequence) value$; - break; - default: - throw new org.apache.avro.AvroRuntimeException("Bad index"); - } - } - - /** - * Gets the value of the 'firstName' field. - */ - public java.lang.CharSequence getFirstName() { - return firstName; - } - - /** - * Sets the value of the 'firstName' field. - * - * @param value the value to set. - */ - public void setFirstName(java.lang.CharSequence value) { - this.firstName = value; - } - - /** - * Gets the value of the 'lastName' field. - */ - public java.lang.CharSequence getLastName() { - return lastName; - } - - /** - * Sets the value of the 'lastName' field. - * - * @param value the value to set. - */ - public void setLastName(java.lang.CharSequence value) { - this.lastName = value; - } -} diff --git a/spring-integration-kafka/src/test/resources/log4j.properties b/spring-integration-kafka/src/test/resources/log4j.properties deleted file mode 100644 index f4284391..00000000 --- a/spring-integration-kafka/src/test/resources/log4j.properties +++ /dev/null @@ -1,8 +0,0 @@ -log4j.rootCategory=WARN, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{HH:mm:ss.SSS} %-5p [%t][%c] %m%n - -log4j.category.org.springframework.integration=WARN -log4j.category.org.springframework.integration.kafka=INFO
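For context on the default-encoder behavior exercised by the `ProducerConfigurationTests` removed above: Kafka's `DefaultEncoder` is a no-op over byte arrays, so `Serializable` keys and payloads are converted to bytes with standard Java serialization before they reach the encoder, which is why those tests read the captured `KeyedMessage` contents back through an `ObjectInputStream`. The sketch below shows that round trip in isolation; it is a minimal illustration using only JDK classes, and the class and method names are hypothetical rather than part of the adapter's API.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

// Hypothetical helper, not part of the adapter; it mirrors the byte-array
// conversion the tests above reverse with ObjectInputStream.
public final class JavaSerializationRoundTrip {

    // Serialize a Serializable object to a byte array, as must happen
    // before Kafka's default (no-op) encoder can accept key/payload data.
    static byte[] toBytes(final Serializable object) throws IOException {
        final ByteArrayOutputStream bos = new ByteArrayOutputStream();
        final ObjectOutputStream oos = new ObjectOutputStream(bos);
        oos.writeObject(object); // throws NotSerializableException for non-serializable graphs
        oos.close();
        return bos.toByteArray();
    }

    // Reconstruct the object from its serialized form.
    static Object fromBytes(final byte[] bytes) throws IOException, ClassNotFoundException {
        final ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
        try {
            return ois.readObject();
        }
        finally {
            ois.close();
        }
    }

    public static void main(final String[] args) throws Exception {
        final byte[] data = toBytes("test message");
        System.out.println(fromBytes(data)); // prints: test message
    }
}
```

An object that fails `writeObject` here (for example, one like `NonSerializableTestPayload`) is exactly what makes the send path end in `NotSerializableException` in the last three `ProducerConfigurationTests` cases.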