Skip to content

Commit

Permalink
0003609: Kafka support as a load only node
Browse files Browse the repository at this point in the history
  • Loading branch information
jumpmind-josh committed Jun 21, 2018
1 parent abb0abb commit 4a10d0a
Show file tree
Hide file tree
Showing 7 changed files with 149 additions and 8 deletions.
4 changes: 3 additions & 1 deletion symmetric-assemble/common.gradle
Expand Up @@ -234,7 +234,9 @@ subprojects { subproject ->
provided "org.apache.geronimo.specs:geronimo-j2ee-connector_1.6_spec:1.0"
provided "com.datastax.cassandra:cassandra-driver-core:3.1.4"
provided "nl.cad:tps-parse:1.0.15-SNAPSHOT"

provided "org.apache.kafka:kafka-clients:1.1.0"
provided "org.apache.avro:avro:1.8.2"

testCompile fileTree(dir: System.getProperty("user.home") + '/.symmetricds/lib', include: '*.jar')
testCompile "junit:junit:$junitVersion"
testCompile "org.hamcrest:hamcrest-all:$hamcrestVersion"
Expand Down
1 change: 1 addition & 0 deletions symmetric-assemble/src/asciidoc/appendix/databases.ad
Expand Up @@ -496,6 +496,7 @@ include::h2.ad[]
include::hsqldb.ad[]
include::informix.ad[]
include::interbase.ad[]
include::kafka.ad[]
include::mariadb.ad[]
include::mysql.ad[]
include::mongodb.ad[]
Expand Down
130 changes: 130 additions & 0 deletions symmetric-assemble/src/asciidoc/appendix/kafka.ad
@@ -0,0 +1,130 @@

=== Kafka

Send changes from your relational database to Kafka in a variety of formats. A Kafka node can be set up as a <<Load Only Node>> to receive changes from another node that is capturing changes.

ifdef::pro[]
Set up the Kafka node by using the <<Add Node>> wizard and selecting Kafka as the type. The URL will be the connection point to Kafka. User and password are not needed (or used).

image::appendix/kafka-node-setup.png[]

After hitting next you can set up advanced options for your Kafka node.

image::appendix/kafka-advanced-settings.png[]
endif::pro[]

==== Output Message Format


.Set the output message format with the following property
----
kafka.format
----


[horizontal]
JSON:: Json formatted output message
[source, json]
----
{
"table name": {
"eventType": "INSERT|UPDATE|DELETE",
"data": {
"column name": "value",....
}
}
}
----
XML:: Xml formatted output message
[source, xml]
----
<row entity="table name" dml="INSERT|UPDATE|DELETE">
<data key="column name">value</data>
...
</row>
----
AVRO:: Apache Avro output message (Avro Schema Below)
[source, avro]
----
{
"type": "record",
"name": "cdc",
"fields": [
{
"name": "table",
"type": "string"
},
{
"name": "eventType",
"type": "string"
},
{
"name": "data",
"type": {
"type": "array",
"items": {
"name": "column",
"type": "record",
"fields": [
{
"name": "name",
"type": "string"
},
{
"name": "value",
"type": [
"null",
"string"
]
}
]
}
}
}
]
}
----
CSV:: CSV formatted output message
[source, csv]
----
TABLE,table name,EVENT,INSERT|UPDATE|DELETE,column name,value, ...
----

==== Setting the Topic


.Set the topic using the following property
----
kafka.topic.by
----


[horizontal]
CHANNEL:: This will send to a topic based on the channel of the batch that is being sent.
TABLE:: This will send to a topic based on the table name of the change.

==== Setting Messages By


.Set the following property to determine how messages will be sent.
----
kafka.message.by
----


[horizontal]
BATCH:: This will send one message for each batch containing all changes.
ROW:: This will send one message for each change captured.

==== Setting The Producer


.Set the following property to specify the producer of the messages.
----
kafka.producer
----


ifdef::pro[]
Provide a value for the producer of the message.
endif::pro[]
1 change: 0 additions & 1 deletion symmetric-assemble/src/asciidoc/examples.ad
Expand Up @@ -3,6 +3,5 @@

This chapter focuses on using examples for a variety of use cases with SymmetricDS.

include::examples/kafka.ad[]
include::examples/csv2db.ad[]

Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Expand Up @@ -18,7 +18,6 @@
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.jumpmind.db.model.Table;
import org.jumpmind.db.util.BasicDataSourcePropertyConstants;
import org.jumpmind.symmetric.common.ParameterConstants;
import org.jumpmind.symmetric.io.data.CsvData;
import org.jumpmind.symmetric.io.data.DataContext;
Expand Down Expand Up @@ -69,7 +68,7 @@ public class KafkaWriterFilter implements IDatabaseWriterFilter {
+ " \"type\":\"record\","
+ " \"fields\":["
+ " {\"name\":\"name\", \"type\":\"string\"},"
+ " {\"name\":\"value\", \"type\":\"string\"} ] }}}]}";
+ " {\"name\":\"value\", \"type\":[\"null\", \"string\"]} ] }}}]}";



Expand All @@ -79,11 +78,11 @@ public class KafkaWriterFilter implements IDatabaseWriterFilter {
public KafkaWriterFilter(IParameterService parameterService) {
log.info(AVRO_CDC_SCHEMA);
schema = parser.parse(AVRO_CDC_SCHEMA);
this.url = parameterService.getString(ParameterConstants.LOAD_ONLY_PROPERTY_PREFIX + BasicDataSourcePropertyConstants.DB_POOL_URL);
this.url = parameterService.getString(ParameterConstants.LOAD_ONLY_PROPERTY_PREFIX + "db.url");
if (url == null) {
throw new RuntimeException(
"Kafka not configured properly, verify you have set the endpoint to kafka with the following property : "
+ ParameterConstants.LOAD_ONLY_PROPERTY_PREFIX + BasicDataSourcePropertyConstants.DB_POOL_URL);
+ ParameterConstants.LOAD_ONLY_PROPERTY_PREFIX + "db.url");
}

this.producer = parameterService.getString(ParameterConstants.KAFKA_PRODUCER, "SymmetricDS");
Expand Down Expand Up @@ -121,9 +120,19 @@ public boolean beforeWrite(DataContext context, Table table, CsvData data) {
kafkaText.append("{\"").append(table.getName()).append("\": {")
.append("\"eventType\": \"" + data.getDataEventType() + "\",").append("\"data\": { ");
for (int i = 0; i < table.getColumnNames().length; i++) {
kafkaText.append("\"" + table.getColumnNames()[i] + "\": \"" + rowData[i]);
kafkaText.append("\"")
.append(table.getColumnNames()[i])
.append("\": ");

if (rowData[i] != null) {
kafkaText.append("\"");
}
kafkaText.append(rowData[i]);
if (rowData[i] != null) {
kafkaText.append("\"");
}
if (i + 1 < table.getColumnNames().length) {
kafkaText.append("\",");
kafkaText.append(",");
}
}
kafkaText.append(" } } }");
Expand Down

0 comments on commit 4a10d0a

Please sign in to comment.