fields() {
+ return null;
+ }
+
+ @Override
+ public Field field(final String s) {
+ return null;
+ }
+
+ @Override
+ public Schema schema() {
+ return null;
+ }
+}
+
diff --git a/src/main/java/com/databend/kafka/connect/sink/records/DatabendRecordContent.java b/src/main/java/com/databend/kafka/connect/sink/records/DatabendRecordContent.java
new file mode 100644
index 0000000..68e5bf6
--- /dev/null
+++ b/src/main/java/com/databend/kafka/connect/sink/records/DatabendRecordContent.java
@@ -0,0 +1,149 @@
+package com.databend.kafka.connect.sink.records;
+
+import com.databend.jdbc.com.fasterxml.jackson.databind.JsonNode;
+import com.databend.jdbc.com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.kafka.connect.data.Schema;
+
+public class DatabendRecordContent {
+
+ private static ObjectMapper MAPPER = new ObjectMapper();
+ public static int NON_AVRO_SCHEMA = -1;
+ private final JsonNode[] content;
+ private final byte[] brokenData;
+ private int schemaID;
+ private boolean isBroken;
+
+ // We have to introduce this field so as to distinguish a null value record from a record whose
+ // actual contents are an empty json node.
+ // This is only set inside a constructor which is called when a byte value found in the record is
+ // null.
+ private boolean isNullValueRecord;
+
+ /**
+ * Constructor for null value.
+ *
+ * If we change this logic in future, we need to carefully modify how we handle tombstone
+ * records.
+ *
+ *
+ * @see SnowflakeSinkServiceV1#shouldSkipNullValue(SinkRecord)
+ */
+ public DatabendRecordContent() {
+ content = new JsonNode[1];
+ content[0] = MAPPER.createObjectNode();
+ brokenData = null;
+ isNullValueRecord = true;
+ }
+
+// /**
+// * constructor for native json converter
+// *
+// * @param schema schema of the object
+// * @param data object produced by native avro/json converters
+// * @param isStreaming indicates whether this is part of snowpipe streaming
+// */
+// public DatabendRecordContent(Schema schema, Object data, boolean isStreaming) {
+// this.content = new JsonNode[1];
+// this.schemaID = NON_AVRO_SCHEMA;
+// this.content[0] = RecordService.convertToJson(schema, data, isStreaming);
+// this.isBroken = false;
+// this.brokenData = null;
+// }
+
+ /**
+ * constructor for json converter
+ *
+ * @param data json node
+ */
+ public DatabendRecordContent(JsonNode data) {
+ this.content = new JsonNode[1];
+ this.content[0] = data;
+ this.isBroken = false;
+ this.schemaID = NON_AVRO_SCHEMA;
+ this.brokenData = null;
+ }
+
+ /**
+ * constructor for avro converter without schema registry
+ *
+ * @param data json node array
+ */
+ DatabendRecordContent(JsonNode[] data) {
+ this.content = data;
+ this.isBroken = false;
+ this.schemaID = NON_AVRO_SCHEMA;
+ this.brokenData = null;
+ }
+
+ /**
+ * constructor for broken record
+ *
+ * @param data broken record
+ */
+ public DatabendRecordContent(byte[] data) {
+ this.brokenData = data;
+ this.isBroken = true;
+ this.schemaID = NON_AVRO_SCHEMA;
+ this.content = null;
+ }
+
+ /**
+ * constructor for avro converter
+ *
+ * @param data json node
+ * @param schemaID schema id
+ */
+ DatabendRecordContent(JsonNode data, int schemaID) {
+ this(data);
+ this.schemaID = schemaID;
+ }
+
+ /**
+ * @return true if the record is broken
+ */
+ public boolean isBroken() {
+ return this.isBroken;
+ }
+
+ /**
+ * @return a copy of the byte array representing the broken data
+ */
+ public byte[] getBrokenData() {
+ if (!isBroken) {
+ throw new IllegalStateException("Record is not broken");
+ }
+ assert this.brokenData != null;
+ return this.brokenData.clone();
+ }
+
+ /**
+ * @return schema id, -1 if not available
+ */
+ int getSchemaID() {
+ return schemaID;
+ }
+
+ public JsonNode[] getData() {
+ if (isBroken) {
+ System.out.println("ERROR_5011");
+ }
+ assert content != null;
+ return content.clone();
+ }
+
+ /**
+ * Checks whether this record content's value is empty specifically because the underlying
+ * record value was null (rather than an intentionally empty payload).
+ *
+ *
+ * i.e. if the value passed in by the record is an empty json node ({@code {}}), we don't
+ * interpret this as a null value.
+ *
+ * @return true if content value is empty json node as well as isNullValueRecord is set to true.
+ */
+ public boolean isRecordContentValueNull() {
+ if (content != null && content[0].isEmpty() && isNullValueRecord) {
+ return true;
+ }
+ return false;
+ }
+}
+
diff --git a/target/checkstyle-cachefile b/target/checkstyle-cachefile
deleted file mode 100644
index a94c452..0000000
--- a/target/checkstyle-cachefile
+++ /dev/null
@@ -1,4 +0,0 @@
-#Mon Aug 14 14:38:12 CST 2023
-configuration*?=15CB8BB226DF1A8F9AAD0E6A6C0B87E4D56E96A5
-module-resource*?\:checkstyle-xpath-suppressions.xml=CC80A51A485F793BBEE490A9FEC16EA43A8A7FFE
-module-resource*?\:checkstyle-suppressions.xml=D02C429C31A78E743A32C7278B64010BF2033C76
diff --git a/target/checkstyle-checker.xml b/target/checkstyle-checker.xml
deleted file mode 100644
index eed537b..0000000
--- a/target/checkstyle-checker.xml
+++ /dev/null
@@ -1,198 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/target/checkstyle-result.xml b/target/checkstyle-result.xml
deleted file mode 100644
index b8ea60d..0000000
--- a/target/checkstyle-result.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/target/checkstyle-suppressions.xml b/target/checkstyle-suppressions.xml
deleted file mode 100644
index 18eec8e..0000000
--- a/target/checkstyle-suppressions.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/target/components/component-package.xml b/target/components/component-package.xml
index 77d1e69..0f3aa3f 100644
--- a/target/components/component-package.xml
+++ b/target/components/component-package.xml
@@ -1,19 +1,12 @@
-
component
-
-
dir
-
zip
-
-
true
-
/Users/hanshanjie/git-works/databend-kafka-connect
@@ -55,7 +48,6 @@
*
-
@@ -73,5 +65,4 @@
-
diff --git a/target/components/packages/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/manifest.json b/target/components/packages/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/manifest.json
index 3feb088..6a48c1f 100644
--- a/target/components/packages/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/manifest.json
+++ b/target/components/packages/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/databendCloud-databend-kafka-connect-0.0.1-SNAPSHOT/manifest.json
@@ -31,5 +31,5 @@
"url" : "https://www.apache.org/licenses/LICENSE-2.0"
} ],
"component_types" : [ "sink" ],
- "release_date" : "2023-09-07"
+ "release_date" : "2024-07-13"
}
\ No newline at end of file
diff --git a/target/manifest.json b/target/manifest.json
index 3feb088..6a48c1f 100644
--- a/target/manifest.json
+++ b/target/manifest.json
@@ -31,5 +31,5 @@
"url" : "https://www.apache.org/licenses/LICENSE-2.0"
} ],
"component_types" : [ "sink" ],
- "release_date" : "2023-09-07"
+ "release_date" : "2024-07-13"
}
\ No newline at end of file
diff --git a/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst
index 65b0855..b26aec9 100644
--- a/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst
+++ b/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst
@@ -1 +1,61 @@
+com/databend/kafka/connect/util/StringUtils.class
+com/databend/kafka/connect/databendclient/DatabendTypes.class
+com/databend/kafka/connect/sink/PreparedStatementBinder$1.class
+com/databend/kafka/connect/databendclient/DatabendConnection.class
+com/databend/kafka/connect/util/DateTimeUtils.class
+com/databend/kafka/connect/sink/DatabendSinkTask.class
+com/databend/kafka/connect/databendclient/TableIdentity.class
+com/databend/kafka/connect/util/BytesUtil.class
+com/databend/kafka/connect/sink/records/DatabendJsonSchema.class
+com/databend/kafka/connect/util/TimeZoneValidator.class
+com/databend/kafka/connect/sink/records/DatabendConverter.class
+com/databend/kafka/connect/sink/DatabendSinkConfig$EnumValidator.class
+com/databend/kafka/connect/databendclient/ColumnDefinition$Mutability.class
+com/databend/kafka/connect/sink/DatabendClient.class
+com/databend/kafka/connect/sink/PreparedStatementBinder.class
+com/databend/kafka/connect/sink/metadata/FieldsMetadata$1.class
+com/databend/kafka/connect/util/DeleteEnabledRecommender.class
+com/databend/kafka/connect/util/StringUtils$1.class
+com/databend/kafka/connect/sink/DatabendWriter$1.class
+com/databend/kafka/connect/sink/metadata/SinkRecordField.class
+com/databend/kafka/connect/sink/TimestampIncrementingCriteria$CriteriaValues.class
+com/databend/kafka/connect/databendclient/ConnectionProvider.class
+com/databend/kafka/connect/sink/records/AvroConverterConfig.class
+com/databend/kafka/connect/databendclient/ColumnDefinition$Nullability.class
+com/databend/kafka/connect/databendclient/DatabendConnection$ColumnConverter.class
+com/databend/kafka/connect/databendclient/SQLExpressionBuilder$BasicListBuilder.class
+com/databend/kafka/connect/sink/DatabendWriter.class
+com/databend/kafka/connect/databendclient/ColumnMapping.class
+com/databend/kafka/connect/databendclient/DropOptions.class
+com/databend/kafka/connect/databendclient/SQLExpressionBuilder$1.class
+com/databend/kafka/connect/sink/DatabendSinkConfig$InsertMode.class
+com/databend/kafka/connect/sink/records/DatabendAvroConverter.class
+com/databend/kafka/connect/DatabendSinkConnector.class
+com/databend/kafka/connect/util/QuoteWay.class
+com/databend/kafka/connect/sink/RecordValidator$1.class
+com/databend/kafka/connect/databendclient/SQLExpressionBuilder$ListBuilder.class
+com/databend/kafka/connect/sink/BufferedRecords$1.class
+com/databend/kafka/connect/databendclient/TableDefinitions.class
+com/databend/kafka/connect/databendclient/TableType.class
+com/databend/kafka/connect/sink/BufferedRecords.class
+com/databend/kafka/connect/databendclient/SQLExpressionBuilder$Transform.class
+com/databend/kafka/connect/util/IdentifierRules.class
+com/databend/kafka/connect/sink/metadata/FieldsMetadata.class
+com/databend/kafka/connect/sink/DatabendSinkConfig$PrimaryKeyMode.class
+com/databend/kafka/connect/sink/RecordValidator.class
+com/databend/kafka/connect/databendclient/DatabendConnection$StatementBinder.class
+com/databend/kafka/connect/sink/TableAlterOrCreateException.class
+com/databend/kafka/connect/sink/metadata/SchemaPair.class
+com/databend/kafka/connect/util/Version.class
+com/databend/kafka/connect/sink/DbStructure.class
com/databend/kafka/connect/sink/DatabendSinkConfig.class
+com/databend/kafka/connect/databendclient/ColumnIdentity.class
+com/databend/kafka/connect/sink/DbStructure$1.class
+com/databend/kafka/connect/databendclient/ColumnDefinition.class
+com/databend/kafka/connect/databendclient/CachedConnectionProvider.class
+com/databend/kafka/connect/databendclient/SQLExpressionBuilder.class
+com/databend/kafka/connect/databendclient/SQLExpressionBuilder$Expressable.class
+com/databend/kafka/connect/databendclient/TableDefinition.class
+com/databend/kafka/connect/sink/DatabendClient$1.class
+com/databend/kafka/connect/sink/TimestampIncrementingCriteria.class
+com/databend/kafka/connect/sink/records/DatabendRecordContent.class
diff --git a/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst
index 9b2dbb5..f5d7eff 100644
--- a/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst
+++ b/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst
@@ -8,6 +8,8 @@
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/util/TimeZoneValidator.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/DatabendWriter.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/databendclient/ConnectionProvider.java
+/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/records/DatabendJsonSchema.java
+/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/records/DatabendRecordContent.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/util/QuoteWay.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/util/StringUtils.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/BufferedRecords.java
@@ -15,13 +17,16 @@
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/metadata/FieldsMetadata.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/DatabendSinkConnector.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/databendclient/DatabendConnection.java
+/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/records/DatabendAvroConverter.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/databendclient/TableIdentity.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/DatabendSinkTask.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/DatabendClient.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/util/Version.java
+/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/records/DatabendConverter.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/PreparedStatementBinder.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/TableAlterOrCreateException.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/databendclient/ColumnDefinition.java
+/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/records/AvroConverterConfig.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/util/BytesUtil.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/databendclient/TableType.java
/Users/hanshanjie/git-works/databend-kafka-connect/src/main/java/com/databend/kafka/connect/sink/RecordValidator.java