-
Notifications
You must be signed in to change notification settings - Fork 608
Description
When I use v0.3.2, the following code throws an exception:
// Build a ClickHouse Array(String) JDBC parameter from a JSON array node.
// NOTE(review): with driver v0.3.2, createArrayOf fails for any array with
// more than one element (see the IllegalArgumentException in the stack trace
// below) — presumably this worked in earlier driver versions; verify.
val array = pst.getConnection
.createArrayOf(ClickHouseDataType.String.name(),
ClickHouseArrayValue.of(intoMsg.withArrayJsonNode.elements().asScala.toArray).asArray())
pst.setArray(20, array) // alarm values
pst.setInt(21, intoMsg.get("isInterpolate").asInt) // interpolation flag (0 = normal || 1 = interpolated)
// Quoted from com.clickhouse.jdbc.ClickHouseConnection (clickhouse-jdbc v0.3.2).
// Implements java.sql.Connection#createArrayOf by wrapping the elements in a
// one-row, one-column in-memory result set and handing that to ClickHouseArray.
default ClickHouseArray createArrayOf(String typeName, Object[] elements) throws SQLException {
ClickHouseConfig config = this.getConfig();
// Column describing the parameter type: Array(typeName).
ClickHouseColumn column = ClickHouseColumn.of("", ClickHouseDataType.Array, false, new ClickHouseColumn[]{ClickHouseColumn.of("", typeName)});
// BUG(review): the stack trace shows this dispatches to the default
// ClickHouseValue.update(Object[]) (quoted below), which throws
// IllegalArgumentException for any array with more than one element —
// so createArrayOf cannot build a multi-element array here. The fix
// presumably belongs in the value returned by ClickHouseValues.newValue
// for Array columns (it should accept the whole array) — confirm upstream.
ClickHouseValue v = ClickHouseValues.newValue(config, column).update(elements);
// Expose the single Array value as row 1 / column 1 of a synthetic result set.
ClickHouseResultSet rs = new ClickHouseResultSet("", "", this.createStatement(), ClickHouseSimpleResponse.of(config, Collections.singletonList(column), new Object[][]{{v.asObject()}}));
rs.next();
return new ClickHouseArray(rs, 1);
}
/**
 * Default {@code ClickHouseValue} handling of an {@code Object[]} argument.
 * A {@code null} or empty array resets this value; a singleton array delegates
 * to {@code update(Object)} with its only element; anything longer is rejected.
 */
default ClickHouseValue update(Object[] value) {
    // Treat null and zero-length input identically: clear the value.
    if (value == null || value.length == 0) {
        return this.resetToNullOrEmpty();
    }
    // Exactly one element is the only accepted non-empty shape.
    if (value.length == 1) {
        return this.update(value[0]);
    }
    throw new IllegalArgumentException("Only singleton array is allowed, but we got: " + Arrays.toString(value));
}
Does this `update` method require the array to have at most one element? If so, how is `createArrayOf` supposed to accept a multi-element array?
Caused by: java.lang.RuntimeException: Writing records to JDBC failed.
at org.apache.flink.connector.jdbc.internal.JdbcBatchingOutputFormat.checkFlushException(JdbcBatchingOutputFormat.java:153)
at org.apache.flink.connector.jdbc.internal.JdbcBatchingOutputFormat.flush(JdbcBatchingOutputFormat.java:179)
at org.apache.flink.connector.jdbc.internal.JdbcBatchingOutputFormat.close(JdbcBatchingOutputFormat.java:229)
... 11 more
Caused by: java.lang.IllegalArgumentException: Only singleton array is allowed, but we got: ["E5", "E6"]
at com.clickhouse.client.ClickHouseValue.update(ClickHouseValue.java:1148)
at com.clickhouse.jdbc.ClickHouseConnection.createArrayOf(ClickHouseConnection.java:40)
at com.clickhouse.jdbc.ClickHouseConnection.createArrayOf(ClickHouseConnection.java:23)
at com.anso.process.function.JdbcCkStatementBuilder.accept(JdbcCkStatementBuilder.scala:55)
at com.anso.process.function.JdbcCkStatementBuilder.accept(JdbcCkStatementBuilder.scala:21)
at org.apache.flink.connector.jdbc.internal.executor.SimpleBatchStatementExecutor.executeBatch(SimpleBatchStatementExecutor.java:70)
at org.apache.flink.connector.jdbc.internal.JdbcBatchingOutputFormat.attemptFlush(JdbcBatchingOutputFormat.java:213)
at org.apache.flink.connector.jdbc.internal.JdbcBatchingOutputFormat.flush(JdbcBatchingOutputFormat.java:183)
at org.apache.flink.connector.jdbc.internal.JdbcBatchingOutputFormat.lambda$open$0(JdbcBatchingOutputFormat.java:127)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)