Linked commits:
2269e84 [hotfix-DTStack#1052][hive] Replace the call order of HDFSOutputFormat Close and finalizeGlobal
54b01f1 [hotfix-#1052][hive] Replace the call order of HDFSOutputFormat Close and finalizeGlobal
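Judging by the commit titles, the fix reorders HDFSOutputFormat's close() and finalizeGlobal() so that the overwrite cleanup can no longer delete files that other parallel subtasks have already committed; a sketch of the suspected underlying race is given after the Description below.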
Search before asking
I have searched the issues and found no similar question.
I have googled my question but didn't get any help.
I have read the documentation (ChunJun doc) but it didn't help me.
Description
When the sink is Hive and writeMode is set to overwrite with multiple parallelism, only part of the data is written.
JSON configuration:
{ "job": { "content": [ { "reader": { "name": "mysqlreader", "parameter": { "username": "*******", "password": "*********************", "splitPk": "id", "connection": [ { "jdbcUrl": [ "*******************************" ], "table": [ "s_flinkx_test" ] } ], "column": [ { "name": "id", "type": "int" }, { "name": "title", "type": "varchar" }, { "name": "gmt_modified", "type": "varchar" }, { "name": "price", "type": "double" }, { "name": "price_float", "type": "float" }, { "name": "price_decimal", "type": "DECIMAL(13,5)" }, { "name": "bool_test", "type": "BOOLEAN" }, { "name": "id_TINYINT", "type": "int" }, { "name": "id_SMALLINT", "type": "int" }, { "name": "id_BIGINT", "type": "int" }, { "name": "char_testT", "type": "char" }, { "name": "createtime", "type": "timestamp" }, { "name": "starts", "type": "varchar" } ], "polling": false }, "table": { "tableName": "sourceTable" } }, "writer": { "parameter": { "print": true, "writeMode": "overwrite", "partitionType": "DAY", "tablesColumn": "{\"s_flinkx_hive_test\":[{\"comment\":\"\",\"type\":\"int\",\"key\":\"id\"},{\"comment\":\"\",\"type\":\"string\",\"key\":\"title\"},{\"comment\":\"\",\"type\":\"string\",\"key\":\"gmt_modified\"},{\"comment\":\"\",\"type\":\"double\",\"key\":\"price\"},{\"comment\":\"\",\"type\":\"float\",\"key\":\"price_float\"},{\"comment\":\"\",\"type\":\"DECIMAL(13,5)\",\"key\":\"price_decimal\"},{\"comment\":\"\",\"type\":\"BOOLEAN\",\"key\":\"bool_test\"},{\"comment\":\"\",\"type\":\"int\",\"key\":\"id_TINYINT\"},{\"comment\":\"\",\"type\":\"int\",\"key\":\"id_SMALLINT\"},{\"comment\":\"\",\"type\":\"int\",\"key\":\"id_BIGINT\"},{\"comment\":\"\",\"type\":\"char(2)\",\"key\":\"char_testT\"},{\"comment\":\"\",\"type\":\"date\",\"key\":\"createtime\"},{\"comment\":\"\",\"type\":\"string\",\"key\":\"starts\"}]}", "partition": "pt", "hadoopConfig": { "dfs.ha.namenodes.bigdata": "nn1,nn2", "dfs.namenode.rpc-address.bigdata.nn2": "bigdata-0003:9000", "dfs.client.failover.proxy.provider.bigdata": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider", "dfs.namenode.rpc-address.bigdata.nn1": "bigdata-0004:9000", "dfs.nameservices": "bigdata", "hadoop.security.authorization": "true", "hadoop.security.authentication": "Kerberos", "dfs.namenode.kerberos.principal": "cs-bd/_HOST@HADOOP.COM", "dfs.namenode.keytab.file": "/etc/security/flinkx/hive/cs-bd.keytab", "java.security.krb5.conf": "/etc/security/flinkx/hive/krb5.conf", "principalFile": "/etc/security/flinkx/hive/cs-bd.keytab", "useLocalFile": true }, "jdbcUrl": "*************************************************************", "defaultFS": "hdfs://bigdata", "fileType": "text", "compress":"GZIP", "charsetName": "utf-8", "username": "*****" }, "name": "hivewriter" } } ], "setting": { "speed": { "channel": 3, "readerChannel": 3, "writerChannel": 2, "bytes": 0 } } } }
Execution UI: (screenshot)
The query result contains only one task's data: (screenshot)
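To make the suspected failure mode concrete, below is a minimal, self-contained simulation of the race. It is not ChunJun's actual code: the class, methods, and paths are hypothetical stand-ins for the per-subtask close() (which commits a temp file into the target partition directory) and the global overwrite cleanup (which clears the partition). If the cleanup runs after some subtasks have already committed, their files are deleted, matching the symptom of only one task's data surviving.

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

// Hypothetical simulation of the overwrite race; not ChunJun source code.
public class OverwriteOrderDemo {

    // Stand-in for a subtask's close(): commit its temp file into the partition dir.
    static void subtaskClose(Path tempFile, Path partitionDir) throws IOException {
        Files.createDirectories(partitionDir);
        Files.move(tempFile, partitionDir.resolve(tempFile.getFileName()),
                StandardCopyOption.REPLACE_EXISTING);
    }

    // Stand-in for the overwrite cleanup: delete whatever the partition already holds.
    static void overwriteCleanup(Path partitionDir) throws IOException {
        if (!Files.isDirectory(partitionDir)) {
            return;
        }
        try (DirectoryStream<Path> files = Files.newDirectoryStream(partitionDir)) {
            for (Path f : files) {
                Files.delete(f);
            }
        }
    }

    public static void main(String[] args) throws IOException {
        Path work = Files.createTempDirectory("overwrite-demo");
        Path partition = work.resolve("pt=20220101");
        Path t0 = Files.writeString(work.resolve("task-0.data"), "rows from task 0");
        Path t1 = Files.writeString(work.resolve("task-1.data"), "rows from task 1");

        // Buggy ordering: task 0 commits, the overwrite cleanup fires,
        // then task 1 commits. The cleanup silently deletes task 0's file.
        subtaskClose(t0, partition);
        overwriteCleanup(partition);
        subtaskClose(t1, partition);

        try (DirectoryStream<Path> files = Files.newDirectoryStream(partition)) {
            for (Path f : files) {
                System.out.println("survived: " + f.getFileName()); // only task-1.data
            }
        }
    }
}

Running the cleanup exactly once, before any subtask commits, keeps both files under the same sequence, which is what swapping the order of close and finalizeGlobal in the linked commits appears to achieve.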
Code of Conduct