From 822363f56cf9e79a6f7673b86b5afb3147a98bbc Mon Sep 17 00:00:00 2001
From: manishgupta88
Date: Wed, 10 May 2017 19:16:31 +0530
Subject: [PATCH] Problem: When the bad records action is set to FAIL and an
 IUD operation fails due to bad records, the error message is not displayed
 correctly, so the user cannot tell why the update operation failed. In the
 same case, other operations such as data load and insert into display a
 proper error message for the bad-record failure.

Fix: Instead of forming its own message, take the executor failure message and
pass it to the update operation as the exception message.
---
 .../src/test/resources/IUD/bad_record.csv           |  2 ++
 .../spark/rdd/CarbonDataRDDFactory.scala            |  6 +++++-
 .../iud/UpdateCarbonTableTestCase.scala             | 18 ++++++++++++++++++
 3 files changed, 25 insertions(+), 1 deletion(-)
 create mode 100644 integration/spark-common-test/src/test/resources/IUD/bad_record.csv

diff --git a/integration/spark-common-test/src/test/resources/IUD/bad_record.csv b/integration/spark-common-test/src/test/resources/IUD/bad_record.csv
new file mode 100644
index 00000000000..358cbf69b58
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/IUD/bad_record.csv
@@ -0,0 +1,2 @@
+item,name
+2,Apple
\ No newline at end of file
diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index 4cca0a3975b..51a56e92271 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -826,7 +826,11 @@ object CarbonDataRDDFactory {
         // updateModel.get.executorErrors.errorMsg = errorMessage
         if (updateModel.get.executorErrors.failureCauses == FailureCauses.NONE) {
           updateModel.get.executorErrors.failureCauses = FailureCauses.EXECUTOR_FAILURE
-          updateModel.get.executorErrors.errorMsg = "Update failed as the data load has failed."
+          if (null != executorMessage && !executorMessage.isEmpty) {
+            updateModel.get.executorErrors.errorMsg = executorMessage
+          } else {
+            updateModel.get.executorErrors.errorMsg = "Update failed as the data load has failed."
+          }
         }
         return
       }
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
index 8e9efe55375..2fc51b52ad4 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
@@ -365,6 +365,24 @@ class UpdateCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     }
   }
 
+  test("Failure of update operation due to bad record with proper error message") {
+    try {
+      CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FAIL")
+      val errorMessage = intercept[Exception] {
+        sql("drop table if exists update_with_bad_record")
+        sql("create table update_with_bad_record(item int, name String) stored by 'carbondata'")
+        sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/IUD/bad_record.csv' into table " +
+            s"update_with_bad_record")
+        sql("update update_with_bad_record set (item)=(3.45)").show()
+        sql("drop table if exists update_with_bad_record")
+      }
+      assert(errorMessage.getMessage.contains("Data load failed due to bad record"))
+    } finally {
+      CarbonProperties.getInstance()
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FORCE")
+    }
+  }
 
   override def afterAll {
     sql("use default")
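
Note: for readers who want the gist of the fix without the surrounding CarbonData context, below is a minimal, self-contained Scala sketch of the fallback pattern added in the CarbonDataRDDFactory hunk above. ExecutorErrors and the plain-string cause values here are simplified stand-ins for CarbonData's ExecutorErrors and FailureCauses types, not their real API.

object ErrorPropagationSketch {

  // Simplified stand-in for CarbonData's executor error holder.
  final case class ExecutorErrors(var failureCauses: String = "NONE",
                                  var errorMsg: String = "")

  def recordUpdateFailure(errors: ExecutorErrors, executorMessage: String): Unit = {
    if (errors.failureCauses == "NONE") {
      errors.failureCauses = "EXECUTOR_FAILURE"
      // Prefer the concrete executor message (for example a bad-record error)
      // so the user sees the real cause; fall back to the generic text only
      // when the executor did not report anything.
      errors.errorMsg =
        if (executorMessage != null && executorMessage.nonEmpty) executorMessage
        else "Update failed as the data load has failed."
    }
  }

  def main(args: Array[String]): Unit = {
    val errors = ExecutorErrors()
    recordUpdateFailure(errors, "Data load failed due to bad record")
    println(errors.errorMsg) // prints the propagated bad-record message
  }
}

The new test in the patch exercises the same behaviour: with CARBON_BAD_RECORDS_ACTION set to FAIL, the bad-record message reported by the executor is what ends up in the exception the user sees.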