Skip to content

Commit 163f4a4

Browse files
sujith71955 authored and dongjoon-hyun committed
[SPARK-26969][SQL] Using ODBC client not able to see the query data when column datatype is decimal
## What changes were proposed in this pull request? While processing the Rowdata in the server side ColumnValue BigDecimal type value processed by server has to converted to the HiveDecmal data type for successful processing of query using Hive ODBC client.As per current logic corresponding to the Decimal column datatype, the Spark server uses BigDecimal, and the ODBC client uses HiveDecimal. If the data type does not match, the client fail to parse Since this handing was missing the query executed in Hive ODBC client wont return or provides result to the user even though the decimal type column value data present. ## How was this patch tested? Manual test report and impact assessment is done using existing test-cases Before fix ![decimal_odbc](https://user-images.githubusercontent.com/12999161/53440179-e74a7f00-3a29-11e9-93db-83f2ae37ef16.PNG) After Fix ![hive_odbc](https://user-images.githubusercontent.com/12999161/53679519-70e0a200-3cf3-11e9-9437-9c27d2e5056d.PNG) Closes #23899 from sujith71955/master_decimalissue. Authored-by: s71955 <sujithchacko.2010@gmail.com> Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
1 parent 48d04f7 commit 163f4a4

File tree

3 files changed

+3
-23
lines changed

3 files changed

+3
-23
lines changed

sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkThriftServerProtocolVersionsSuite.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -186,8 +186,7 @@ class SparkThriftServerProtocolVersionsSuite extends HiveThriftJdbcTest {
186186
}
187187
}
188188

189-
// TODO: enable this test case after SPARK-28463 and SPARK-26969
190-
ignore(s"$version get decimal type") {
189+
test(s"$version get decimal type") {
191190
testExecuteStatementWithProtocolVersion(version,
192191
"SELECT cast(1 as decimal(18, 2)) as c") { rs =>
193192
assert(rs.next())

sql/hive-thriftserver/v1.2.1/src/main/java/org/apache/hive/service/cli/ColumnValue.java

Lines changed: 1 addition & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
import java.sql.Timestamp;
2424

2525
import org.apache.hadoop.hive.common.type.HiveChar;
26-
import org.apache.hadoop.hive.common.type.HiveDecimal;
2726
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
2827
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
2928
import org.apache.hadoop.hive.common.type.HiveVarchar;
@@ -140,14 +139,6 @@ private static TColumnValue timestampValue(Timestamp value) {
140139
return TColumnValue.stringVal(tStringValue);
141140
}
142141

143-
private static TColumnValue stringValue(HiveDecimal value) {
144-
TStringValue tStrValue = new TStringValue();
145-
if (value != null) {
146-
tStrValue.setValue(value.toString());
147-
}
148-
return TColumnValue.stringVal(tStrValue);
149-
}
150-
151142
private static TColumnValue stringValue(HiveIntervalYearMonth value) {
152143
TStringValue tStrValue = new TStringValue();
153144
if (value != null) {
@@ -195,7 +186,7 @@ public static TColumnValue toTColumnValue(Type type, Object value) {
195186
case INTERVAL_DAY_TIME_TYPE:
196187
return stringValue((HiveIntervalDayTime) value);
197188
case DECIMAL_TYPE:
198-
return stringValue(((HiveDecimal)value));
189+
return stringValue(((BigDecimal)value).toPlainString());
199190
case BINARY_TYPE:
200191
String strVal = value == null ? null : UTF8String.fromBytes((byte[])value).toString();
201192
return stringValue(strVal);

sql/hive-thriftserver/v2.3.5/src/main/java/org/apache/hive/service/cli/ColumnValue.java

Lines changed: 1 addition & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
import java.sql.Timestamp;
2424

2525
import org.apache.hadoop.hive.common.type.HiveChar;
26-
import org.apache.hadoop.hive.common.type.HiveDecimal;
2726
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
2827
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
2928
import org.apache.hadoop.hive.common.type.HiveVarchar;
@@ -141,15 +140,6 @@ private static TColumnValue timestampValue(Timestamp value) {
141140
return TColumnValue.stringVal(tStringValue);
142141
}
143142

144-
private static TColumnValue stringValue(HiveDecimal value, TypeDescriptor typeDescriptor) {
145-
TStringValue tStrValue = new TStringValue();
146-
if (value != null) {
147-
int scale = typeDescriptor.getDecimalDigits();
148-
tStrValue.setValue(value.toFormatString(scale));
149-
}
150-
return TColumnValue.stringVal(tStrValue);
151-
}
152-
153143
private static TColumnValue stringValue(HiveIntervalYearMonth value) {
154144
TStringValue tStrValue = new TStringValue();
155145
if (value != null) {
@@ -199,7 +189,7 @@ public static TColumnValue toTColumnValue(TypeDescriptor typeDescriptor, Object
199189
case INTERVAL_DAY_TIME_TYPE:
200190
return stringValue((HiveIntervalDayTime) value);
201191
case DECIMAL_TYPE:
202-
return stringValue((HiveDecimal)value, typeDescriptor);
192+
return stringValue(((BigDecimal)value).toPlainString());
203193
case BINARY_TYPE:
204194
String strVal = value == null ? null : UTF8String.fromBytes((byte[])value).toString();
205195
return stringValue(strVal);

0 commit comments

Comments
 (0)