Skip to content

Commit

Permalink
update the table scan accordingly since the unwrapData function changed
Browse files Browse the repository at this point in the history
  • Loading branch information
chenghao-intel committed Jul 17, 2014
1 parent afc39da commit 34cc21a
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -96,19 +96,9 @@ case class HiveTableScan(
.getOrElse(sys.error(s"Can't find attribute $a"))
val fieldObjectInspector = ref.getFieldObjectInspector

val unwrapHiveData = fieldObjectInspector match {
case _: HiveVarcharObjectInspector =>
(value: Any) => value.asInstanceOf[HiveVarchar].getValue
case _: HiveDecimalObjectInspector =>
(value: Any) => BigDecimal(value.asInstanceOf[HiveDecimal].bigDecimalValue())
case _ =>
identity[Any] _
}

(row: Any, _: Array[String]) => {
val data = objectInspector.getStructFieldData(row, ref)
val hiveData = unwrapData(data, fieldObjectInspector)
if (hiveData != null) unwrapHiveData(hiveData) else null
unwrapData(data, fieldObjectInspector)
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive

import scala.collection.mutable.ArrayBuffer

import org.apache.hadoop.hive.common.`type`.{HiveDecimal, HiveVarchar}
import org.apache.hadoop.hive.common.`type`.HiveDecimal
import org.apache.hadoop.hive.ql.exec.UDF
import org.apache.hadoop.hive.ql.exec.{FunctionInfo, FunctionRegistry}
import org.apache.hadoop.hive.ql.udf.{UDFType => HiveUDFType}
Expand Down

0 comments on commit 34cc21a

Please sign in to comment.