Commit

SPARK-SQL-2846 add configureInputJobPropertiesForStorageHandler to initial job conf
alexoss68 committed Aug 13, 2014
1 parent fe47359 commit e4bdc4c
Showing 1 changed file with 2 additions and 1 deletion.
@@ -22,7 +22,7 @@ import org.apache.hadoop.fs.{Path, PathFilter}
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants._
 import org.apache.hadoop.hive.ql.exec.Utilities
 import org.apache.hadoop.hive.ql.metadata.{Partition => HivePartition, Table => HiveTable}
-import org.apache.hadoop.hive.ql.plan.TableDesc
+import org.apache.hadoop.hive.ql.plan.{PlanUtils, TableDesc}
 import org.apache.hadoop.hive.serde2.Deserializer
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector

@@ -249,6 +249,7 @@ private[hive] object HadoopTableReader extends HiveInspectors {
   def initializeLocalJobConfFunc(path: String, tableDesc: TableDesc)(jobConf: JobConf) {
     FileInputFormat.setInputPaths(jobConf, path)
     if (tableDesc != null) {
+      PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc)
       Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf)
     }
     val bufferSize = System.getProperty("spark.buffer.size", "65536")
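
For context, a minimal sketch of how the patched helper reads after this commit, reconstructed only from the hunk above; the surrounding members of HadoopTableReader and the remainder of the method are elided, and the indentation is assumed. Per the Hive API, configureInputJobPropertiesForStorageHandler lets a table's storage handler add its input job properties to the TableDesc so that the subsequent copy picks them up.

  // Sketch only: assembled from the diff above, not the full source file.
  def initializeLocalJobConfFunc(path: String, tableDesc: TableDesc)(jobConf: JobConf) {
    // Point the job at the table/partition path.
    FileInputFormat.setInputPaths(jobConf, path)
    if (tableDesc != null) {
      // New in this commit: let the table's storage handler (if one is defined)
      // contribute its input job properties before they are copied into the JobConf.
      PlanUtils.configureInputJobPropertiesForStorageHandler(tableDesc)
      Utilities.copyTableJobPropertiesToConf(tableDesc, jobConf)
    }
    val bufferSize = System.getProperty("spark.buffer.size", "65536")
    // ... rest of the method unchanged ...
  }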
