Merge branch 'master' of github.com:amplab/shark

2 parents 6ccd53b + 5383c68 · commit 41d1223678e7a46014c22631811b7f03ae5b74a9 · @rxin committed May 31, 2012
Showing with 15 additions and 15 deletions.
  1. +10 −10 src/main/scala/shark/exec/MapJoinOperator.scala
  2. +5 −5 src/main/scala/shark/exec/SparkTask.scala
src/main/scala/shark/exec/MapJoinOperator.scala (20 changed lines)
@@ -2,31 +2,31 @@ package shark.exec
import java.util.{HashMap => JavaHashMap, List => JavaList}
-import org.apache.hadoop.hive.ql.exec.{ExprNodeEvaluator, JoinUtil}
-import org.apache.hadoop.hive.ql.exec.{MapJoinOperator => HiveMapJoinOperator}
-import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey
-import org.apache.hadoop.hive.ql.plan.MapJoinDesc
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import org.apache.hadoop.io.BytesWritable
-import org.apache.hadoop.hive.ql.plan.{PartitionDesc, TableDesc}
+
+import org.apache.hadoop.hive.ql.exec.{ExprNodeEvaluator, JoinUtil}
import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator.{HashTableSinkObjectCtx => MapJoinObjectCtx}
import org.apache.hadoop.hive.ql.exec.MapJoinMetaData
+import org.apache.hadoop.hive.ql.exec.{MapJoinOperator => HiveMapJoinOperator}
+import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectValue
import org.apache.hadoop.hive.ql.exec.persistence.MapJoinRowContainer
-import org.apache.hadoop.hive.serde2.SerDe
+import org.apache.hadoop.hive.ql.plan.MapJoinDesc
+import org.apache.hadoop.hive.ql.plan.{PartitionDesc, TableDesc}
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
+import org.apache.hadoop.hive.serde2.SerDe
import scala.collection.JavaConversions._
import scala.reflect.BeanProperty
-import shark.SharkEnv
+import spark.broadcast.Broadcast
import shark.collections.Conversions._
import spark.RDD
-import spark.broadcast.Broadcast
-
+import shark.SharkEnv
/**
* A join operator optimized for joining a large table with a number of small
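
The class comment at the end of this hunk describes MapJoinOperator as "a join operator optimized for joining a large table with a number of small" tables, i.e. the broadcast (map-side) hash-join pattern. As a rough illustration of that idea only, here is a minimal, self-contained Scala sketch over plain collections; the names (MapJoinSketch, buildHashTable, probe) and types are hypothetical and are not Shark or Hive API.

// Illustration only: build a hash table from the small side, then stream the
// large side and probe it. Nothing here is Shark's actual implementation.
object MapJoinSketch {

  // Build an in-memory hash table from the small table, keyed on the join key.
  def buildHashTable[K, V](smallTable: Seq[(K, V)]): Map[K, Seq[V]] =
    smallTable.groupBy(_._1).map { case (k, rows) => k -> rows.map(_._2) }

  // Stream rows of the large table and probe the hash table for matches.
  def probe[K, L, V](largeTable: Iterator[(K, L)],
                     hashTable: Map[K, Seq[V]]): Iterator[(K, (L, V))] =
    largeTable.flatMap { case (k, left) =>
      hashTable.getOrElse(k, Seq.empty).map(right => (k, (left, right)))
    }

  def main(args: Array[String]): Unit = {
    val small = Seq(1 -> "a", 2 -> "b")                 // small side, fits in memory
    val large = Iterator(1 -> "x", 1 -> "y", 3 -> "z")  // large side, streamed
    probe(large, buildHashTable(small)).foreach(println)
    // prints (1,(x,a)) and (1,(y,a)); key 3 has no match and is dropped
  }
}

Judging from the imports this hunk reorders (HashTableSinkOperator's object context, spark.broadcast.Broadcast), the real operator ships prebuilt small-table hash structures to the map tasks via Spark broadcast; the sketch above shows only the build-then-probe shape of the join.
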
src/main/scala/shark/exec/SparkTask.scala (10 changed lines)
@@ -3,13 +3,13 @@ package shark.exec
import java.util.{List => JavaList}
import org.apache.hadoop.hive.metastore.api.FieldSchema
-import org.apache.hadoop.hive.ql.exec.{TableScanOperator => HiveTableScanOperator, Utilities}
import org.apache.hadoop.hive.ql.{Context, DriverContext}
+import org.apache.hadoop.hive.ql.exec.{TableScanOperator => HiveTableScanOperator, Utilities}
import org.apache.hadoop.hive.ql.metadata.{Partition, Table}
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner
import org.apache.hadoop.hive.ql.parse._
-import org.apache.hadoop.hive.ql.plan.CreateTableDesc
import org.apache.hadoop.hive.ql.plan.api.StageType
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc
import org.apache.hadoop.hive.ql.plan.PartitionDesc
import scala.collection.JavaConversions._
@@ -76,11 +76,11 @@ with java.io.Serializable with LogHelper {
op.parts = ppl.getConfirmedPartns.toArray ++ ppl.getUnknownPartns.toArray
val allParts = op.parts ++ ppl.getDeniedPartns.toArray
if (allParts.size == 0) {
- op.firstConfPartDesc = new PartitionDesc(op.tableDesc, null)
+ op.firstConfPartDesc = new PartitionDesc(op.tableDesc, null)
} else {
- op.firstConfPartDesc = Utilities.getPartitionDesc(allParts(0).asInstanceOf[Partition])
+ op.firstConfPartDesc = Utilities.getPartitionDesc(allParts(0).asInstanceOf[Partition])
}
- }
+ }
}}
}
