From dbd321b6727488f73059e5d3ccf79b947673e57c Mon Sep 17 00:00:00 2001
From: scwf
Date: Thu, 20 Nov 2014 19:10:08 +0800
Subject: [PATCH 1/3] fix path in HiveFromSpark

---
 .../apache/spark/examples/sql/hive/HiveFromSpark.scala | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
index 0c52ef8ed96ac..2109e15d5627d 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
@@ -25,6 +25,14 @@ object HiveFromSpark {
   case class Record(key: Int, value: String)
 
   def main(args: Array[String]) {
+
+    if (args.length < 1) {
+      System.err.println("Usage: HiveFromSpark <path>")
+      System.exit(1)
+    }
+    // Data path for a table type of (Int, String)
+    val path = args(0)
+
     val sparkConf = new SparkConf().setAppName("HiveFromSpark")
     val sc = new SparkContext(sparkConf)
 
@@ -35,7 +43,7 @@ object HiveFromSpark {
     import hiveContext._
 
     sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)")
-    sql("LOAD DATA LOCAL INPATH 'src/main/resources/kv1.txt' INTO TABLE src")
+    sql(s"LOAD DATA LOCAL INPATH '$path' INTO TABLE src")
 
     // Queries are expressed in HiveQL
     println("Result of 'SELECT *': ")

From b00e20c162e886673bf4fe04029b4da82a246634 Mon Sep 17 00:00:00 2001
From: scwf
Date: Sat, 22 Nov 2014 18:29:29 +0800
Subject: [PATCH 2/3] fix path using SPARK_HOME

---
 .../apache/spark/examples/sql/hive/HiveFromSpark.scala | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
index 2109e15d5627d..00188c71a25e4 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
@@ -26,17 +26,11 @@ object HiveFromSpark {
 
   def main(args: Array[String]) {
 
-    if (args.length < 1) {
-      System.err.println("Usage: HiveFromSpark <path>")
-      System.exit(1)
-    }
-    // Data path for a table type of (Int, String)
-    val path = args(0)
-
     val sparkConf = new SparkConf().setAppName("HiveFromSpark")
     val sc = new SparkContext(sparkConf)
+    val path = s"${System.getenv("SPARK_HOME")}/examples/src/main/resources/kv1.txt"
 
-    // A local hive context creates an instance of the Hive Metastore in process, storing
+    // A local hive context creates an instance of the Hive Metastore in process, storing
     // the warehouse data in the current directory. This location can be overridden by
     // specifying a second parameter to the constructor.
     val hiveContext = new HiveContext(sc)

From ed3d6c95f1d04159cc4bbab5e5e26c846cd2b594 Mon Sep 17 00:00:00 2001
From: scwf
Date: Sat, 22 Nov 2014 18:31:00 +0800
Subject: [PATCH 3/3] revert unneeded change

---
 .../org/apache/spark/examples/sql/hive/HiveFromSpark.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
index 00188c71a25e4..227acc117502d 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/hive/HiveFromSpark.scala
@@ -25,12 +25,11 @@ object HiveFromSpark {
   case class Record(key: Int, value: String)
 
   def main(args: Array[String]) {
-
     val sparkConf = new SparkConf().setAppName("HiveFromSpark")
     val sc = new SparkContext(sparkConf)
     val path = s"${System.getenv("SPARK_HOME")}/examples/src/main/resources/kv1.txt"
 
-    // A local hive context creates an instance of the Hive Metastore in process, storing
+    // A local hive context creates an instance of the Hive Metastore in process, storing
     // the warehouse data in the current directory. This location can be overridden by
     // specifying a second parameter to the constructor.
     val hiveContext = new HiveContext(sc)
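
For reference, a minimal standalone sketch of the path-resolution approach the series lands on: kv1.txt is located relative to SPARK_HOME instead of being passed as a command-line argument. The object name HiveFromSparkSketch, the Option-based null handling, and the relative-path fallback are illustrative assumptions and not part of the patches; System.getenv does return null when SPARK_HOME is unset.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Illustrative sketch only; not part of the patch series above.
object HiveFromSparkSketch {
  def main(args: Array[String]): Unit = {
    // System.getenv returns null when SPARK_HOME is unset; wrap it in Option
    // and fall back to the in-tree relative path (assumed fallback, not in the patch).
    val path = Option(System.getenv("SPARK_HOME"))
      .map(home => s"$home/examples/src/main/resources/kv1.txt")
      .getOrElse("examples/src/main/resources/kv1.txt")

    val sparkConf = new SparkConf().setAppName("HiveFromSparkSketch")
    val sc = new SparkContext(sparkConf)

    // A local HiveContext keeps the metastore and warehouse data in the current directory.
    val hiveContext = new HiveContext(sc)
    import hiveContext._

    sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING)")
    sql(s"LOAD DATA LOCAL INPATH '$path' INTO TABLE src")
    sql("SELECT key, value FROM src").collect().foreach(println)

    sc.stop()
  }
}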