Skip to content
This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Commit

Permalink
Merge pull request #7 from yanglei99/master
Browse files Browse the repository at this point in the history
upgrade to Spark 1.5.1
  • Loading branch information
rnewson committed Oct 30, 2015
2 parents 5335962 + c1d0302 commit ea56e75
Show file tree
Hide file tree
Showing 8 changed files with 8 additions and 7 deletions.
1 change: 1 addition & 0 deletions README.md
Expand Up @@ -36,6 +36,7 @@ Spark Version | Release # | Binary Location
1.3.1 | v1.3.1.2 | [Location](https://github.com/cloudant/spark-cloudant/releases/download/v1.3.1.2/cloudant-spark.jar)
1.4.0 | v1.4.0.0 | [Location](https://github.com/cloudant/spark-cloudant/releases/download/1.4.0.0/cloudant-spark.jar)
1.4.1 | v1.4.1.3 | [Location](https://github.com/cloudant/spark-cloudant/releases/download/v1.4.1.3/cloudant-spark.jar)
1.5.1 | v1.5.1.0 | [Location](https://github.com/yanglei99/spark-cloudant/releases/download/v1.5.1.0/cloudant-spark.jar)


### Build from source:
Expand Down
2 changes: 1 addition & 1 deletion cloudant-spark-sql/build.sbt
Expand Up @@ -14,7 +14,7 @@ resolvers ++= Seq(
)

libraryDependencies ++= {
val sparkV = "1.4.1"
val sparkV = "1.5.1"
val sprayV = "1.3.2"
val playJsonV = "2.2.3"
Seq(
Expand Down
Expand Up @@ -17,8 +17,8 @@ package com.cloudant.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.Row
import org.apache.spark.sql.sources.{TableScan, RelationProvider, BaseRelation}
import com.cloudant.spark.common._

Expand Down
Expand Up @@ -18,7 +18,7 @@ package com.cloudant.spark
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.sources._
import scala.collection.mutable.ArrayBuffer
Expand Down
Expand Up @@ -18,7 +18,7 @@ package com.cloudant.spark
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.sources._
import com.cloudant.spark.common._
Expand Down
Expand Up @@ -18,7 +18,7 @@ package com.cloudant.spark
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.sources._
import scala.collection.mutable.ArrayBuffer
Expand Down
Expand Up @@ -18,7 +18,7 @@ package com.cloudant.spark.riak
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.expressions.Row
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.sources._
import scala.collection.mutable.ArrayBuffer
Expand Down
2 changes: 1 addition & 1 deletion spark-test/build.sbt
Expand Up @@ -14,7 +14,7 @@ resolvers ++= Seq(
)

libraryDependencies ++= {
val sparkV = "1.4.1"
val sparkV = "1.5.1"
Seq(
"org.apache.spark" %% "spark-core" % sparkV % "provided",
"org.apache.spark" %% "spark-sql" % sparkV % "provided"
Expand Down

0 comments on commit ea56e75

Please sign in to comment.