This repository has been archived by the owner on Mar 24, 2021. It is now read-only.

Merge pull request #70 from cloudant-labs/73249_update_libraries
Update libraries, upgrade version to 1.6.4
mayya-sharipova committed Sep 6, 2016
2 parents 3d635c0 + 74b60e8 commit 4ca11e9
Showing 10 changed files with 30 additions and 38 deletions.
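Most of the Scala source changes in this commit adapt the connector to the Play JSON 2.4 lookup API (the build bumps `playJsonV` to 2.4.8): in Play 2.2/2.3, `json \ "field"` returned a `JsValue` (a `JsUndefined` when the key was missing), while in 2.4 it returns a `JsLookupResult` that has to be unwrapped with `.get`, `.toOption`, or `.asOpt[T]`. A minimal sketch of the difference, using illustrative field names:

```scala
import play.api.libs.json._

object PlayJsonLookupSketch extends App {
  val json = Json.parse("""{"total_rows": 42}""")

  // Play JSON 2.4: `\` yields a JsLookupResult, not a JsValue.
  val present: Option[Int] = (json \ "total_rows").asOpt[Int] // Some(42)
  val missing: Option[Int] = (json \ "pending").asOpt[Int]    // None
  val strict: JsValue      = (json \ "total_rows").get        // throws if the key is absent

  println(s"present=$present missing=$missing strict=$strict")
}
```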
10 changes: 4 additions & 6 deletions cloudant-spark-sql/build.sbt
@@ -2,9 +2,9 @@ organization := "cloudant-labs"

name := "spark-cloudant"

version := "1.6.3"
version := "1.6.4"

scalaVersion := "2.10.4"
scalaVersion := "2.10.5"

fork in run := true

@@ -16,7 +16,7 @@ resolvers ++= Seq(
libraryDependencies ++= {
val sparkV = "1.6.0"
val sprayV = "1.3.2"
val playJsonV = "2.2.3"
val playJsonV = "2.4.8"
val httpcomponentsV = "4.5.2"
Seq(
"org.apache.spark" %% "spark-core" % sparkV % "provided",
@@ -48,7 +48,7 @@ spName := "cloudant-labs/spark-cloudant"

sparkVersion := "1.6.0"

sparkComponents := Seq("sql")
sparkComponents := Seq("sql", "streaming")

spShortDescription := "Spark SQL Cloudant External Datasource"

@@ -59,8 +59,6 @@ spDescription := """Spark SQL Cloudant External Datasource.
| - Supports predicates push down (only based on _id field in databases, but varios fields for indexes).
| - Support column pruning for indexes.""".stripMargin

-spAppendScalaVersion := true

licenses += "Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0")

credentials += Credentials(Path.userHome / ".ivy2" / ".sbtcredentials")
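For context, `sparkComponents := Seq("sql", "streaming")` tells the sbt-spark-package plugin to pull in the corresponding Spark modules at `sparkVersion`; roughly the equivalent of declaring them by hand as provided dependencies. A hedged sketch of the effect, not a change required in this build:

```scala
// Approximate effect of sparkComponents := Seq("sql", "streaming") with sparkVersion := "1.6.0"
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-sql"       % "1.6.0" % "provided",
  "org.apache.spark" %% "spark-streaming" % "1.6.0" % "provided"
)
```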
2 changes: 1 addition & 1 deletion cloudant-spark-sql/project/assembly.sbt
@@ -1 +1 @@
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.12.0")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3")
4 changes: 2 additions & 2 deletions cloudant-spark-sql/project/plugins.sbt
@@ -1,4 +1,4 @@

resolvers += "Spark Package Main Repo" at "https://dl.bintray.com/spark-packages/maven"
resolvers += "bintray-spark-packages" at "https://dl.bintray.com/spark-packages/maven/"

addSbtPlugin("org.spark-packages" % "sbt-spark-package" % "0.2.3")
addSbtPlugin("org.spark-packages" % "sbt-spark-package" % "0.2.5")
@@ -18,14 +18,12 @@ package com.cloudant.spark
import play.api.libs.json.JsValue
import play.api.libs.json.JsArray
import play.api.libs.json.Json
-import play.api.libs.json.JsUndefined
import java.net.URLEncoder
import com.cloudant.spark.common._
-import play.api.libs.json.JsNumber
import akka.actor.ActorSystem

/*
-@author yanglei
+@author yanglei*
Only allow one field pushdown now
as the filter today does not tell how to link the filters out And v.s. Or
*/
@@ -84,7 +82,7 @@ as the filter today does not tell how to link the filters out And v.s. Or
createDBOnSave
}

-def getLastNum(result: JsValue): JsValue = {result \ "last_seq"}
+def getLastNum(result: JsValue): JsValue = (result \ "last_seq").get

def getTotalUrl(url: String) = {
if (url.contains('?')) url+"&limit=1"
@@ -210,19 +208,20 @@ as the filter today does not tell how to link the filters out And v.s. Or
}

def getTotalRows(result: JsValue): Int = {
-val value = result \ "total_rows"
-value match {
-case s : JsUndefined =>
-(result \ "pending").as[JsNumber].value.intValue() + 1
-case _ => value.as[JsNumber].value.intValue()
+val tr = (result \ "total_rows").asOpt[Int]
+tr match {
+case None =>
+(result \ "pending").as[Int] + 1
+case Some(tr2) =>
+tr2
}
}

def getRows(result: JsValue): Seq[JsValue] = {
if (viewName == null) {
((result \ "rows").asInstanceOf[JsArray]).value.map(row => row \ "doc")
((result \ "rows").as[JsArray]).value.map(row => (row \ "doc").get)
} else {
((result \ "rows").asInstanceOf[JsArray]).value.map(row => row)
((result \ "rows").as[JsArray]).value.map(row => row)
}
}

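The reworked `getRows` above maps each element of the `rows` array to its embedded document, unwrapping the Play 2.4 `JsLookupResult` with `.get`. Against a Cloudant/CouchDB `_all_docs?include_docs=true` style response it behaves roughly like this self-contained sketch; the sample payload is invented for illustration:

```scala
import play.api.libs.json._

object RowsSketch extends App {
  val result = Json.parse(
    """{"total_rows": 1,
      | "rows": [{"id": "a", "key": "a", "value": {"rev": "1-x"}, "doc": {"_id": "a", "n": 7}}]}""".stripMargin)

  // Same shape as the new code path: read the rows array, keep only the embedded docs.
  val docs: Seq[JsValue] =
    (result \ "rows").as[JsArray].value.map(row => (row \ "doc").get)

  println(docs)
}
```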
@@ -66,7 +66,7 @@ class CloudantReceiver(cloudantParams: Map[String, String])
while (!isStopped() && line != null) {
if (line.length() > 0) {
val json = Json.parse(line)
val jsonDoc = json \ "doc"
val jsonDoc = (json \ "doc").get
val doc = Json.stringify(jsonDoc)
store(doc)
}
@@ -82,4 +82,4 @@ class CloudantReceiver(cloudantParams: Map[String, String])
def onStop() = {
config.shutdown()
}
-}
+}
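The receiver above parses each line it reads, extracts the embedded `doc`, and stores it into Spark Streaming. A hedged sketch of wiring such a receiver into a streaming job; the import path, the option keys other than `cloudant.host` (which appears in `JsonStoreConfigManager` below), and all placeholder values are assumptions for illustration:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import com.cloudant.spark.CloudantReceiver

object ChangesStreamSketch extends App {
  val sparkConf = new SparkConf().setAppName("cloudant-changes-sketch").setMaster("local[2]")
  val ssc = new StreamingContext(sparkConf, Seconds(10))

  // Keys besides cloudant.host, and the placeholder values, are assumptions, not the connector's documented options.
  val changes = ssc.receiverStream(new CloudantReceiver(Map(
    "cloudant.host"     -> "ACCOUNT.cloudant.com",
    "cloudant.username" -> "USERNAME",
    "cloudant.password" -> "PASSWORD",
    "database"          -> "SOME_DATABASE")))

  changes.print()
  ssc.start()
  ssc.awaitTerminationOrTimeout(60 * 1000)
}
```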
@@ -20,12 +20,6 @@ package com.cloudant.spark.common
import com.cloudant.spark.JsonUtil
import org.apache.spark.sql.sources._
import play.api.libs.json.JsValue
-import play.api.libs.json.Json
-import play.api.libs.json.JsSuccess
-import play.api.libs.json.JsError
-import scala.collection.immutable.StringOps
-import play.api.libs.json.JsNumber
-import play.api.libs.json.JsBoolean
import play.api.libs.json.JsString
import org.apache.spark.SparkEnv
import akka.event.Logging
@@ -26,7 +26,7 @@ import org.apache.spark.SparkConf

object JsonStoreConfigManager
{
val CLOUDANT_CONNECTOR_VERSION = "1.6.3"
val CLOUDANT_CONNECTOR_VERSION = "1.6.4"
val SCHEMA_FOR_ALL_DOCS_NUM = -1

private val CLOUDANT_HOST_CONFIG = "cloudant.host"
@@ -10,15 +10,15 @@ object JsonUtil{
var finalValue: Option[JsValue] = None
breakable {
for (i <- path.indices){
-val f = currentValue \ path(i)
+val f: Option[JsValue] = (currentValue \ path(i)).toOption
f match {
-case s : JsUndefined => break
-case _ => currentValue = f
+case Some(f2) => currentValue = f2
+case None => break
}
if (i == path.length -1) //The leaf node
finalValue = Some(currentValue)
}
}
finalValue
}
-}
+}
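The updated field lookup above walks a path one segment at a time and stops at the first missing key. The same idea can be written as a fold over `toOption`; this is a hypothetical helper for illustration, not the repo's own `JsonUtil` API:

```scala
import play.api.libs.json._

object NestedLookupSketch extends App {
  // Resolve a nested path, returning None as soon as a segment is missing.
  def getNested(root: JsValue, path: Seq[String]): Option[JsValue] =
    path.foldLeft(Option(root)) { (acc, key) => acc.flatMap(v => (v \ key).toOption) }

  val json = Json.parse("""{"a": {"b": {"c": 1}}}""")
  println(getNested(json, Seq("a", "b", "c"))) // Some(1)
  println(getNested(json, Seq("a", "x")))      // None
}
```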
6 changes: 3 additions & 3 deletions examples/scala/build.sbt
@@ -4,7 +4,7 @@ name := "spark_test"

version := "0.1-SNAPSHOT"

scalaVersion := "2.10.4"
scalaVersion := "2.10.5"

fork in run := true

@@ -14,12 +14,12 @@ resolvers ++= Seq(
)

libraryDependencies ++= {
val sparkV = "1.5.1"
val sparkV = "1.6.0"
Seq(
"org.apache.spark" %% "spark-core" % sparkV % "provided",
"org.apache.spark" %% "spark-sql" % sparkV % "provided",
"org.apache.spark" %% "spark-streaming" % sparkV % "provided"
)
}

sparkVersion := "1.5.1"
sparkVersion := "1.6.0"
5 changes: 3 additions & 2 deletions test/helpers/dbutils.py
@@ -15,6 +15,7 @@
#******************************************************************************/
import requests
import os
+import time

class CloudantDbUtils:
"""
@@ -94,7 +95,8 @@ def reset_databases(self):
for db in self.test_dbs:
if self.db_exists(db):
self.drop_database(db)

+# leave some time for synchonization between nodes
+time.sleep(3)
self.create_database(db)
self.create_index(db)

@@ -122,7 +124,6 @@ def wait_for_doc_count(self, db_name, expected, timeoutInMin):
"""
Wait for the given database to reach the target doc count or until the timeout setting is reached
"""
-import time
timeout = time.time() + timeoutInMin * 60

while (time.time() < timeout):

