Fix warnings around deprecated usage of shouldRunTest
lresende committed Jun 12, 2019
1 parent 221c1f4 commit 843cd71a2c1b14e6babe9538cd0e9982f8bab836
Showing 8 changed files with 33 additions and 30 deletions.
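
Background on the warning: under Scala 2.12, eta-expanding a zero-argument method into a () => A function value draws a deprecation warning ("Eta-expansion of zero-argument methods is deprecated"), so passing TestUtils.shouldRunTest where a () => Boolean is expected no longer compiles cleanly. The fix applied throughout this commit is to write the function literal explicitly: () => TestUtils.shouldRunTest(). A minimal, self-contained sketch of the pattern (the names below are illustrative stand-ins, not the actual Bahir sources):

object EtaExpansionSketch {
  // Stand-in for TestUtils.shouldRunTest(); the real gate reads test config.
  def shouldRunTest(): Boolean = sys.env.contains("RUN_CLOUDANT_TESTS")

  // Presumably the same shape as the runIf helper in ConditionalSparkFunSuite:
  // evaluate the condition and run the body only when it holds.
  def runIf(condition: () => Boolean)(body: => Unit): Unit =
    if (condition()) body

  def main(args: Array[String]): Unit = {
    // Deprecated under Scala 2.12: eta-expansion of a zero-argument method.
    //   runIf(shouldRunTest) { ... }

    // Warning-free form used throughout this commit: an explicit function literal.
    runIf(() => shouldRunTest()) {
      println("body runs only when the condition holds")
    }
  }
}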
@@ -40,7 +40,7 @@ class ClientSparkFunSuite extends ConditionalSparkFunSuite with BeforeAndAfter {
var spark: SparkSession = _

override def beforeAll() {
-    runIf(TestUtils.shouldRunTest) {
+    runIf(() => TestUtils.shouldRunTest()) {
tempDir.mkdirs()
tempDir.deleteOnExit()
setupClient()
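
The testIf helper used in the suites below presumably wraps ScalaTest's test, registering the body only when the condition holds; since its condition parameter is an explicit () => Boolean, the deprecated eta-expansion shows up at every call site. A hypothetical reconstruction under that assumption (the real ConditionalSparkFunSuite may differ in detail):

import org.scalatest.FunSuite

// Hypothetical reconstruction; the signature mirrors the call sites in this diff.
trait ConditionalFunSuiteSketch extends FunSuite {
  def testIf(name: String, condition: () => Boolean)(body: => Unit): Unit = {
    if (condition()) {
      test(name)(body)    // register and run the test as usual
    } else {
      ignore(name)(body)  // surface it as ignored rather than silently dropping it
    }
  }
}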
@@ -35,7 +35,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
.getOrCreate()
}

testIf("load and save data from Cloudant database", TestUtils.shouldRunTest) {
testIf("load and save data from Cloudant database", () => TestUtils.shouldRunTest()) {
// Loading data from Cloudant db
val df = spark.read.format("org.apache.bahir.cloudant").load("n_flight")
// Caching df in memory to speed computations
@@ -46,7 +46,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
assert(df.count() == 100)
}

testIf("load and count data from Cloudant search index", TestUtils.shouldRunTest) {
testIf("load and count data from Cloudant search index", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.option("index", "_design/view/_search/n_flights").load("n_flight")
val total = df.filter(df("flightSegmentId") >"AA14")
@@ -55,7 +55,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
assert(total == 89)
}

testIf("load data and count rows in filtered dataframe", TestUtils.shouldRunTest) {
testIf("load data and count rows in filtered dataframe", () => TestUtils.shouldRunTest()) {
// Loading data from Cloudant db
val df = spark.read.format("org.apache.bahir.cloudant")
.load("n_airportcodemapping")
@@ -64,7 +64,7 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
}

// save data to Cloudant test
testIf("save filtered dataframe to database", TestUtils.shouldRunTest) {
testIf("save filtered dataframe to database", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant").load("n_flight")

// Saving data frame with filter to Cloudant db
@@ -81,7 +81,8 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
}

// createDBOnSave option test
testIf("save dataframe to database using createDBOnSave=true option", TestUtils.shouldRunTest) {
testIf("save dataframe to database using createDBOnSave=true option",
() => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.load("n_airportcodemapping")

@@ -106,13 +107,13 @@ class CloudantAllDocsDFSuite extends ClientSparkFunSuite {
}

// view option tests
testIf("load and count data from view", TestUtils.shouldRunTest) {
testIf("load and count data from view", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.option("view", "_design/view/_view/AA0").load("n_flight")
assert(df.count() == 1)
}

testIf("load data from view with MapReduce function", TestUtils.shouldRunTest) {
testIf("load data from view with MapReduce function", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.option("view", "_design/view/_view/AAreduce?reduce=true")
.load("n_flight")
@@ -47,7 +47,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
assert(df.count() == 100)
}

testIf("load and count data from Cloudant search index", TestUtils.shouldRunTest) {
testIf("load and count data from Cloudant search index", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.option("index", "_design/view/_search/n_flights").load("n_flight")
val total = df.filter(df("flightSegmentId") >"AA14")
@@ -56,7 +56,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
assert(total == 89)
}

testIf("load data and verify deleted doc is not in results", TestUtils.shouldRunTest) {
testIf("load data and verify deleted doc is not in results", () => TestUtils.shouldRunTest()) {
val db = client.database("n_flight", false)
// delete a saved doc to verify it's not included when loading data
db.remove(deletedDoc.get("_id").getAsString, deletedDoc.get("_rev").getAsString)
@@ -68,7 +68,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
assert(!df.columns.contains("_deleted"))
}

testIf("load data and count rows in filtered dataframe", TestUtils.shouldRunTest) {
testIf("load data and count rows in filtered dataframe", () => TestUtils.shouldRunTest()) {
// Loading data from Cloudant db
val df = spark.read.format("org.apache.bahir.cloudant")
.load("n_airportcodemapping")
@@ -77,7 +77,7 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
}

// save data to Cloudant test
testIf("save filtered dataframe to database", TestUtils.shouldRunTest) {
testIf("save filtered dataframe to database", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant").load("n_flight")

// Saving data frame with filter to Cloudant db
@@ -94,7 +94,8 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
}

// createDBOnSave option test
testIf("save dataframe to database using createDBOnSave=true option", TestUtils.shouldRunTest) {
testIf("save dataframe to database using createDBOnSave=true option",
() => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.load("n_airportcodemapping")

@@ -124,21 +125,21 @@ class CloudantChangesDFSuite extends ClientSparkFunSuite {
}

// view option tests
testIf("load and count data from view", TestUtils.shouldRunTest) {
testIf("load and count data from view", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.option("view", "_design/view/_view/AA0").load("n_flight")
assert(df.count() == 1)
}

testIf("load data from view with MapReduce function", TestUtils.shouldRunTest) {
testIf("load data from view with MapReduce function", () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.option("view", "_design/view/_view/AAreduce?reduce=true")
.load("n_flight")
assert(df.count() == 1)
}

testIf("load data and verify total count of selector, filter, and view option",
-    TestUtils.shouldRunTest) {
+    () => TestUtils.shouldRunTest()) {
val df = spark.read.format("org.apache.bahir.cloudant")
.option("selector", "{\"flightSegmentId\": {\"$eq\": \"AA202\"}}")
.load("n_flight")
@@ -29,7 +29,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
spark.close()
}

testIf("invalid api receiver option throws an error message", TestUtils.shouldRunTest) {
testIf("invalid api receiver option throws an error message", () => TestUtils.shouldRunTest()) {
spark = SparkSession.builder().config(conf)
.config("cloudant.protocol", TestUtils.getProtocol)
.config("cloudant.host", TestUtils.getHost)
@@ -45,7 +45,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
s"is invalid. Please supply the valid option '_all_docs' or '_changes'.")
}

testIf("empty username option throws an error message", TestUtils.shouldRunTest) {
testIf("empty username option throws an error message", () => TestUtils.shouldRunTest()) {
spark = SparkSession.builder().config(conf)
.config("cloudant.protocol", TestUtils.getProtocol)
.config("cloudant.host", TestUtils.getHost)
@@ -60,7 +60,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
s"is empty. Please supply the required value.")
}

testIf("empty password option throws an error message", TestUtils.shouldRunTest) {
testIf("empty password option throws an error message", () => TestUtils.shouldRunTest()) {
spark = SparkSession.builder().config(conf)
.config("cloudant.protocol", TestUtils.getProtocol)
.config("cloudant.host", TestUtils.getHost)
@@ -75,7 +75,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
s"is empty. Please supply the required value.")
}

testIf("empty databaseName throws an error message", TestUtils.shouldRunTest) {
testIf("empty databaseName throws an error message", () => TestUtils.shouldRunTest()) {
spark = SparkSession.builder().config(conf)
.config("cloudant.protocol", TestUtils.getProtocol)
.config("cloudant.host", TestUtils.getHost)
@@ -91,7 +91,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
}

testIf("incorrect password throws an error message for changes receiver",
-    TestUtils.shouldRunTest) {
+    () => TestUtils.shouldRunTest()) {
spark = SparkSession.builder().config(conf)
.config("cloudant.protocol", TestUtils.getProtocol)
.config("cloudant.host", TestUtils.getHost)
@@ -108,7 +108,8 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
"\"reason\":\"Name or password is incorrect.\"}")
}

testIf("string with valid value for cloudant.numberOfRetries option", TestUtils.shouldRunTest) {
testIf("string with valid value for cloudant.numberOfRetries option",
() => TestUtils.shouldRunTest()) {
spark = SparkSession.builder().config(conf)
.config("cloudant.protocol", TestUtils.getProtocol)
.config("cloudant.host", TestUtils.getHost)
@@ -122,7 +123,7 @@ class CloudantOptionSuite extends ClientSparkFunSuite with BeforeAndAfter {
}

testIf("invalid value for cloudant.numberOfRetries option throws an error message",
-    TestUtils.shouldRunTest) {
+    () => TestUtils.shouldRunTest()) {
spark = SparkSession.builder().config(conf)
.config("cloudant.protocol", TestUtils.getProtocol)
.config("cloudant.host", TestUtils.getHost)
@@ -39,7 +39,7 @@ class CloudantSparkSQLSuite extends ClientSparkFunSuite {
}

testIf("verify results from temp view of database n_airportcodemapping",
-    TestUtils.shouldRunTest) {
+    () => TestUtils.shouldRunTest()) {
// create a temp table from Cloudant db and query it using sql syntax
val sparkSql = spark.sql(
s"""
@@ -67,7 +67,7 @@ class CloudantSparkSQLSuite extends ClientSparkFunSuite {
assert(df2count == airportData.count())
}

testIf("verify results from temp view of index in n_flight", TestUtils.shouldRunTest) {
testIf("verify results from temp view of index in n_flight", () => TestUtils.shouldRunTest()) {
// create a temp table from Cloudant index and query it using sql syntax
val sparkSql = spark.sql(
s"""
@@ -65,7 +65,7 @@ class PubNubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
}
}

testIf("Stream receives messages", shouldRunTest) {
testIf("Stream receives messages", () => PubNubStreamSuite.this.shouldRunTest()) {
val nbOfMsg = 5
var publishedMessages: List[JsonObject] = List()
@volatile var receivedMessages: Set[SparkPubNubMessage] = Set()
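
In the PubNub and Twitter suites the condition is the suite's own shouldRunTest() member, and the new literals qualify the receiver as PubNubStreamSuite.this (respectively TwitterStreamSuite.this). A plain () => shouldRunTest() would presumably resolve to the same member; the qualified form simply makes the enclosing instance explicit. A small illustration (class and member names are illustrative):

class QualifiedThisSketch {
  def shouldRunTest(): Boolean = true

  // Both literals call the same member; the second spells out the enclosing
  // instance, which also survives shadowing by a like-named definition in
  // nested scopes.
  val plain: () => Boolean = () => shouldRunTest()
  val qualified: () => Boolean = () => QualifiedThisSketch.this.shouldRunTest()
}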
@@ -101,7 +101,7 @@ class PubNubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
}
}

testIf("Message filtering", shouldRunTest) {
testIf("Message filtering", () => PubNubStreamSuite.this.shouldRunTest()) {
val config = new PNConfiguration()
config.setSubscribeKey(subscribeKey)
config.setPublishKey(publishKey)
@@ -134,7 +134,7 @@ class PubNubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
}
}

testIf("Test time token", shouldRunTest) {
testIf("Test time token", () => PubNubStreamSuite.this.shouldRunTest()) {
val config = new PNConfiguration()
config.setSubscribeKey(subscribeKey)
config.setPublishKey(publishKey)
@@ -52,7 +52,7 @@ class PubsubStreamSuite extends ConditionalSparkFunSuite with Eventually with Be
private var subForCreateFullName: String = _

override def beforeAll(): Unit = {
-    runIf(PubsubTestUtils.shouldRunTest) {
+    runIf(() => PubsubTestUtils.shouldRunTest()) {
pubsubTestUtils = new PubsubTestUtils
topicFullName = pubsubTestUtils.getFullTopicPath(topicName)
subscriptionFullName = pubsubTestUtils.getFullSubscriptionPath(subscriptionName)
@@ -71,7 +71,7 @@ class TwitterStreamSuite extends ConditionalSparkFunSuite
ssc, Some(authorization), Some(query), StorageLevel.MEMORY_AND_DISK_SER_2)
}

testIf("messages received", shouldRunTest) {
testIf("messages received", () => TwitterStreamSuite.this.shouldRunTest()) {
val userId = TwitterFactory.getSingleton.updateStatus(
UUID.randomUUID().toString
).getUser.getId
