-
Notifications
You must be signed in to change notification settings - Fork 28k
/
KafkaRDDSuite.scala
87 lines (72 loc) · 2.82 KB
/
KafkaRDDSuite.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd.kafka
import scala.util.Random
import kafka.serializer.StringDecoder
import org.scalatest.BeforeAndAfter
import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.streaming.kafka.KafkaStreamSuiteBase
/**
 * Tests that a [[KafkaRDD]] reads exactly the offset range it was created with:
 * all available messages on first read, zero messages once the consumed offsets
 * have been committed, and only the newly-produced delta afterwards.
 */
class KafkaRDDSuite extends KafkaStreamSuiteBase with BeforeAndAfter {
  var sc: SparkContext = _

  before {
    setupKafka()
  }

  after {
    if (sc != null) {
      // stop() has a side effect, so keep the parentheses per convention
      sc.stop()
      sc = null
    }
    tearDownKafka()
  }

  test("Kafka RDD") {
    val sparkConf = new SparkConf().setMaster("local[4]").setAppName(this.getClass.getSimpleName)
    sc = new SparkContext(sparkConf)
    val topic = "topic1"
    val sent = Map("a" -> 5, "b" -> 3, "c" -> 10)
    createTopic(topic)
    produceAndSendMessage(topic, sent)

    // Randomized group id so committed offsets from a previous run of this
    // suite cannot leak into this test's consumer-offset lookups.
    val kafkaParams = Map("metadata.broker.list" -> s"localhost:$brokerPort",
      "group.id" -> s"test-consumer-${Random.nextInt(10000)}")

    val kc = new KafkaCluster(kafkaParams)

    // Phase 1: no consumer offsets exist yet, so getRdd falls back to the
    // earliest leader offsets and the RDD should cover every message sent.
    val rdd = getRdd(kc, Set(topic))
    assert(rdd.isDefined)
    assert(rdd.get.count === sent.values.sum)

    // Commit the end of the consumed range as this group's offsets.
    kc.setConsumerOffsets(kafkaParams("group.id"), rdd.get.untilOffsets)

    // Phase 2: rdd2's offset range is fixed at creation time (from == until),
    // so messages produced AFTER it is created must not appear in it.
    val rdd2 = getRdd(kc, Set(topic))
    val sent2 = Map("d" -> 1)
    produceAndSendMessage(topic, sent2)
    assert(rdd2.isDefined)
    assert(rdd2.get.count === 0)

    // Phase 3: rdd3 spans committed offsets -> latest at creation time, which
    // is exactly sent2; the "extra" messages produced afterwards are excluded.
    val rdd3 = getRdd(kc, Set(topic))
    produceAndSendMessage(topic, Map("extra" -> 22))
    assert(rdd3.isDefined)
    assert(rdd3.get.count === sent2.values.sum)
  }

  /**
   * Builds a [[KafkaRDD]] over `topics`, reading from this group's committed
   * consumer offsets (falling back to the earliest leader offsets when none
   * are committed) up to the latest leader offsets.
   *
   * @return `None` if partition or offset metadata could not be fetched.
   */
  private def getRdd(kc: KafkaCluster, topics: Set[String]) = {
    val groupId = kc.kafkaParams("group.id")
    for {
      topicPartitions <- kc.getPartitions(topics).right.toOption
      from <- kc.getConsumerOffsets(groupId, topicPartitions).right.toOption.orElse(
        kc.getEarliestLeaderOffsets(topicPartitions).right.toOption)
      until <- kc.getLatestLeaderOffsets(topicPartitions).right.toOption
    } yield {
      // Message handler renders "offset message" so counts (and, if needed,
      // contents) are easy to check.
      new KafkaRDD[String, String, StringDecoder, StringDecoder, String](
        sc, kc.kafkaParams, from, until, mmd => s"${mmd.offset} ${mmd.message}")
    }
  }
}