
Commit

Changes based on Discussions on PR, removing use of RawTextHelper from examples
techaddict committed Apr 30, 2014
1 parent 8ed2d3f commit 47ef86c
Showing 3 changed files with 2 additions and 7 deletions.
@@ -24,7 +24,6 @@ import kafka.producer._
 import org.apache.spark.streaming._
 import org.apache.spark.streaming.StreamingContext._
 import org.apache.spark.streaming.kafka._
-import org.apache.spark.streaming.util.RawTextHelper._
 
 // scalastyle:off
 /**
@@ -59,7 +58,7 @@ object KafkaWordCount {
     val lines = KafkaUtils.createStream(ssc, zkQuorum, group, topicpMap).map(_._2)
     val words = lines.flatMap(_.split(" "))
     val wordCounts = words.map(x => (x, 1L))
-      .reduceByKeyAndWindow(add _, subtract _, Minutes(10), Seconds(2), 2)
+      .reduceByKeyAndWindow(_ + _, _ - _, Minutes(10), Seconds(2), 2)
     wordCounts.print()
 
     ssc.start()
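
The only functional change in the Kafka word-count example is that the windowed reduction now uses the inline lambdas _ + _ and _ - _ instead of the add and subtract helpers previously imported from RawTextHelper; the behavior is identical, since both simply add counts entering the window and subtract counts leaving it. A minimal, self-contained sketch of the same pattern follows; the local master, socket source, and checkpoint path are illustrative assumptions, not part of this commit.

// Sketch only: reproduces the example's windowed count with the inline lambdas.
// The local[2] master, socket source, and checkpoint path are assumptions made
// so the snippet runs standalone; they are not taken from the commit above.
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
import org.apache.spark.streaming.StreamingContext._

object WindowedWordCountSketch {
  def main(args: Array[String]): Unit = {
    val ssc = new StreamingContext("local[2]", "WindowedWordCountSketch", Seconds(2))
    ssc.checkpoint("/tmp/windowed-wordcount-sketch") // the inverse-function form requires checkpointing

    val lines = ssc.socketTextStream("localhost", 9999) // any DStream[String] source works the same way
    val words = lines.flatMap(_.split(" "))

    // _ + _ adds the counts entering the 10-minute window and _ - _ subtracts the
    // counts leaving it, sliding every 2 seconds, using 2 reduce partitions.
    val wordCounts = words.map(x => (x, 1L))
      .reduceByKeyAndWindow(_ + _, _ - _, Minutes(10), Seconds(2), 2)

    wordCounts.print()
    ssc.start()
    ssc.awaitTermination()
  }
}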
@@ -19,7 +19,6 @@ package org.apache.spark.examples.streaming
 
 import org.apache.spark.storage.StorageLevel
 import org.apache.spark.streaming._
-import org.apache.spark.streaming.util.RawTextHelper
 import org.apache.spark.util.IntParam
 
 /**
@@ -52,9 +51,6 @@ object RawNetworkGrep {
     val ssc = new StreamingContext(master, "RawNetworkGrep", Milliseconds(batchMillis),
       System.getenv("SPARK_HOME"), StreamingContext.jarOfClass(this.getClass).toSeq)
 
-    // Warm up the JVMs on master and slave for JIT compilation to kick in
-    RawTextHelper.warmUp(ssc.sparkContext)
-
     val rawStreams = (1 to numStreams).map(_ =>
       ssc.rawSocketStream[String](host, port, StorageLevel.MEMORY_ONLY_SER_2)).toArray
     val union = ssc.union(rawStreams)
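
The deleted lines above were the RawNetworkGrep example's JVM warm-up step. As a rough illustration of what such a warm-up can look like, here is a sketch under the assumption that warming up just means running a small throwaway job a few times so JIT compilation kicks in on the driver and executors before measurement starts; this is not the removed RawTextHelper.warmUp implementation.

// Hypothetical stand-in for a JVM warm-up step; not the removed RawTextHelper.warmUp.
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

object WarmUpSketch {
  def warmUp(sc: SparkContext): Unit = {
    // A tiny synthetic dataset; the contents are irrelevant, only the work matters.
    val sample = sc.parallelize(Seq.fill(10000)("the quick brown fox jumps over the lazy dog"), 10)
    for (_ <- 1 to 3) {
      sample.flatMap(_.split(" "))
        .map(word => (word, 1L))
        .reduceByKey(_ + _)
        .count() // force evaluation; the result is thrown away
    }
  }
}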
@@ -22,7 +22,7 @@ import org.apache.spark.SparkContext._
 import org.apache.spark.util.collection.OpenHashMap
 import scala.collection.JavaConversions.mapAsScalaMap
 
-private[spark]
+private[streaming]
 object RawTextHelper {
 
   /**
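
The only change to RawTextHelper itself is its access qualifier: private[spark] made the object visible to all code under org.apache.spark, while private[streaming] restricts it to the org.apache.spark.streaming subtree, consistent with the examples no longer depending on it. A quick illustration of the difference, with made-up object names:

// Illustration only (not from this commit) of Scala's package-qualified access modifiers.
package org.apache.spark.streaming.util

// Visible to any code under org.apache.spark (the old qualifier).
private[spark] object VisibleAcrossSpark

// Visible only to code under org.apache.spark.streaming (the new qualifier).
private[streaming] object VisibleWithinStreaming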
