Allowing printlns in example files
jonalter committed Jun 30, 2015
1 parent ca4b477 commit 5b50da1
Showing 84 changed files with 168 additions and 251 deletions.
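Every file in this diff follows the same pattern: rather than toggling the scalastyle println check off and on around each individual print site, the check is now disabled once before the package declaration and re-enabled after the file's closing brace, exempting the whole example file. A minimal sketch of the resulting file shape (ExampleApp is a hypothetical name, not a file from this commit):

// scalastyle:off println
package org.apache.spark.examples

// Hypothetical example illustrating the whole-file suppression pattern
// this commit applies: the println check is off for the entire file,
// so no per-statement toggles are needed around individual printlns.
object ExampleApp {
  def main(args: Array[String]): Unit = {
    println("Hello from an example")
  }
}
// scalastyle:on println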

examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import org.apache.spark.{SparkConf, SparkContext}
@@ -38,7 +39,6 @@ object BroadcastTest {
 
     val arr1 = (0 until num).toArray
 
-    // scalastyle:off println
     for (i <- 0 until 3) {
       println("Iteration " + i)
       println("===========")
@@ -49,8 +49,8 @@
       observedSizes.collect().foreach(i => println(i))
       println("Iteration %d took %.0f milliseconds".format(i, (System.nanoTime - startTime) / 1E6))
     }
-    // scalastyle:on println
 
     sc.stop()
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/CassandraCQLTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import java.nio.ByteBuffer
@@ -107,7 +108,6 @@ object CassandraCQLTest {
       classOf[java.util.Map[String, ByteBuffer]],
       classOf[java.util.Map[String, ByteBuffer]])
 
-    // scalastyle:off println
     println("Count: " + casRdd.count)
     val productSaleRDD = casRdd.map {
       case (key, value) => {
@@ -118,7 +118,6 @@
     aggregatedRDD.collect().foreach {
       case (productId, saleCount) => println(productId + ":" + saleCount)
     }
-    // scalastyle:on println
 
     val casoutputCF = aggregatedRDD.map {
       case (productId, saleCount) => {
@@ -142,3 +141,4 @@ object CassandraCQLTest {
     sc.stop()
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import java.nio.ByteBuffer
@@ -100,9 +101,7 @@ object CassandraTest {
     val counts = paraRdd.flatMap(p => p.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
 
     counts.collect().foreach {
-      // scalastyle:off println
       case (word, count) => println(word + ":" + count)
-      // scalastyle:on println
     }
 
     counts.map {
@@ -132,6 +131,7 @@
     sc.stop()
   }
 }
+// scalastyle:on println
 
 /*
 create keyspace casDemo;

examples/src/main/scala/org/apache/spark/examples/DFSReadWriteTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import java.io.File
@@ -56,9 +57,7 @@ object DFSReadWriteTest {
     "localFile - (string) local file to use in test\n" +
     "dfsDir - (string) DFS directory for read/write tests\n"
 
-    // scalastyle:off println
     println(usage)
-    // scalastyle:on println
   }
 
   private def parseArgs(args: Array[String]): Unit = {
@@ -69,7 +68,6 @@
 
     var i = 0
 
-    // scalastyle:off println
     localFilePath = new File(args(i))
     if (!localFilePath.exists) {
       System.err.println("Given path (" + args(i) + ") does not exist.\n")
@@ -82,7 +80,6 @@
       printUsage()
       System.exit(1)
     }
-    // scalastyle:on println
 
     i += 1
     dfsDirPath = args(i)
@@ -101,7 +98,6 @@
   def main(args: Array[String]): Unit = {
     parseArgs(args)
 
-    // scalastyle:off println
     println("Performing local word count")
     val fileContents = readFile(localFilePath.toString())
     val localWordCount = runLocalWordCount(fileContents)
@@ -138,7 +134,7 @@ object DFSReadWriteTest {
       println(s"Failure! Local Word Count ($localWordCount) " +
         s"and DFS Word Count ($dfsWordCount) disagree.")
     }
-    // scalastyle:on println
 
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/DriverSubmissionTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import scala.collection.JavaConversions._
@@ -25,7 +26,6 @@ import org.apache.spark.util.Utils
  * test driver submission in the standalone scheduler. */
 object DriverSubmissionTest {
   def main(args: Array[String]) {
-    // scalastyle:off println
     if (args.size < 1) {
       println("Usage: DriverSubmissionTest <seconds-to-sleep>")
       System.exit(0)
@@ -45,6 +45,6 @@ object DriverSubmissionTest {
       println(s"Alive for $i out of $numSecondsToSleep seconds")
       Thread.sleep(1000)
     }
-    // scalastyle:on println
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import java.util.Random
@@ -48,10 +49,9 @@ object GroupByTest {
     // Enforce that everything has been calculated and in cache
     pairs1.count()
 
-    // scalastyle:off println
     println(pairs1.groupByKey(numReducers).count())
-    // scalastyle:on println
 
     sc.stop()
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import org.apache.hadoop.hbase.client.HBaseAdmin
@@ -37,9 +38,7 @@ object HBaseTest {
     val conf = HBaseConfiguration.create()
 
     if (args.length < 1) {
-      // scalastyle:off println
       System.err.println("Usage: HBaseTest <table_name>")
-      // scalastyle:on println
       System.exit(1)
     }
 
@@ -64,3 +63,4 @@ object HBaseTest {
     admin.close()
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import org.apache.spark._
@@ -24,7 +25,6 @@ object HdfsTest {
 
   /** Usage: HdfsTest [file] */
   def main(args: Array[String]) {
-    // scalastyle:off println
    if (args.length < 1) {
       System.err.println("Usage: HdfsTest <file>")
       System.exit(1)
@@ -39,7 +39,7 @@ object HdfsTest {
       val end = System.currentTimeMillis()
       println("Iteration " + iter + " took " + (end-start) + " ms")
     }
-    // scalastyle:on println
     sc.stop()
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/LocalALS.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import org.apache.commons.math3.linear._
@@ -93,18 +94,15 @@ object LocalALS {
   }
 
   def showWarning() {
-    // scalastyle:off println
     System.err.println(
       """WARN: This is a naive implementation of ALS and is given as an example!
         |Please use the ALS method found in org.apache.spark.mllib.recommendation
        |for more conventional use.
      """.stripMargin)
-    // scalastyle:on println
   }
 
   def main(args: Array[String]) {
 
-    // scalastyle:off println
     args match {
      case Array(m, u, f, iters) => {
        M = m.toInt
@@ -136,7 +134,6 @@ object LocalALS {
       println("RMSE = " + rmse(R, ms, us))
       println()
     }
-    // scalastyle:on println
   }
 
   private def randomVector(n: Int): RealVector =
@@ -146,3 +143,4 @@ object LocalALS {
     new Array2DRowRealMatrix(Array.fill(rows, cols)(math.random))
 
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import java.util.Random
@@ -40,14 +41,12 @@ object LocalFileLR {
   }
 
   def showWarning() {
-    // scalastyle:off println
     System.err.println(
       """WARN: This is a naive implementation of Logistic Regression and is given as an example!
         |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
         |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
         |for more conventional use.
       """.stripMargin)
-    // scalastyle:on println
   }
 
   def main(args: Array[String]) {
@@ -60,7 +59,6 @@
 
     // Initialize w to a random value
     var w = DenseVector.fill(D){2 * rand.nextDouble - 1}
-    // scalastyle:off println
     println("Initial w: " + w)
 
     for (i <- 1 to ITERATIONS) {
@@ -74,6 +72,6 @@
     }
 
     println("Final w: " + w)
-    // scalastyle:on println
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import java.util.Random
@@ -65,13 +66,11 @@ object LocalKMeans {
   }
 
   def showWarning() {
-    // scalastyle:off println
     System.err.println(
       """WARN: This is a naive implementation of KMeans Clustering and is given as an example!
         |Please use the KMeans method found in org.apache.spark.mllib.clustering
         |for more conventional use.
       """.stripMargin)
-    // scalastyle:on println
   }
 
   def main(args: Array[String]) {
@@ -92,9 +91,7 @@
       kPoints.put(i, iter.next())
     }
 
-    // scalastyle:off println
     println("Initial centers: " + kPoints)
-    // scalastyle:on println
 
     while(tempDist > convergeDist) {
       var closest = data.map (p => (closestPoint(p, kPoints), (p, 1)))
@@ -120,8 +117,7 @@
       }
     }
 
-    // scalastyle:off println
     println("Final centers: " + kPoints)
-    // scalastyle:on println
   }
 }
+// scalastyle:on println

examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 
+// scalastyle:off println
 package org.apache.spark.examples
 
 import java.util.Random
@@ -47,14 +48,12 @@ object LocalLR {
   }
 
   def showWarning() {
-    // scalastyle:off println
     System.err.println(
       """WARN: This is a naive implementation of Logistic Regression and is given as an example!
         |Please use either org.apache.spark.mllib.classification.LogisticRegressionWithSGD or
         |org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
         |for more conventional use.
       """.stripMargin)
-    // scalastyle:on println
   }
 
   def main(args: Array[String]) {
@@ -64,7 +63,6 @@
     val data = generateData
     // Initialize w to a random value
     var w = DenseVector.fill(D){2 * rand.nextDouble - 1}
-    // scalastyle:off println
     println("Initial w: " + w)
 
    for (i <- 1 to ITERATIONS) {
@@ -78,6 +76,6 @@
     }
 
     println("Final w: " + w)
-    // scalastyle:on println
   }
 }
+// scalastyle:on println

[Diff truncated: the remaining changed files are not rendered here.]
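Note: the block-level off/on comments removed above are scalastyle's standard comment filters, which disable a named check only for the enclosed lines. Outside the example files, where the println check presumably stays enforced, deliberate console output would still use that narrower form, sketched here with a hypothetical object:

package org.apache.spark.examples

object DeliberateOutput {
  def main(args: Array[String]): Unit = {
    // scalastyle:off println
    // Suppression is scoped to these lines only; the check applies
    // again as soon as the matching "on" comment is reached.
    println("intentional console output")
    // scalastyle:on println
  }
}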