
More println cleanup
jonalter committed Jul 2, 2015
1 parent aedaf80 commit 0b1dcb4
Showing 7 changed files with 18 additions and 40 deletions.
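
The changes follow one pattern throughout: bare `println` calls, previously wrapped in `// scalastyle:off println` / `// scalastyle:on println` markers to silence the style checker, are either routed through Spark's internal `org.apache.spark.Logging` trait or deleted where the output was pure noise. A minimal sketch of the before/after shape, using a simplified stand-in for the `Logging` trait (the real one wraps SLF4J and is not part of this diff):

```scala
// Simplified stand-in for org.apache.spark.Logging, for illustration only.
trait Logging {
  protected def logDebug(msg: => String): Unit = Console.err.println(s"DEBUG $msg")
  protected def logWarning(msg: => String): Unit = Console.err.println(s"WARN $msg")
  protected def logError(msg: => String): Unit = Console.err.println(s"ERROR $msg")
}

// Before:  object Worker { def run(): Unit = println("done") }
// After:   mix in Logging and pick an appropriate level.
object Worker extends Logging {
  def run(): Unit = logDebug("done")
}
```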
@@ -40,7 +40,7 @@ private[spark] object UIWorkloadGenerator {
     if (args.length < 3) {
       // scalastyle:off println
       println(
-        "usage: ./bin/spark-class org.apache.spark.ui.UIWorkloadGenerator " +
+        "Usage: ./bin/spark-class org.apache.spark.ui.UIWorkloadGenerator " +
         "[master] [FIFO|FAIR] [#job set (4 jobs per set)]")
       // scalastyle:on println
       System.exit(1)
@@ -33,9 +33,7 @@ class NumericParserSuite extends SparkFunSuite {
     malformatted.foreach { s =>
       intercept[SparkException] {
         NumericParser.parse(s)
-        // scalastyle:off println
-        println(s"Didn't detect malformatted string $s.")
-        // scalastyle:on println
+        throw new RuntimeException(s"Didn't detect malformatted string $s.")
       }
     }
   }
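
The test change above is the one spot where a `println` became a `throw` rather than a log call: the message only printed when `NumericParser.parse` failed to raise the expected `SparkException`, so turning it into a `RuntimeException` puts the diagnostic into the test failure itself instead of on stdout. A self-contained sketch of the pattern (hypothetical `parse`, not Spark's `NumericParser`):

```scala
import org.scalatest.FunSuite

class InterceptPatternSuite extends FunSuite {
  // Hypothetical parser standing in for NumericParser.parse.
  def parse(s: String): Int = Integer.parseInt(s)

  test("malformed input is rejected") {
    intercept[NumberFormatException] {
      parse("not-a-number")
      // Reached only if parse did NOT throw; failing loudly here
      // surfaces the message in the test report instead of stdout.
      throw new RuntimeException("Didn't detect malformed string.")
    }
  }
}
```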
@@ -24,7 +24,7 @@ import org.apache.spark.unsafe.types.UTF8String

 import scala.collection.mutable.HashSet
 
-import org.apache.spark.{AccumulatorParam, Accumulator}
+import org.apache.spark.{AccumulatorParam, Accumulator, Logging}
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.trees.TreeNodeRef
@@ -57,7 +57,7 @@ package object debug {
    * Augments [[DataFrame]]s with debug methods.
    */
   @DeveloperApi
-  implicit class DebugQuery(query: DataFrame) {
+  implicit class DebugQuery(query: DataFrame) extends Logging {
     def debug(): Unit = {
       val plan = query.queryExecution.executedPlan
       val visited = new collection.mutable.HashSet[TreeNodeRef]()
@@ -66,9 +66,7 @@ package object debug {
         visited += new TreeNodeRef(s)
         DebugNode(s)
       }
-      // scalastyle:off println
-      println(s"Results returned: ${debugPlan.execute().count()}")
-      // scalastyle:on println
+      logDebug(s"Results returned: ${debugPlan.execute().count()}")
       debugPlan.foreach {
         case d: DebugNode => d.dumpStats()
         case _ =>
@@ -84,15 +82,11 @@ package object debug {
         TypeCheck(s)
       }
       try {
-        // scalastyle:off println
-        println(s"Results returned: ${debugPlan.execute().count()}")
-        // scalastyle:on println
+        logDebug(s"Results returned: ${debugPlan.execute().count()}")
       } catch {
         case e: Exception =>
           def unwrap(e: Throwable): Throwable = if (e.getCause == null) e else unwrap(e.getCause)
-          // scalastyle:off println
-          println(s"Deepest Error: ${unwrap(e)}")
-          // scalastyle:on println
+          logDebug(s"Deepest Error: ${unwrap(e)}")
       }
     }
   }
@@ -125,15 +119,11 @@ package object debug {
     val columnStats: Array[ColumnMetrics] = Array.fill(child.output.size)(new ColumnMetrics())
 
     def dumpStats(): Unit = {
-      // scalastyle:off println
-      println(s"== ${child.simpleString} ==")
-      println(s"Tuples output: ${tupleCount.value}")
-      // scalastyle:on println
+      logDebug(s"== ${child.simpleString} ==")
+      logDebug(s"Tuples output: ${tupleCount.value}")
       child.output.zip(columnStats).foreach { case(attr, metric) =>
         val actualDataTypes = metric.elementTypes.value.mkString("{", ",", "}")
-        // scalastyle:off println
-        println(s" ${attr.name} ${attr.dataType}: $actualDataTypes")
-        // scalastyle:on println
+        logDebug(s" ${attr.name} ${attr.dataType}: $actualDataTypes")
       }
     }
 
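
One behavioral consequence of the `debug` package changes is easy to miss: Spark's `logDebug` takes its message by name and checks the log level before building it, so after this diff `debugPlan.execute().count()` only runs when debug logging is enabled. A small sketch of that evaluation behavior, assuming the conventional guarded-logging shape (not the exact Spark implementation):

```scala
trait LazyLogging {
  def debugEnabled: Boolean
  // `msg` is by-name: the interpolation (and any side effects in it)
  // is evaluated only when the debug level is actually enabled.
  def logDebug(msg: => String): Unit = if (debugEnabled) println(s"DEBUG $msg")
}

object Demo extends LazyLogging {
  val debugEnabled = false
  var executed = 0
  def count(): Long = { executed += 1; 42L }

  def main(args: Array[String]): Unit = {
    logDebug(s"Results returned: ${count()}")
    assert(executed == 0) // count() never ran because debug logging is off
  }
}
```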
@@ -40,7 +40,7 @@ import org.apache.spark.Logging
 import org.apache.spark.sql.hive.HiveContext
 import org.apache.spark.util.Utils
 
-private[hive] object SparkSQLCLIDriver {
+private[hive] object SparkSQLCLIDriver extends Logging {
   private var prompt = "spark-sql"
   private var continuedPrompt = "".padTo(prompt.length, ' ')
   private var transport: TSocket = _
@@ -158,14 +158,13 @@ private[hive] object SparkSQLCLIDriver {
       System.exit(cli.processLine(sessionState.execString))
     }
 
-    // scalastyle:off println
     try {
       if (sessionState.fileName != null) {
         System.exit(cli.processFile(sessionState.fileName))
       }
     } catch {
       case e: FileNotFoundException =>
-        System.err.println(s"Could not open input file for reading. (${e.getMessage})")
+        logError(s"Could not open input file for reading. (${e.getMessage})")
         System.exit(3)
     }
 
@@ -181,16 +180,15 @@ private[hive] object SparkSQLCLIDriver {
         val historyFile = historyDirectory + File.separator + ".hivehistory"
         reader.setHistory(new History(new File(historyFile)))
       } else {
-        System.err.println("WARNING: Directory for Hive history file: " + historyDirectory +
+        logWarning("WARNING: Directory for Hive history file: " + historyDirectory +
           " does not exist. History will not be available during this session.")
       }
     } catch {
       case e: Exception =>
-        System.err.println("WARNING: Encountered an error while trying to initialize Hive's " +
+        logWarning("WARNING: Encountered an error while trying to initialize Hive's " +
           "history file. History will not be available during this session.")
-        System.err.println(e.getMessage)
+        logWarning(e.getMessage)
     }
-    // scalastyle:on println
 
     val clientTransportTSocketField = classOf[CliSessionState].getDeclaredField("transport")
     clientTransportTSocketField.setAccessible(true)
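
Worth noting for the CLI driver hunks: messages that previously went straight to `System.err`, and therefore always reached the user, now pass through the logging subsystem, so their visibility depends on the configured log level; the `System.exit(3)` path for an unreadable input file is unchanged.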
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.parse._
 import org.apache.hadoop.hive.ql.plan.PlanUtils
 import org.apache.hadoop.hive.ql.session.SessionState
 
+import org.apache.spark.Logging
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.expressions._
@@ -73,7 +74,7 @@ private[hive] case class CreateTableAsSelect(
 }
 
 /** Provides a mapping from HiveQL statements to catalyst logical plans and expression trees. */
-private[hive] object HiveQl {
+private[hive] object HiveQl extends Logging {
   protected val nativeCommands = Seq(
     "TOK_ALTERDATABASE_OWNER",
     "TOK_ALTERDATABASE_PROPERTIES",
@@ -186,9 +187,7 @@ private[hive] object HiveQl {
         .map(ast => Option(ast).map(_.transform(rule)).orNull))
     } catch {
       case e: Exception =>
-        // scalastyle:off println
-        println(dumpTree(n))
-        // scalastyle:on println
+        logError(dumpTree(n).toString)
         throw e
     }
   }
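
The added `.toString` in the `HiveQl` hunk reflects a signature difference: `println` accepts `Any`, while the `Logging` helpers take a `String`, so whatever `dumpTree` returns (evidently not already a `String`) must be converted explicitly.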
@@ -86,10 +86,6 @@ class InsertIntoHiveTableSuite extends QueryTest with BeforeAndAfter {
     val message = intercept[QueryExecutionException] {
       sql("CREATE TABLE doubleCreateAndInsertTest (key int, value string)")
     }.getMessage
-
-    // scalastyle:off println
-    println("message!!!!" + message)
-    // scalastyle:on println
   }
 
   test("Double create does not fail when allowExisting = true") {
@@ -253,9 +253,6 @@ class PairUDF extends GenericUDF {
   )
 
   override def evaluate(args: Array[DeferredObject]): AnyRef = {
-    // scalastyle:off println
-    println("Type = %s".format(args(0).getClass.getName))
-    // scalastyle:on println
     Integer.valueOf(args(0).get.asInstanceOf[TestPair].entry._2)
   }
 
