Skip to content

Commit

Permalink
Review feedback.
Browse files · Browse the repository at this point in the history
  • Loading branch information
Marcelo Vanzin committed Sep 23, 2014
1 parent f01517c commit 67f5b02
Showing 1 changed file with 12 additions and 14 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ import org.apache.spark.util.Utils

class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {

// Snapshot of system properties taken before the suite mutates them,
// used to restore state in afterAll. NOTE(review): appears superseded by
// the retain-based cleanup below — confirm whether it is still read.
private var oldConf: Map[String, String] = _
// In-process mini YARN cluster the tests submit applications to.
private var yarnCluster: MiniYARNCluster = _
// Scratch directory for files created during the suite.
private var tempDir: File = _
// Placeholder jar handed to Spark as the application jar.
private var fakeSparkJar: File = _
Expand All @@ -45,8 +44,6 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1)
yarnCluster.init(new YarnConfiguration())
yarnCluster.start()

oldConf = sys.props.toMap
yarnCluster.getConfig().foreach { e =>
sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
}
Expand All @@ -62,16 +59,7 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {

override def afterAll() {
  yarnCluster.stop()

  // The suite (in beforeAll) injects "spark.*" entries into the JVM's system
  // properties so the mini cluster's config reaches SparkContext. Remove all
  // of them in one pass so later suites start from a clean environment.
  // This single retain replaces the earlier copy-the-map / foreach-remove /
  // restore-from-oldConf dance, which was redundant: retain already drops
  // every key matching the prefix in place.
  sys.props.retain { case (k, _) => !k.startsWith("spark.") }

  super.afterAll()
}

Expand Down Expand Up @@ -116,12 +104,22 @@ class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {
private object YarnClusterDriver extends Logging with Matchers {

def main(args: Array[String]) = {
if (args.length != 2) {
System.err.println(
s"""
|Invalid command line: ${args.mkString(" ")}
|
|Usage: YarnClusterDriver [master] [result file]
""".stripMargin)
System.exit(1)
}

val sc = new SparkContext(new SparkConf().setMaster(args(0))
.setAppName("yarn \"test app\" 'with quotes' and \\back\\slashes and $dollarSigns"))
val status = new File(args(1))
var result = "failure"
try {
val data = sc.parallelize(1 to 4).map(i => i).collect().toSet
val data = sc.parallelize(1 to 4, 4).collect().toSet
data should be (Set(1, 2, 3, 4))
result = "success"
} finally {
Expand Down

0 comments on commit 67f5b02

Please sign in to comment.