
Commit

Fix the code style
zsxwing committed Apr 8, 2015
1 parent 6b2a104 commit e8ad0a5
Showing 2 changed files with 8 additions and 9 deletions.
core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala

@@ -19,16 +19,15 @@ package org.apache.spark.deploy.client
 
 import java.util.concurrent._
 
-import org.apache.spark.deploy.master.Master
+import scala.util.control.NonFatal
 
-import org.apache.spark.rpc._
 import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.Master
+import org.apache.spark.rpc._
 import org.apache.spark.util.Utils
 
-import scala.util.control.NonFatal
-
 /**
  * Interface allowing applications to speak with a Spark deploy cluster. Takes a master URL,
  * an app description, and a listener for cluster events, and calls back the listener when various
@@ -56,8 +55,9 @@ private[spark] class AppClient(
   private class ClientEndpoint(override val rpcEnv: RpcEnv) extends ThreadSafeRpcEndpoint
     with Logging {
 
-    var master: Option[RpcEndpointRef] = None
-    var alreadyDisconnected = false // To avoid calling listener.disconnected() multiple times
+    private var master: Option[RpcEndpointRef] = None
+    // To avoid calling listener.disconnected() multiple times
+    private var alreadyDisconnected = false
     @volatile private var alreadyDead = false // To avoid calling listener.dead() multiple times
     @volatile private var registerMasterFutures: Array[Future[_]] = null
     @volatile private var registrationRetryTimer: ScheduledFuture[_] = null
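The reordering follows Spark's usual import grouping (java, then scala, then org.apache.spark, alphabetized within each group). Reconstructed from the hunk above, the import block of AppClient.scala after this change would read:

import java.util.concurrent._

import scala.util.control.NonFatal

import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.Master
import org.apache.spark.rpc._
import org.apache.spark.util.Utils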
core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala

@@ -27,18 +27,17 @@ import scala.collection.JavaConversions._
 import scala.collection.mutable.{HashMap, HashSet}
 import scala.concurrent.ExecutionContext
 import scala.util.Random
+import scala.util.control.NonFatal
 
-import org.apache.spark.rpc._
 import org.apache.spark.{Logging, SecurityManager, SparkConf}
 import org.apache.spark.deploy.{Command, ExecutorDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.{DriverState, Master}
 import org.apache.spark.deploy.worker.ui.WorkerWebUI
 import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.rpc._
 import org.apache.spark.util.{SignalLogger, Utils}
 
-import scala.util.control.NonFatal
-
 private[worker] class Worker(
     override val rpcEnv: RpcEnv,
     webUiPort: Int,
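Applying the same grouping to Worker.scala, the import block after this change would be:

import scala.collection.JavaConversions._
import scala.collection.mutable.{HashMap, HashSet}
import scala.concurrent.ExecutionContext
import scala.util.Random
import scala.util.control.NonFatal

import org.apache.spark.{Logging, SecurityManager, SparkConf}
import org.apache.spark.deploy.{Command, ExecutorDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages._
import org.apache.spark.deploy.master.{DriverState, Master}
import org.apache.spark.deploy.worker.ui.WorkerWebUI
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.rpc._
import org.apache.spark.util.{SignalLogger, Utils}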
