Skip to content

Commit

Permalink
Remove spark.executor.env.port and spark.standalone.client.port
Browse files Browse the repository at this point in the history
Verified on a cluster that these are not actually necessary. This
is because akka is the one that initiates the connection, and all
responses use the same TCP connection.
  • Loading branch information
andrewor14 committed Aug 5, 2014
1 parent 1d2d5c6 commit 86461e2
Show file tree
Hide file tree
Showing 4 changed files with 2 additions and 28 deletions.
5 changes: 1 addition & 4 deletions core/src/main/scala/org/apache/spark/deploy/Client.scala
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,6 @@ object Client {
}

val conf = new SparkConf()
val port = conf.getInt("spark.standalone.client.port", 0)
val driverArgs = new ClientArguments(args)

if (!driverArgs.logLevel.isGreaterOrEqual(Level.WARN)) {
Expand All @@ -156,10 +155,8 @@ object Client {
conf.set("akka.loglevel", driverArgs.logLevel.toString.replace("WARN", "WARNING"))
Logger.getRootLogger.setLevel(driverArgs.logLevel)

// TODO: See if we can initialize akka so return messages are sent back using the same TCP
// flow. Else, this (sadly) requires the DriverClient be routable from the Master.
val (actorSystem, _) = AkkaUtils.createActorSystem(
"driverClient", Utils.localHostName(), port, conf, new SecurityManager(conf))
"driverClient", Utils.localHostName(), 0, conf, new SecurityManager(conf))

actorSystem.actorOf(Props(classOf[ClientActor], driverArgs, conf))

Expand Down
3 changes: 1 addition & 2 deletions core/src/main/scala/org/apache/spark/executor/Executor.scala
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,7 @@ private[spark] class Executor(
// Initialize Spark environment (using system properties read above)
private val env = {
if (!isLocal) {
val port = conf.getInt("spark.executor.env.port", 0)
val _env = SparkEnv.create(conf, executorId, slaveHostname, port,
val _env = SparkEnv.create(conf, executorId, slaveHostname, 0,
isDriver = false, isLocal = false)
SparkEnv.set(_env)
_env.metricsSystem.registerSource(executorSource)
Expand Down
14 changes: 0 additions & 14 deletions docs/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -606,20 +606,6 @@ Apart from these, the following properties are also available, and may be useful
Port for the executor to listen on. This is used for communicating with the driver.
</td>
</tr>
<tr>
<td><code>spark.executor.env.port</code></td>
<td>(random)</td>
<td>
Port used by the executor's actor system for various purposes.
</td>
</tr>
<tr>
<td><code>spark.standalone.client.port</code></td>
<td>(random)</td>
<td>
Port used by <code>org.apache.spark.deploy.Client</code> in standalone cluster deploy mode.
</td>
</tr>
<tr>
<td><code>spark.port.maxRetries</code></td>
<td>16</td>
Expand Down
8 changes: 0 additions & 8 deletions docs/security.md
Original file line number Diff line number Diff line change
Expand Up @@ -107,14 +107,6 @@ configure those ports.
<td><code>spark.executor.port</code></td>
<td>Akka-based. Set to "0" to choose a port randomly.</td>
</tr>
<tr>
<td>Driver</td>
<td>Executor</td>
<td>(random)</td>
<td>Executor actor system port</td>
<td><code>spark.executor.env.port</code></td>
<td>Akka-based. Set to "0" to choose a port randomly.</td>
</tr>
<tr>
<td>Executor</td>
<td>Driver</td>
Expand Down

0 comments on commit 86461e2

Please sign in to comment.