From 209640029c3d18329dbd9a8a9fdcaec9b3babc8c Mon Sep 17 00:00:00 2001 From: Sanket Date: Mon, 4 Apr 2016 10:26:00 -0500 Subject: [PATCH] Spark Version will be picked from pom.version and other relevant info related to build and compilation will be displayed --- core/pom.xml | 38 +++ .../java/org/apache/spark/VersionInfo.java | 131 ++++++++ .../resources/spark-version-info.properties | 24 ++ .../scala/org/apache/spark/SparkContext.scala | 4 +- .../org/apache/spark/deploy/SparkSubmit.scala | 8 +- .../apache/spark/deploy/master/Master.scala | 2 +- .../deploy/rest/RestSubmissionClient.scala | 4 +- .../deploy/rest/RestSubmissionServer.scala | 4 +- .../deploy/rest/StandaloneRestServer.scala | 8 +- .../deploy/rest/mesos/MesosRestServer.scala | 8 +- .../apache/spark/deploy/worker/Worker.scala | 2 +- .../spark/launcher/LauncherBackend.scala | 4 +- .../scheduler/EventLoggingListener.scala | 4 +- .../spark/status/api/v1/VersionResource.scala | 2 +- .../scala/org/apache/spark/ui/UIUtils.scala | 4 +- .../org/apache/spark/util/JsonProtocol.scala | 2 +- .../rest/StandaloneRestSubmitSuite.scala | 12 +- .../scheduler/EventLoggingListenerSuite.scala | 5 +- pom.xml | 1 + .../apache/spark/repl/SparkILoopInit.scala | 4 +- .../org/apache/spark/repl/SparkILoop.scala | 6 +- spark-maven-plugins/pom.xml | 81 +++++ .../spark/maven/plugin/util/CommandExec.java | 89 +++++ .../plugin/versioninfo/VersionInfoMojo.java | 304 ++++++++++++++++++ .../datasources/parquet/ParquetTest.scala | 4 +- 25 files changed, 716 insertions(+), 39 deletions(-) create mode 100644 core/src/main/java/org/apache/spark/VersionInfo.java create mode 100644 core/src/main/resources/spark-version-info.properties create mode 100644 spark-maven-plugins/pom.xml create mode 100644 spark-maven-plugins/src/main/java/org/apache/spark/maven/plugin/util/CommandExec.java create mode 100644 spark-maven-plugins/src/main/java/org/apache/spark/maven/plugin/versioninfo/VersionInfoMojo.java diff --git a/core/pom.xml b/core/pom.xml index 
4c7e3a36620a9..87af4b93ed7b5 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -324,6 +324,22 @@ target/scala-${scala.binary.version}/classes target/scala-${scala.binary.version}/test-classes + + + ${basedir}/src/main/resources + + spark-version-info.properties + + false + + + ${basedir}/src/main/resources + + spark-version-info.properties + + true + + org.apache.maven.plugins @@ -352,6 +368,28 @@ + + org.apache.spark + spark-maven-plugins_${scala.binary.version} + ${project.version} + + + version-info + generate-resources + + version-info + + + + ${basedir}/src/ + + main/java/**/*.java + + + + + + diff --git a/core/src/main/java/org/apache/spark/VersionInfo.java b/core/src/main/java/org/apache/spark/VersionInfo.java new file mode 100644 index 0000000000000..7a3ad891f294f --- /dev/null +++ b/core/src/main/java/org/apache/spark/VersionInfo.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +public class VersionInfo { + + private static final Logger LOG = LoggerFactory.getLogger(VersionInfo.class); + private Properties info; + + protected VersionInfo(String component) { + info = new Properties(); + String versionInfoFile = component + "-version-info.properties"; + InputStream is = null; + try { + is = Thread.currentThread().getContextClassLoader() + .getResourceAsStream(versionInfoFile); + if (is == null) { + throw new IOException("Resource not found"); + } + info.load(is); + } catch (IOException ex) { + } finally { + if (is != null) { + try { + is.close(); + } catch (IOException ioex) { + } + } + } + } + + protected String _getVersion() { + return info.getProperty("version", "Unknown"); + } + + protected String _getRevision() { + return info.getProperty("revision", "Unknown"); + } + + protected String _getBranch() { + return info.getProperty("branch", "Unknown"); + } + + protected String _getDate() { + return info.getProperty("date", "Unknown"); + } + + protected String _getUser() { + return info.getProperty("user", "Unknown"); + } + + protected String _getUrl() { + return info.getProperty("url", "Unknown"); + } + + protected String _getSrcChecksum() { + return info.getProperty("srcChecksum", "Unknown"); + } + + protected String _getBuildVersion(){ + return getVersion() + + " from " + _getRevision() + + " by " + _getUser() + + " source checksum " + _getSrcChecksum(); + } + + private static VersionInfo COMMON_VERSION_INFO = new VersionInfo("spark"); + + public static String getVersion() { + return COMMON_VERSION_INFO._getVersion(); + } + + public static String getRevision() { + return COMMON_VERSION_INFO._getRevision(); + } + + public static String getBranch() { + return COMMON_VERSION_INFO._getBranch(); + } + + public static String getDate() { + return 
COMMON_VERSION_INFO._getDate(); + } + + public static String getUser() { + return COMMON_VERSION_INFO._getUser(); + } + + public static String getUrl() { + return COMMON_VERSION_INFO._getUrl(); + } + + public static String getSrcChecksum() { + return COMMON_VERSION_INFO._getSrcChecksum(); + } + + public static String getBuildVersion(){ + return COMMON_VERSION_INFO._getBuildVersion(); + } + + public static void main(String[] args) { + System.out.println("Spark " + getVersion()); + System.out.println("URL " + getUrl() + " -r " + getRevision()); + System.out.println("Branch " + getBranch()); + System.out.println("Compiled by " + getUser() + " on " + getDate()); + System.out.println("From source with checksum " + getSrcChecksum()); + } +} diff --git a/core/src/main/resources/spark-version-info.properties b/core/src/main/resources/spark-version-info.properties new file mode 100644 index 0000000000000..7ae3130b21308 --- /dev/null +++ b/core/src/main/resources/spark-version-info.properties @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +version=${pom.version} +revision=${version-info.scm.commit} +branch=${version-info.scm.branch} +user=${user.name} +date=${version-info.build.time} +url=${version-info.scm.uri} +srcChecksum=${version-info.source.md5} diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index d7cb253d69b3c..988e7ea48fcd2 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -187,7 +187,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli this(master, appName, sparkHome, jars, Map()) // log out Spark Version in Spark driver log - logInfo(s"Running Spark version $SPARK_VERSION") + logInfo("Running Spark version " + VersionInfo.getVersion) /* ------------------------------------------------------------------------------------- * | Private variables. These variables keep the internal state of the context, and are | @@ -1461,7 +1461,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli } /** The version of Spark on which this application is running. 
*/ - def version: String = SPARK_VERSION + def version: String = VersionInfo.getVersion /** * Return a map from the slave to the max memory available for caching and the remaining diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala index 4049fc0c41c53..9ae79180370ae 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala @@ -40,7 +40,7 @@ import org.apache.ivy.plugins.matcher.GlobPatternMatcher import org.apache.ivy.plugins.repository.file.FileRepository import org.apache.ivy.plugins.resolver.{ChainResolver, FileSystemResolver, IBiblioResolver} -import org.apache.spark.{SPARK_VERSION, SparkException, SparkUserAppException} +import org.apache.spark.{VersionInfo, SparkException, SparkUserAppException} import org.apache.spark.api.r.RUtils import org.apache.spark.deploy.rest._ import org.apache.spark.util.{ChildFirstURLClassLoader, MutableURLClassLoader, Utils} @@ -105,7 +105,11 @@ object SparkSubmit { _\ \/ _ \/ _ `/ __/ '_/ /___/ .__/\_,_/_/ /_/\_\ version %s /_/ - """.format(SPARK_VERSION)) + """.format(VersionInfo.getVersion)) + printStream.println("Branch %s".format(VersionInfo.getBranch)) + printStream.println("Compiled by user %s on %s".format(VersionInfo.getUser, VersionInfo.getDate)) + printStream.println("Url %s".format(VersionInfo.getUrl)) + printStream.println("From source with checksum %s".format(VersionInfo.getSrcChecksum)) printStream.println("Type --help for more information.") exitFn(0) } diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala index 01901bbf85d73..f65cdc58fd94e 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala @@ -140,7 +140,7 @@ private[deploy] class Master( override def onStart(): 
Unit = { logInfo("Starting Spark master at " + masterUrl) - logInfo(s"Running Spark version ${org.apache.spark.SPARK_VERSION}") + logInfo(s"Running Spark version ${org.apache.spark.VersionInfo.getVersion}") webUi = new MasterWebUI(this, webUiPort) webUi.bind() masterWebUiUrl = "http://" + masterPublicAddress + ":" + webUi.boundPort diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala index c5a5876a896cc..f24c91f4610a4 100644 --- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala +++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala @@ -30,7 +30,7 @@ import scala.io.Source import com.fasterxml.jackson.core.JsonProcessingException -import org.apache.spark.{SPARK_VERSION => sparkVersion, SparkConf} +import org.apache.spark.{VersionInfo, SparkConf} import org.apache.spark.internal.Logging import org.apache.spark.util.Utils @@ -175,7 +175,7 @@ private[spark] class RestSubmissionClient(master: String) extends Logging { sparkProperties: Map[String, String], environmentVariables: Map[String, String]): CreateSubmissionRequest = { val message = new CreateSubmissionRequest - message.clientSparkVersion = sparkVersion + message.clientSparkVersion = VersionInfo.getVersion message.appResource = appResource message.mainClass = mainClass message.appArgs = appArgs diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala index 14244ea5714c6..39abff93926ff 100644 --- a/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala +++ b/core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionServer.scala @@ -29,7 +29,7 @@ import org.eclipse.jetty.util.thread.QueuedThreadPool import org.json4s._ import org.json4s.jackson.JsonMethods._ -import org.apache.spark.{SPARK_VERSION => sparkVersion, 
SparkConf} +import org.apache.spark.{VersionInfo, SparkConf} import org.apache.spark.internal.Logging import org.apache.spark.util.Utils @@ -151,7 +151,7 @@ private[rest] abstract class RestServlet extends HttpServlet with Logging { /** Construct an error message to signal the fact that an exception has been thrown. */ protected def handleError(message: String): ErrorResponse = { val e = new ErrorResponse - e.serverSparkVersion = sparkVersion + e.serverSparkVersion = VersionInfo.getVersion e.message = message e } diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala index c19296c7b3e00..a10ba3edac396 100644 --- a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala +++ b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala @@ -20,7 +20,7 @@ package org.apache.spark.deploy.rest import java.io.File import javax.servlet.http.HttpServletResponse -import org.apache.spark.{SPARK_VERSION => sparkVersion, SparkConf} +import org.apache.spark.{VersionInfo, SparkConf} import org.apache.spark.deploy.{Command, DeployMessages, DriverDescription} import org.apache.spark.deploy.ClientArguments._ import org.apache.spark.rpc.RpcEndpointRef @@ -74,7 +74,7 @@ private[rest] class StandaloneKillRequestServlet(masterEndpoint: RpcEndpointRef, val response = masterEndpoint.askWithRetry[DeployMessages.KillDriverResponse]( DeployMessages.RequestKillDriver(submissionId)) val k = new KillSubmissionResponse - k.serverSparkVersion = sparkVersion + k.serverSparkVersion = VersionInfo.getVersion k.message = response.message k.submissionId = submissionId k.success = response.success @@ -93,7 +93,7 @@ private[rest] class StandaloneStatusRequestServlet(masterEndpoint: RpcEndpointRe DeployMessages.RequestDriverStatus(submissionId)) val message = response.exception.map { s"Exception from the cluster:\n" + formatException(_) } val d = new 
SubmissionStatusResponse - d.serverSparkVersion = sparkVersion + d.serverSparkVersion = VersionInfo.getVersion d.submissionId = submissionId d.success = response.found d.driverState = response.state.map(_.toString).orNull @@ -177,7 +177,7 @@ private[rest] class StandaloneSubmitRequestServlet( val response = masterEndpoint.askWithRetry[DeployMessages.SubmitDriverResponse]( DeployMessages.RequestSubmitDriver(driverDescription)) val submitResponse = new CreateSubmissionResponse - submitResponse.serverSparkVersion = sparkVersion + submitResponse.serverSparkVersion = VersionInfo.getVersion submitResponse.message = response.message submitResponse.success = response.success submitResponse.submissionId = response.driverId.orNull diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala index 3b96488a129a9..dca870739f548 100644 --- a/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala +++ b/core/src/main/scala/org/apache/spark/deploy/rest/mesos/MesosRestServer.scala @@ -23,7 +23,7 @@ import java.util.Date import java.util.concurrent.atomic.AtomicLong import javax.servlet.http.HttpServletResponse -import org.apache.spark.{SPARK_VERSION => sparkVersion, SparkConf} +import org.apache.spark.{VersionInfo, SparkConf} import org.apache.spark.deploy.Command import org.apache.spark.deploy.mesos.MesosDriverDescription import org.apache.spark.deploy.rest._ @@ -124,7 +124,7 @@ private[mesos] class MesosSubmitRequestServlet( case submitRequest: CreateSubmissionRequest => val driverDescription = buildDriverDescription(submitRequest) val s = scheduler.submitDriver(driverDescription) - s.serverSparkVersion = sparkVersion + s.serverSparkVersion = VersionInfo.getVersion val unknownFields = findUnknownFields(requestMessageJson, requestMessage) if (unknownFields.nonEmpty) { // If there are fields that the server does not know about, warn the client @@ -142,7 
+142,7 @@ private[mesos] class MesosKillRequestServlet(scheduler: MesosClusterScheduler, c extends KillRequestServlet { protected override def handleKill(submissionId: String): KillSubmissionResponse = { val k = scheduler.killDriver(submissionId) - k.serverSparkVersion = sparkVersion + k.serverSparkVersion = VersionInfo.getVersion k } } @@ -151,7 +151,7 @@ private[mesos] class MesosStatusRequestServlet(scheduler: MesosClusterScheduler, extends StatusRequestServlet { protected override def handleStatus(submissionId: String): SubmissionStatusResponse = { val d = scheduler.getDriverStatus(submissionId) - d.serverSparkVersion = sparkVersion + d.serverSparkVersion = VersionInfo.getVersion d } } diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala index 1b7637a39ca7c..19d41a161ea7e 100755 --- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala @@ -180,7 +180,7 @@ private[deploy] class Worker( assert(!registered) logInfo("Starting Spark worker %s:%d with %d cores, %s RAM".format( host, port, cores, Utils.megabytesToString(memory))) - logInfo(s"Running Spark version ${org.apache.spark.SPARK_VERSION}") + logInfo(s"Running Spark version ${org.apache.spark.VersionInfo.getVersion}") logInfo("Spark home: " + sparkHome) createWorkDir() shuffleService.startIfEnabled() diff --git a/core/src/main/scala/org/apache/spark/launcher/LauncherBackend.scala b/core/src/main/scala/org/apache/spark/launcher/LauncherBackend.scala index a5d41a1eeb479..256f7cacd36ff 100644 --- a/core/src/main/scala/org/apache/spark/launcher/LauncherBackend.scala +++ b/core/src/main/scala/org/apache/spark/launcher/LauncherBackend.scala @@ -19,7 +19,7 @@ package org.apache.spark.launcher import java.net.{InetAddress, Socket} -import org.apache.spark.SPARK_VERSION +import org.apache.spark.VersionInfo import 
org.apache.spark.launcher.LauncherProtocol._ import org.apache.spark.util.{ThreadUtils, Utils} @@ -42,7 +42,7 @@ private[spark] abstract class LauncherBackend { if (port != None && secret != None) { val s = new Socket(InetAddress.getLoopbackAddress(), port.get) connection = new BackendConnection(s) - connection.send(new Hello(secret.get, SPARK_VERSION)) + connection.send(new Hello(secret.get, VersionInfo.getVersion)) clientThread = LauncherBackend.threadFactory.newThread(connection) clientThread.start() _isConnected = true diff --git a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala index a7d06391176d2..467fecaca11d3 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala @@ -30,7 +30,7 @@ import org.apache.hadoop.fs.permission.FsPermission import org.json4s.JsonAST.JValue import org.json4s.jackson.JsonMethods._ -import org.apache.spark.{SPARK_VERSION, SparkConf} +import org.apache.spark.{VersionInfo, SparkConf} import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.internal.Logging import org.apache.spark.io.CompressionCodec @@ -253,7 +253,7 @@ private[spark] object EventLoggingListener extends Logging { * @param logStream Raw output stream to the event log file. 
*/ def initEventLog(logStream: OutputStream): Unit = { - val metadata = SparkListenerLogStart(SPARK_VERSION) + val metadata = SparkListenerLogStart(VersionInfo.getVersion) val metadataJson = compact(JsonProtocol.logStartToJson(metadata)) + "\n" logStream.write(metadataJson.getBytes(StandardCharsets.UTF_8)) } diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/VersionResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/VersionResource.scala index 673da1ce36b57..a97c69cc0bcdb 100644 --- a/core/src/main/scala/org/apache/spark/status/api/v1/VersionResource.scala +++ b/core/src/main/scala/org/apache/spark/status/api/v1/VersionResource.scala @@ -24,7 +24,7 @@ private[v1] class VersionResource(ui: UIRoot) { @GET def getVersionInfo(): VersionInfo = new VersionInfo( - org.apache.spark.SPARK_VERSION + org.apache.spark.VersionInfo.getVersion ) } diff --git a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala index 28d277df4ae12..6e0e60946fb77 100644 --- a/core/src/main/scala/org/apache/spark/ui/UIUtils.scala +++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala @@ -222,7 +222,7 @@ private[spark] object UIUtils extends Logging {
- {org.apache.spark.SPARK_VERSION} + {org.apache.spark.VersionInfo.getVersion}