Revert "[SPARK-3454] separate json endpoints for data in the UI"
This reverts commit d497358.

The commit broke Spark on Windows.
rxin committed May 6, 2015
1 parent 1fd31ba commit 51b3d41
Showing 100 changed files with 172 additions and 19,946 deletions.
7 changes: 0 additions & 7 deletions .rat-excludes
@@ -74,12 +74,5 @@ logs
 .*scalastyle-output.xml
 .*dependency-reduced-pom.xml
 known_translations
-json_expectation
-local-1422981759269/*
-local-1422981780767/*
-local-1425081759269/*
-local-1426533911241/*
-local-1426633911242/*
-local-1427397477963/*
 DESCRIPTION
 NAMESPACE
8 changes: 0 additions & 8 deletions core/pom.xml
@@ -228,14 +228,6 @@
       <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
       <version>3.2.10</version>
     </dependency>
-    <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-core</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.mesos</groupId>
       <artifactId>mesos</artifactId>
8 changes: 1 addition & 7 deletions core/src/main/java/org/apache/spark/JobExecutionStatus.java
@@ -17,15 +17,9 @@

 package org.apache.spark;

-import org.apache.spark.util.EnumUtil;
-
 public enum JobExecutionStatus {
   RUNNING,
   SUCCEEDED,
   FAILED,
-  UNKNOWN;
-
-  public static JobExecutionStatus fromString(String str) {
-    return EnumUtil.parseIgnoreCase(JobExecutionStatus.class, str);
-  }
+  UNKNOWN
 }

core/src/main/java/org/apache/spark/status/api/v1/ApplicationStatus.java

This file was deleted.

31 changes: 0 additions & 31 deletions core/src/main/java/org/apache/spark/status/api/v1/StageStatus.java

This file was deleted.

48 changes: 0 additions & 48 deletions core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java

This file was deleted.

38 changes: 0 additions & 38 deletions core/src/main/java/org/apache/spark/util/EnumUtil.java

This file was deleted.
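
The deleted EnumUtil backed the fromString helper that this revert removes from JobExecutionStatus above. A minimal Scala sketch of the case-insensitive lookup it appears to have provided, inferred only from the call site EnumUtil.parseIgnoreCase(JobExecutionStatus.class, str); the object name and error message are illustrative:

    // Sketch: resolve an enum constant by name, ignoring case.
    object EnumUtilSketch {
      def parseIgnoreCase[E <: Enum[E]](clz: Class[E], str: String): E =
        clz.getEnumConstants
          .find(_.name().equalsIgnoreCase(str))
          .getOrElse(throw new IllegalArgumentException(
            s"Illegal value '$str' for ${clz.getSimpleName}"))
    }

    // Hypothetical usage: EnumUtilSketch.parseIgnoreCase(classOf[JobExecutionStatus], "running")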

2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -428,7 +428,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
     _ui =
       if (conf.getBoolean("spark.ui.enabled", true)) {
         Some(SparkUI.createLiveUI(this, _conf, listenerBus, _jobProgressListener,
-          _env.securityManager,appName, startTime = startTime))
+          _env.securityManager,appName))
       } else {
         // For tests, do not enable the UI
         None
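
As the hunk above shows, SparkContext builds a live UI only when spark.ui.enabled is true (the default). A minimal sketch of opting out, as the in-code comment notes tests do; the master URL and app name are illustrative, not from the commit:

    import org.apache.spark.{SparkConf, SparkContext}

    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("ui-disabled-example")
      .set("spark.ui.enabled", "false") // _ui stays None; no SparkUI.createLiveUI call
    val sc = new SparkContext(conf)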
41 changes: 0 additions & 41 deletions core/src/main/scala/org/apache/spark/annotation/Private.java

This file was deleted.

core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala
@@ -19,15 +19,15 @@ package org.apache.spark.deploy.history

 import org.apache.spark.ui.SparkUI

-private[spark] case class ApplicationAttemptInfo(
+private[history] case class ApplicationAttemptInfo(
     attemptId: Option[String],
     startTime: Long,
     endTime: Long,
     lastUpdated: Long,
     sparkUser: String,
     completed: Boolean = false)

-private[spark] case class ApplicationHistoryInfo(
+private[history] case class ApplicationHistoryInfo(
     id: String,
     name: String,
     attempts: List[ApplicationAttemptInfo])
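
The revert narrows both case classes from private[spark] back to private[history]. In Scala, a package-qualified modifier grants access only up to the named enclosing package; a small illustrative sketch (the class names are hypothetical, not from the commit):

    package org.apache.spark.deploy.history

    // Accessible from any code under org.apache.spark:
    private[spark] case class WideExample(id: String)

    // Accessible only from code under org.apache.spark.deploy.history:
    private[history] case class NarrowExample(id: String)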
core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -17,21 +17,23 @@

 package org.apache.spark.deploy.history

-import java.io.{BufferedInputStream, FileNotFoundException, IOException, InputStream}
+import java.io.{IOException, BufferedInputStream, FileNotFoundException, InputStream}
 import java.util.concurrent.{ExecutorService, Executors, TimeUnit}

 import scala.collection.mutable
 import scala.concurrent.duration.Duration

-import com.google.common.util.concurrent.{MoreExecutors, ThreadFactoryBuilder}
-import org.apache.hadoop.fs.{FileStatus, Path}
-import org.apache.hadoop.fs.permission.AccessControlException
+import com.google.common.util.concurrent.ThreadFactoryBuilder
+
-import org.apache.spark.{Logging, SecurityManager, SparkConf}
+import com.google.common.util.concurrent.MoreExecutors
+import org.apache.hadoop.fs.permission.AccessControlException
+import org.apache.hadoop.fs.{FileStatus, Path}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.scheduler._
 import org.apache.spark.ui.SparkUI
 import org.apache.spark.util.{Clock, SystemClock, ThreadUtils, Utils}
+import org.apache.spark.{Logging, SecurityManager, SparkConf}

 /**
  * A class that provides application history from event logs stored in the file system.
@@ -149,7 +151,7 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
       val conf = this.conf.clone()
       val appSecManager = new SecurityManager(conf)
       SparkUI.createHistoryUI(conf, replayBus, appSecManager, appId,
-        HistoryServer.getAttemptURI(appId, attempt.attemptId), attempt.startTime)
+        HistoryServer.getAttemptURI(appId, attempt.attemptId))
       // Do not call ui.bind() to avoid creating a new server for each application
     }

core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -25,7 +25,6 @@ import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}

 import org.apache.spark.{Logging, SecurityManager, SparkConf}
 import org.apache.spark.deploy.SparkHadoopUtil
-import org.apache.spark.status.api.v1.{ApplicationInfo, ApplicationsListResource, JsonRootResource, UIRoot}
 import org.apache.spark.ui.{SparkUI, UIUtils, WebUI}
 import org.apache.spark.ui.JettyUtils._
 import org.apache.spark.util.{SignalLogger, Utils}
@@ -46,7 +45,7 @@ class HistoryServer(
     provider: ApplicationHistoryProvider,
     securityManager: SecurityManager,
     port: Int)
-  extends WebUI(securityManager, port, conf) with Logging with UIRoot {
+  extends WebUI(securityManager, port, conf) with Logging {

   // How many applications to retain
   private val retainedApplications = conf.getInt("spark.history.retainedApplications", 50)
@@ -57,7 +56,7 @@
       require(parts.length == 1 || parts.length == 2, s"Invalid app key $key")
       val ui = provider
         .getAppUI(parts(0), if (parts.length > 1) Some(parts(1)) else None)
-        .getOrElse(throw new NoSuchElementException(s"no app with key $key"))
+        .getOrElse(throw new NoSuchElementException())
       attachSparkUI(ui)
       ui
     }
@@ -114,10 +113,6 @@
     }
   }

-  def getSparkUI(appKey: String): Option[SparkUI] = {
-    Option(appCache.get(appKey))
-  }
-
   initialize()

   /**
@@ -128,9 +123,6 @@
    */
   def initialize() {
     attachPage(new HistoryPage(this))
-
-    attachHandler(JsonRootResource.getJsonServlet(this))
-
     attachHandler(createStaticHandler(SparkUI.STATIC_RESOURCE_DIR, "/static"))

     val contextHandler = new ServletContextHandler
@@ -168,13 +160,7 @@
    *
    * @return List of all known applications.
    */
-  def getApplicationList(): Iterable[ApplicationHistoryInfo] = {
-    provider.getListing()
-  }
-
-  def getApplicationInfoList: Iterator[ApplicationInfo] = {
-    getApplicationList().iterator.map(ApplicationsListResource.appHistoryInfoToPublicAppInfo)
-  }
+  def getApplicationList(): Iterable[ApplicationHistoryInfo] = provider.getListing()

/**
* Returns the provider configuration to show in the listing page.
core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
@@ -28,7 +28,7 @@ import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.deploy.ApplicationDescription
 import org.apache.spark.util.Utils

-private[spark] class ApplicationInfo(
+private[deploy] class ApplicationInfo(
     val startTime: Long,
     val id: String,
     val desc: ApplicationDescription,