Refactor statusapiv1 to trait and implement for ease of creation of these objects when we implement our own parser #248

Merged: 9 commits, May 23, 2017
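The diff below applies a single pattern: Jackson deserialization is now bound to the concrete *Impl classes rather than the statusapiv1 traits, because Jackson needs an instantiable class to populate, while the traits remain the lightweight interface that can be implemented directly once a custom event-log parser is added (the motivation in the PR title). A minimal sketch of the trait/Impl split, with hypothetical fields (jobId, name) standing in for the much larger real statusapiv1 members:

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper

// Read-only view consumed by the rest of the application (illustrative fields only).
trait JobData {
  def jobId: Int
  def name: String
}

// Concrete class that Jackson can instantiate, and that a custom parser can
// construct by hand.
class JobDataImpl(val jobId: Int, val name: String) extends JobData

object TraitImplSketch extends App {
  // Same mapper setup as the imports in SparkRestClient.scala suggest.
  val mapper = new ObjectMapper with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)

  // readValue must target the concrete Impl type; a bare trait gives Jackson
  // no constructor to call.
  val job = mapper.readValue[JobDataImpl]("""{"jobId": 1, "name": "count at Foo.scala:42"}""")
  println(s"${job.jobId} -> ${job.name}")
}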
app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala (17 changes: 9 additions & 8 deletions)
@@ -33,6 +33,7 @@ import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.linkedin.drelephant.spark.data.{SparkApplicationData, SparkLogDerivedData, SparkRestDerivedData}
import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfo, ExecutorSummary, JobData, StageData}
+import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfoImpl, ExecutorSummaryImpl, JobDataImpl, StageDataImpl}
import com.linkedin.drelephant.util.SparkUtils
import javax.ws.rs.client.{Client, ClientBuilder, WebTarget}
import javax.ws.rs.core.MediaType
@@ -125,9 +126,9 @@ class SparkRestClient(sparkConf: SparkConf) {
(applicationInfo, attemptTarget)
}

-private def getApplicationInfo(appTarget: WebTarget): ApplicationInfo = {
+private def getApplicationInfo(appTarget: WebTarget): ApplicationInfoImpl = {
try {
-get(appTarget, SparkRestObjectMapper.readValue[ApplicationInfo])
+get(appTarget, SparkRestObjectMapper.readValue[ApplicationInfoImpl])
} catch {
case NonFatal(e) => {
logger.error(s"error reading applicationInfo ${appTarget.getUri}", e)
@@ -177,10 +178,10 @@ }
}
}

-private def getJobDatas(attemptTarget: WebTarget): Seq[JobData] = {
+private def getJobDatas(attemptTarget: WebTarget): Seq[JobDataImpl] = {
val target = attemptTarget.path("jobs")
try {
-get(target, SparkRestObjectMapper.readValue[Seq[JobData]])
+get(target, SparkRestObjectMapper.readValue[Seq[JobDataImpl]])
} catch {
case NonFatal(e) => {
logger.error(s"error reading jobData ${target.getUri}", e)
@@ -189,10 +190,10 @@ }
}
}

-private def getStageDatas(attemptTarget: WebTarget): Seq[StageData] = {
+private def getStageDatas(attemptTarget: WebTarget): Seq[StageDataImpl] = {
val target = attemptTarget.path("stages")
try {
-get(target, SparkRestObjectMapper.readValue[Seq[StageData]])
+get(target, SparkRestObjectMapper.readValue[Seq[StageDataImpl]])
} catch {
case NonFatal(e) => {
logger.error(s"error reading stageData ${target.getUri}", e)
@@ -201,10 +202,10 @@ }
}
}

-private def getExecutorSummaries(attemptTarget: WebTarget): Seq[ExecutorSummary] = {
+private def getExecutorSummaries(attemptTarget: WebTarget): Seq[ExecutorSummaryImpl] = {
val target = attemptTarget.path("executors")
try {
-get(target, SparkRestObjectMapper.readValue[Seq[ExecutorSummary]])
+get(target, SparkRestObjectMapper.readValue[Seq[ExecutorSummaryImpl]])
} catch {
case NonFatal(e) => {
logger.error(s"error reading executorSummary ${target.getUri}", e)
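Every fetch in this file follows the same shape: build a WebTarget for the REST endpoint, then hand SparkRestObjectMapper.readValue[...] to a shared get helper as a String => T converter. A hypothetical helper compatible with those call sites (the project's actual method may differ) could look like:

import javax.ws.rs.client.WebTarget
import javax.ws.rs.core.MediaType

object RestGetSketch {
  // Fetch the response body as a JSON string and let the caller-supplied
  // converter (e.g. SparkRestObjectMapper.readValue[Seq[JobDataImpl]]) turn it
  // into the typed result.
  def get[T](webTarget: WebTarget, converter: String => T): T =
    converter(webTarget.request(MediaType.APPLICATION_JSON).get(classOf[String]))
}

Eta-expanding readValue[Seq[JobDataImpl]] produces exactly such a String => Seq[JobDataImpl] function, and because Scala's Seq is covariant, the narrowed return types stay compatible with any caller that still expects Seq[JobData].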