From 7c9641c0b277933a8eda58e30225a0548382476d Mon Sep 17 00:00:00 2001 From: Thibault Jeandet Date: Fri, 30 Jun 2017 14:44:49 -0400 Subject: [PATCH 01/41] Update cromwell version from 28 to 29 --- project/Version.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Version.scala b/project/Version.scala index 251a4c7d6..3e4ee2bf0 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -4,7 +4,7 @@ import sbt._ object Version { // Upcoming release, or current if we're on a master / hotfix branch - val cromwellVersion = "28" + val cromwellVersion = "29" // Adapted from SbtGit.versionWithGit def cromwellVersionWithGit: Seq[Setting[_]] = From 635733ca482f4a19dbad3475b35e9fad0c3b1f41 Mon Sep 17 00:00:00 2001 From: Thib Date: Fri, 30 Jun 2017 17:16:48 -0400 Subject: [PATCH 02/41] update release WDL (#2399) --- release/release_workflow.wdl | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/release/release_workflow.wdl b/release/release_workflow.wdl index c56f1a1d1..5bd0b004e 100644 --- a/release/release_workflow.wdl +++ b/release/release_workflow.wdl @@ -22,7 +22,7 @@ task do_release { set -x # Clone repo and checkout develop - git clone https://github.com/${organization}/${repo}.git + git clone git@github.com:${organization}/${repo}.git cd ${repo} git checkout develop git pull --rebase @@ -33,10 +33,6 @@ task do_release { echo "Updating dependencies" ${sep='\n' dependencyCommands} - # Make sure tests pass - sbt update - JAVA_OPTS=-XX:MaxMetaspaceSize=512m sbt test - git add . # If there is nothing to commit, git commit will return 1 which will fail the script. 
# This ensures we only commit if build.sbt was effectively updated @@ -48,7 +44,7 @@ task do_release { # Generate new scaladoc sbt 'set scalacOptions in (Compile, doc) := List("-skip-packages", "better")' doc git checkout gh-pages - mv target/scala-2.11/api ${releaseV} + mv target/scala-2.12/api ${releaseV} git add ${releaseV} # Update latest pointer @@ -74,15 +70,11 @@ task do_release { # Merge develop into master git checkout master git pull --rebase - git merge develop + git merge develop --no-edit - # Pin centaur for cromwell - if [ ${repo} == "cromwell" ]; then - centaurDevelopHEAD=$(git ls-remote git://github.com/${organization}/centaur.git | grep refs/heads/develop | cut -f 1) - sed -i '' s/CENTAUR_BRANCH=.*/CENTAUR_BRANCH="$centaurDevelopHEAD"/g .travis.yml - git add .travis.yml - git commit -m "Pin release to centaur branch" - fi + # Make sure tests pass + sbt update + JAVA_OPTS=-XX:MaxMetaspaceSize=1024m sbt test # Tag the release git tag ${releaseV} @@ -93,6 +85,15 @@ task do_release { # Create and push the hotfix branch git checkout -b ${releaseV}_hotfix + + # Pin centaur for cromwell + if [ ${repo} == "cromwell" ]; then + centaurDevelopHEAD=$(git ls-remote git://github.com/${organization}/centaur.git | grep refs/heads/develop | cut -f 1) + sed -i '' s/CENTAUR_BRANCH=.*/CENTAUR_BRANCH="$centaurDevelopHEAD"/g .travis.yml + git add .travis.yml + git commit -m "Pin release to centaur branch" + fi + git push origin ${releaseV}_hotfix # Assemble jar for cromwell From 176de92a4577fbd5653c82f9da4533457ee94a50 Mon Sep 17 00:00:00 2001 From: Jeff Gentry Date: Sat, 1 Jul 2017 13:21:15 -0400 Subject: [PATCH 03/41] Migrate Cromwell from Spray to Akka HTTP. 
Closes #1243 (#2380) --- CHANGELOG.md | 6 + NOTICE | 4 - .../workflow/SingleWorkflowRunnerActor.scala | 17 +- .../execution/callcaching/CallCacheDiffActor.scala | 43 +- .../workflowstore/WorkflowStoreEngineActor.scala | 5 +- .../scala/cromwell/server/CromwellRootActor.scala | 4 +- .../scala/cromwell/server/CromwellServer.scala | 89 +-- .../cromwell/webservice/CromwellApiHandler.scala | 79 -- .../cromwell/webservice/CromwellApiService.scala | 564 +++++-------- .../cromwell/webservice/LabelsManagerActor.scala | 19 +- .../webservice/PartialWorkflowSources.scala | 142 ++++ .../scala/cromwell/webservice/PerRequest.scala | 120 --- .../cromwell/webservice/SprayCanHttpService.scala | 177 ----- .../scala/cromwell/webservice/SwaggerService.scala | 7 + .../cromwell/webservice/SwaggerUiHttpService.scala | 31 +- .../cromwell/webservice/WorkflowJsonSupport.scala | 6 +- .../scala/cromwell/webservice/WrappedRoute.scala | 8 +- .../webservice/metadata/MetadataBuilderActor.scala | 69 +- .../metadata/WorkflowQueryPagination.scala | 38 +- .../test/scala/cromwell/CromwellTestKitSpec.scala | 31 +- .../callcaching/CallCacheDiffActorSpec.scala | 31 +- .../scala/cromwell/server/CromwellServerSpec.scala | 33 - .../webservice/CromwellApiServiceSpec.scala | 875 +++++++++------------ .../webservice/MetadataBuilderActorSpec.scala | 59 +- .../webservice/PartialWorkflowSourcesSpec.scala | 17 + .../webservice/SprayCanHttpServiceSpec.scala | 252 ------ .../cromwell/webservice/SwaggerServiceSpec.scala | 4 +- .../webservice/SwaggerUiHttpServiceSpec.scala | 36 +- .../cromwell/webservice/WrappedRouteSpec.scala | 22 +- project/Dependencies.scala | 51 +- .../services/metadata/MetadataService.scala | 33 +- .../metadata/impl/MetadataServiceActor.scala | 25 +- .../services/metadata/impl/ReadMetadataActor.scala | 6 +- src/main/resources/application.conf | 3 +- .../backend/impl/spark/SparkClusterProcess.scala | 31 +- .../impl/spark/SparkClusterProcessSpec.scala | 8 +- 
.../tes/TesAsyncBackendJobExecutionActor.scala | 42 +- 37 files changed, 1064 insertions(+), 1923 deletions(-) delete mode 100644 NOTICE delete mode 100644 engine/src/main/scala/cromwell/webservice/CromwellApiHandler.scala create mode 100644 engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala delete mode 100644 engine/src/main/scala/cromwell/webservice/PerRequest.scala delete mode 100644 engine/src/main/scala/cromwell/webservice/SprayCanHttpService.scala create mode 100644 engine/src/main/scala/cromwell/webservice/SwaggerService.scala delete mode 100644 engine/src/test/scala/cromwell/server/CromwellServerSpec.scala create mode 100644 engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala delete mode 100644 engine/src/test/scala/cromwell/webservice/SprayCanHttpServiceSpec.scala diff --git a/CHANGELOG.md b/CHANGELOG.md index 82540c66b..f613fd567 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Cromwell Change Log +## 29 + +### Breaking Changes + +* Request timeouts for HTTP requests on the REST API now return a 503 status code instead of 500. The response for a request timeout is no longer in JSON format. + ## 28 ### Bug Fixes diff --git a/NOTICE b/NOTICE deleted file mode 100644 index 15d3c2b54..000000000 --- a/NOTICE +++ /dev/null @@ -1,4 +0,0 @@ -cromwell.webservice/PerRequest.scala (https://github.com/NET-A-PORTER/spray-actor-per-request) -is distributed with this software under the Apache License, Version 2.0 (see the LICENSE-ASL file). 
In accordance -with that license, that software comes with the following notices: -    Copyright (C) 2011-2012 Ian Forsey diff --git a/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala b/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala index cb9e3791a..492793386 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala @@ -20,9 +20,8 @@ import cromwell.server.CromwellRootActor import cromwell.services.metadata.MetadataService.{GetSingleWorkflowMetadataAction, GetStatus, WorkflowOutputs} import cromwell.services.metadata.impl.WriteMetadataActor.{CheckPendingWrites, HasPendingWrites, NoPendingWrites} import cromwell.subworkflowstore.EmptySubWorkflowStoreActor -import cromwell.webservice.PerRequest.RequestComplete import cromwell.webservice.metadata.MetadataBuilderActor -import spray.http.StatusCodes +import cromwell.webservice.metadata.MetadataBuilderActor.{BuiltMetadataResponse, FailedMetadataResponse} import spray.json._ import scala.concurrent.ExecutionContext.Implicits.global @@ -69,18 +68,18 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, metadataO case Event(IssuePollRequest, RunningSwraData(_, id)) => requestStatus(id) stay() - case Event(RequestComplete((StatusCodes.OK, jsObject: JsObject)), RunningSwraData(_, _)) if !jsObject.state.isTerminal => + case Event(BuiltMetadataResponse(jsObject: JsObject), RunningSwraData(_, _)) if !jsObject.state.isTerminal => schedulePollRequest() stay() - case Event(RequestComplete((StatusCodes.OK, jsObject: JsObject)), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowSucceeded => + case Event(BuiltMetadataResponse(jsObject: JsObject), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowSucceeded => log.info(s"$Tag workflow finished with status '$WorkflowSucceeded'.") serviceRegistryActor ! 
CheckPendingWrites goto(WaitingForFlushedMetadata) using SucceededSwraData(replyTo, id) - case Event(RequestComplete((StatusCodes.OK, jsObject: JsObject)), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowFailed => + case Event(BuiltMetadataResponse(jsObject: JsObject), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowFailed => log.info(s"$Tag workflow finished with status '$WorkflowFailed'.") serviceRegistryActor ! CheckPendingWrites goto(WaitingForFlushedMetadata) using FailedSwraData(replyTo, id, new RuntimeException(s"Workflow $id transitioned to state $WorkflowFailed")) - case Event(RequestComplete((StatusCodes.OK, jsObject: JsObject)), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowAborted => + case Event(BuiltMetadataResponse(jsObject: JsObject), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowAborted => log.info(s"$Tag workflow finished with status '$WorkflowAborted'.") serviceRegistryActor ! CheckPendingWrites goto(WaitingForFlushedMetadata) using AbortedSwraData(replyTo, id) @@ -100,13 +99,13 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, metadataO } when (RequestingOutputs) { - case Event(RequestComplete((StatusCodes.OK, outputs: JsObject)), data: TerminalSwraData) => + case Event(BuiltMetadataResponse(outputs: JsObject), data: TerminalSwraData) => outputOutputs(outputs) requestMetadataOrIssueReply(data) } when (RequestingMetadata) { - case Event(RequestComplete((StatusCodes.OK, metadata: JsObject)), data: TerminalSwraData) => + case Event(BuiltMetadataResponse(metadata: JsObject), data: TerminalSwraData) => outputMetadata(metadata) issueReply(data) } @@ -120,7 +119,7 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, metadataO case Event(r: WorkflowStoreEngineActor.WorkflowAbortFailed, data) => failAndFinish(r.reason, data) case Event(Failure(e), data) => failAndFinish(e, data) case Event(Status.Failure(e), data) => failAndFinish(e, data) - case 
Event(RequestComplete((_, snap)), data) => failAndFinish(new RuntimeException(s"Unexpected API completion message: $snap"), data) + case Event(FailedMetadataResponse(e), data) => failAndFinish(e, data) case Event((CurrentState(_, _) | Transition(_, _, _)), _) => // ignore uninteresting current state and transition messages stay() diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala index 8b6b677fd..19f02bc54 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala @@ -10,22 +10,23 @@ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffQue import cromwell.services.metadata.CallMetadataKeys.CallCachingKeys import cromwell.services.metadata.MetadataService.{GetMetadataQueryAction, MetadataLookupResponse, MetadataServiceKeyLookupFailed} import cromwell.services.metadata._ -import cromwell.webservice.APIResponse -import cromwell.webservice.PerRequest.RequestComplete -import cromwell.webservice.WorkflowJsonSupport._ import cromwell.webservice.metadata.MetadataComponent._ import cromwell.webservice.metadata._ -import spray.http.StatusCodes -import spray.httpx.SprayJsonSupport._ +import spray.json.JsObject import scala.language.postfixOps import scala.util.{Failure, Success, Try} object CallCacheDiffActor { private val PlaceholderMissingHashValue = MetadataPrimitive(MetadataValue("Error: there is a hash entry for this key but the value is null !")) - private val CallAAndBNotFoundException = new Exception("callA and callB were run on a previous version of Cromwell on which this endpoint was not supported.") - private val CallANotFoundException = new Exception("callA was run on a previous version of Cromwell on which 
this endpoint was not supported.") - private val CallBNotFoundException = new Exception("callB was run on a previous version of Cromwell on which this endpoint was not supported.") + + final case class CachedCallNotFoundException(message: String) extends Exception { + override def getMessage = message + } + + private val CallAAndBNotFoundException = CachedCallNotFoundException("callA and callB were run on a previous version of Cromwell on which this endpoint was not supported.") + private val CallANotFoundException = CachedCallNotFoundException("callA was run on a previous version of Cromwell on which this endpoint was not supported.") + private val CallBNotFoundException = CachedCallNotFoundException("callB was run on a previous version of Cromwell on which this endpoint was not supported.") sealed trait CallCacheDiffActorState case object Idle extends CallCacheDiffActorState @@ -40,6 +41,11 @@ object CallCacheDiffActor { replyTo: ActorRef ) extends CallCacheDiffActorData + sealed abstract class CallCacheDiffActorResponse + case class BuiltCallCacheDiffResponse(response: JsObject) extends CallCacheDiffActorResponse + case class FailedCallCacheDiffResponse(reason: Throwable) extends CallCacheDiffActorResponse + + def props(serviceRegistryActor: ActorRef) = Props(new CallCacheDiffActor(serviceRegistryActor)) } @@ -71,7 +77,7 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call case Event(response: MetadataLookupResponse, CallCacheDiffWithRequest(queryA, queryB, Some(responseA), None, replyTo)) if queryB == response.query => buildDiffAndRespond(queryA, queryB, responseA, response, replyTo) case Event(MetadataServiceKeyLookupFailed(_, failure), data: CallCacheDiffWithRequest) => - data.replyTo ! RequestComplete((StatusCodes.InternalServerError, APIResponse.error(failure))) + data.replyTo ! 
FailedCallCacheDiffResponse(failure) context stop self stay() } @@ -104,17 +110,16 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call replyTo: ActorRef) = { val response = diffHashes(responseA.eventList, responseB.eventList) match { - case Success(diff) => + case Success(diff) => val diffObject = MetadataObject(Map( - "callA" -> makeCallInfo(queryA, responseA.eventList), - "callB" -> makeCallInfo(queryB, responseB.eventList), - "hashDifferential" -> diff - )) - - RequestComplete((StatusCodes.OK, metadataComponentJsonWriter.write(diffObject).asJsObject)) - case Failure(f) => RequestComplete((StatusCodes.NotFound, APIResponse.error(f))) + "callA" -> makeCallInfo(queryA, responseA.eventList), + "callB" -> makeCallInfo(queryB, responseB.eventList), + "hashDifferential" -> diff + )) + BuiltCallCacheDiffResponse(metadataComponentJsonWriter.write(diffObject).asJsObject) + case Failure(f) => FailedCallCacheDiffResponse(f) } - + replyTo ! response context stop self @@ -210,7 +215,7 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call } } - + private def diffHashEvents(hashesA: Map[String, Option[MetadataValue]], hashesB: Map[String, Option[MetadataValue]]) = { val hashesUniqueToB: Map[String, Option[MetadataValue]] = hashesB.filterNot({ case (k, _) => hashesA.keySet.contains(k) }) diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala index b5767a926..e2a7ac9a1 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala @@ -146,8 +146,9 @@ object WorkflowStoreEngineActor { sealed trait WorkflowStoreEngineActorResponse case object NoNewWorkflowsToStart extends WorkflowStoreEngineActorResponse final case class 
NewWorkflowsToStart(workflows: NonEmptyList[WorkflowToStart]) extends WorkflowStoreEngineActorResponse - final case class WorkflowAborted(workflowId: WorkflowId) extends WorkflowStoreEngineActorResponse - final case class WorkflowAbortFailed(workflowId: WorkflowId, reason: Throwable) extends WorkflowStoreEngineActorResponse + sealed abstract class WorkflowStoreEngineAbortResponse extends WorkflowStoreEngineActorResponse + final case class WorkflowAborted(workflowId: WorkflowId) extends WorkflowStoreEngineAbortResponse + final case class WorkflowAbortFailed(workflowId: WorkflowId, reason: Throwable) extends WorkflowStoreEngineAbortResponse final case class WorkflowStoreActorCommandWithSender(command: WorkflowStoreActorEngineCommand, sender: ActorRef) diff --git a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala index ae7d799cf..3520ce029 100644 --- a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala +++ b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala @@ -21,7 +21,7 @@ import cromwell.engine.backend.{BackendSingletonCollection, CromwellBackends} import cromwell.engine.io.IoActor import cromwell.engine.workflow.WorkflowManagerActor import cromwell.engine.workflow.lifecycle.CopyWorkflowLogsActor -import cromwell.engine.workflow.lifecycle.execution.callcaching.{CallCache, CallCacheDiffActor, CallCacheReadActor, CallCacheWriteActor} +import cromwell.engine.workflow.lifecycle.execution.callcaching.{CallCache, CallCacheReadActor, CallCacheWriteActor} import cromwell.engine.workflow.tokens.JobExecutionTokenDispenserActor import cromwell.engine.workflow.workflowstore.{SqlWorkflowStore, WorkflowStore, WorkflowStoreActor} import cromwell.jobstore.{JobStore, JobStoreActor, SqlJobStore} @@ -107,8 +107,6 @@ import scala.language.postfixOps lazy val backendSingletonCollection = BackendSingletonCollection(backendSingletons) lazy val jobExecutionTokenDispenserActor = 
context.actorOf(JobExecutionTokenDispenserActor.props) - - lazy val callCacheDiffActorProps = CallCacheDiffActor.props(serviceRegistryActor) def abortJobsOnTerminate: Boolean diff --git a/engine/src/main/scala/cromwell/server/CromwellServer.scala b/engine/src/main/scala/cromwell/server/CromwellServer.scala index 559ba365b..a182f755d 100644 --- a/engine/src/main/scala/cromwell/server/CromwellServer.scala +++ b/engine/src/main/scala/cromwell/server/CromwellServer.scala @@ -1,75 +1,64 @@ package cromwell.server -import java.util.concurrent.TimeoutException +import akka.actor.{ActorContext, Props} +import akka.http.scaladsl.Http +import akka.http.scaladsl.server.Route +import akka.http.scaladsl.server.Directives._ -import akka.actor.{ActorContext, ActorSystem, Props} import akka.stream.ActorMaterializer -import com.typesafe.config.Config import cromwell.core.Dispatcher.EngineDispatcher -import cromwell.webservice.WorkflowJsonSupport._ -import cromwell.webservice.{APIResponse, CromwellApiService, SwaggerService} -import cromwell.webservice.SprayCanHttpService._ +import cromwell.webservice.{CromwellApiService, SwaggerService} import cromwell.webservice.WrappedRoute._ import net.ceedubs.ficus.Ficus._ -import spray.http._ -import spray.json._ -import spray.routing.Route import scala.concurrent.duration._ -import scala.concurrent.{Await, ExecutionContextExecutor, Future} +import scala.concurrent.{Await, Future} import scala.util.{Failure, Success} // Note that as per the language specification, this is instantiated lazily and only used when necessary (i.e. 
server mode) object CromwellServer { - def run(cromwellSystem: CromwellSystem): Future[Any] = { - implicit val executionContext = scala.concurrent.ExecutionContext.Implicits.global - - val actorSystem: ActorSystem = cromwellSystem.actorSystem - implicit val materializer: ActorMaterializer = cromwellSystem.materializer - - val service = actorSystem.actorOf(CromwellServerActor.props(cromwellSystem.conf), "cromwell-service") - val webserviceConf = cromwellSystem.conf.getConfig("webservice") - - val interface = webserviceConf.getString("interface") - val port = webserviceConf.getInt("port") - val timeout = webserviceConf.as[FiniteDuration]("binding-timeout") - val futureBind = service.bind(interface, port)(implicitly, timeout, actorSystem, implicitly) - futureBind andThen { - case Success(_) => - actorSystem.log.info("Cromwell service started...") - Await.result(actorSystem.whenTerminated, Duration.Inf) - case Failure(throwable) => - /* - TODO: - If/when CromwellServer behaves like a better async citizen, we may be less paranoid about our async log messages - not appearing due to the actor system shutdown. For now, synchronously print to the stderr so that the user has - some idea of why the server failed to start up. 
- */ - Console.err.println(s"Binding failed interface $interface port $port") - throwable.printStackTrace(Console.err) - cromwellSystem.shutdownActorSystem() + implicit val actorSystem = cromwellSystem.actorSystem + implicit val materializer = cromwellSystem.materializer + implicit val ec = actorSystem.dispatcher + actorSystem.actorOf(CromwellServerActor.props(cromwellSystem), "cromwell-service") + Future { + Await.result(actorSystem.whenTerminated, Duration.Inf) } } } -class CromwellServerActor(config: Config)(implicit materializer: ActorMaterializer) extends CromwellRootActor with CromwellApiService with SwaggerService { - implicit def executionContext: ExecutionContextExecutor = actorRefFactory.dispatcher +class CromwellServerActor(cromwellSystem: CromwellSystem)(override implicit val materializer: ActorMaterializer) + extends CromwellRootActor + with CromwellApiService + with SwaggerService { + implicit val actorSystem = context.system + override implicit val ec = context.dispatcher + override def actorRefFactory: ActorContext = context override val serverMode = true override val abortJobsOnTerminate = false - override def actorRefFactory: ActorContext = context - override def receive: PartialFunction[Any, Unit] = handleTimeouts orElse runRoute(possibleRoutes) + val webserviceConf = cromwellSystem.conf.getConfig("webservice") + val interface = webserviceConf.getString("interface") + val port = webserviceConf.getInt("port") - val routeUnwrapped: Boolean = config.as[Option[Boolean]]("api.routeUnwrapped").getOrElse(false) - val possibleRoutes: Route = workflowRoutes.wrapped("api", routeUnwrapped) ~ swaggerUiResourceRoute - val timeoutError: String = APIResponse.error(new TimeoutException( - "The server was not able to produce a timely response to your request.")).toJson.prettyPrint + val routeUnwrapped: Boolean = cromwellSystem.conf.as[Option[Boolean]]("api.routeUnwrapped").getOrElse(false) - def handleTimeouts: Receive = { - case Timedout(_: HttpRequest) => - 
sender() ! HttpResponse(StatusCodes.InternalServerError, HttpEntity(ContentType(MediaTypes.`application/json`), timeoutError)) + val allRoutes: Route = routes.wrapped("api", routeUnwrapped) ~ swaggerUiResourceRoute + + Http().bindAndHandle(allRoutes, interface, port) onComplete { + case Success(_) => actorSystem.log.info("Cromwell service started...") + case Failure(e) => + /* + TODO: + If/when CromwellServer behaves like a better async citizen, we may be less paranoid about our async log messages + not appearing due to the actor system shutdown. For now, synchronously print to the stderr so that the user has + some idea of why the server failed to start up. + */ + Console.err.println(s"Binding failed interface $interface port $port") + e.printStackTrace(Console.err) + cromwellSystem.shutdownActorSystem() } /* @@ -80,7 +69,7 @@ class CromwellServerActor(config: Config)(implicit materializer: ActorMaterializ } object CromwellServerActor { - def props(config: Config)(implicit materializer: ActorMaterializer): Props = { - Props(new CromwellServerActor(config)).withDispatcher(EngineDispatcher) + def props(cromwellSystem: CromwellSystem)(implicit materializer: ActorMaterializer): Props = { + Props(new CromwellServerActor(cromwellSystem)).withDispatcher(EngineDispatcher) } } diff --git a/engine/src/main/scala/cromwell/webservice/CromwellApiHandler.scala b/engine/src/main/scala/cromwell/webservice/CromwellApiHandler.scala deleted file mode 100644 index 984448ba0..000000000 --- a/engine/src/main/scala/cromwell/webservice/CromwellApiHandler.scala +++ /dev/null @@ -1,79 +0,0 @@ -package cromwell.webservice - -import akka.actor.{Actor, ActorRef, Props} -import akka.event.Logging -import cats.data.NonEmptyList -import com.typesafe.config.ConfigFactory -import cromwell.core._ -import cromwell.core.Dispatcher.ApiDispatcher -import cromwell.engine.workflow.WorkflowManagerActor -import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException -import 
cromwell.engine.workflow.workflowstore.{WorkflowStoreActor, WorkflowStoreEngineActor, WorkflowStoreSubmitActor} -import cromwell.webservice.PerRequest.RequestComplete -import cromwell.webservice.metadata.WorkflowQueryPagination -import spray.http.StatusCodes -import spray.httpx.SprayJsonSupport._ - - -object CromwellApiHandler { - def props(requestHandlerActor: ActorRef): Props = { - Props(new CromwellApiHandler(requestHandlerActor)).withDispatcher(ApiDispatcher) - } - - sealed trait ApiHandlerMessage - - final case class ApiHandlerWorkflowSubmit(source: WorkflowSourceFilesCollection) extends ApiHandlerMessage - final case class ApiHandlerWorkflowSubmitBatch(sources: NonEmptyList[WorkflowSourceFilesCollection]) extends ApiHandlerMessage - final case class ApiHandlerWorkflowAbort(id: WorkflowId, manager: ActorRef) extends ApiHandlerMessage - case object ApiHandlerEngineStats extends ApiHandlerMessage -} - -class CromwellApiHandler(requestHandlerActor: ActorRef) extends Actor with WorkflowQueryPagination { - import CromwellApiHandler._ - import WorkflowJsonSupport._ - - val log = Logging(context.system, classOf[CromwellApiHandler]) - val conf = ConfigFactory.load() - - def callNotFound(callFqn: String, id: WorkflowId) = { - RequestComplete((StatusCodes.NotFound, APIResponse.error( - new RuntimeException(s"Call $callFqn not found for workflow '$id'.")))) - } - - private def error(t: Throwable)(f: Throwable => RequestComplete[_]): Unit = context.parent ! f(t) - - override def receive = { - case ApiHandlerEngineStats => requestHandlerActor ! WorkflowManagerActor.EngineStatsCommand - case stats: EngineStatsActor.EngineStats => context.parent ! RequestComplete((StatusCodes.OK, stats)) - case ApiHandlerWorkflowAbort(id, manager) => requestHandlerActor ! WorkflowStoreActor.AbortWorkflow(id, manager) - case WorkflowStoreEngineActor.WorkflowAborted(id) => - context.parent ! 
RequestComplete((StatusCodes.OK, WorkflowAbortResponse(id.toString, WorkflowAborted.toString))) - case WorkflowStoreEngineActor.WorkflowAbortFailed(_, e) => - error(e) { - case _: IllegalStateException => RequestComplete((StatusCodes.Forbidden, APIResponse.error(e))) - case _: WorkflowNotFoundException => RequestComplete((StatusCodes.NotFound, APIResponse.error(e))) - case _ => RequestComplete((StatusCodes.InternalServerError, APIResponse.error(e))) - } - - case ApiHandlerWorkflowSubmit(source) => requestHandlerActor ! WorkflowStoreActor.SubmitWorkflow(source) - - case WorkflowStoreSubmitActor.WorkflowSubmittedToStore(id) => - context.parent ! RequestComplete((StatusCodes.Created, WorkflowSubmitResponse(id.toString, WorkflowSubmitted.toString))) - - case ApiHandlerWorkflowSubmitBatch(sources) => requestHandlerActor ! - WorkflowStoreActor.BatchSubmitWorkflows(sources.map(w => - WorkflowSourceFilesCollection( - workflowSource = w.workflowSource, - workflowType = w.workflowType, - workflowTypeVersion = w.workflowTypeVersion, - inputsJson = w.inputsJson, - workflowOptionsJson = w.workflowOptionsJson, - labelsJson = w.labelsJson, - importsFile = w.importsZipFileOption))) - - - case WorkflowStoreSubmitActor.WorkflowsBatchSubmittedToStore(ids) => - val responses = ids map { id => WorkflowSubmitResponse(id.toString, WorkflowSubmitted.toString) } - context.parent ! 
RequestComplete((StatusCodes.OK, responses.toList)) - } -} diff --git a/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala b/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala index 53686b575..f59ee64d3 100644 --- a/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala +++ b/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala @@ -1,414 +1,270 @@ package cromwell.webservice -import akka.actor._ +import java.util.UUID + +import akka.actor.{ActorRef, ActorRefFactory} +import akka.http.scaladsl.server.Directives._ + +import scala.concurrent.{ExecutionContext, Future} +import akka.http.scaladsl.model._ +import akka.http.scaladsl.model.Multipart.BodyPart +import akka.stream.ActorMaterializer +import cromwell.engine.backend.BackendConfiguration +import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ +import cromwell.core.{WorkflowAborted, WorkflowId, WorkflowSubmitted} +import cromwell.core.Dispatcher.ApiDispatcher +import cromwell.engine.workflow.workflowstore.{WorkflowStoreActor, WorkflowStoreEngineActor, WorkflowStoreSubmitActor} +import akka.pattern.ask +import akka.util.{ByteString, Timeout} +import net.ceedubs.ficus.Ficus._ +import cromwell.engine.workflow.WorkflowManagerActor +import cromwell.services.metadata.MetadataService._ +import cromwell.webservice.metadata.{MetadataBuilderActor, WorkflowQueryPagination} +import cromwell.webservice.metadata.MetadataBuilderActor.{BuiltMetadataResponse, FailedMetadataResponse, MetadataBuilderActorResponse} +import WorkflowJsonSupport._ +import akka.http.scaladsl.coding.{Deflate, Gzip, NoCoding} +import akka.http.scaladsl.server.Route import cats.data.NonEmptyList import cats.data.Validated.{Invalid, Valid} -import cats.syntax.cartesian._ -import cats.syntax.validated._ -import com.typesafe.config.{Config, ConfigFactory} -import cromwell.core._ +import com.typesafe.config.ConfigFactory import cromwell.core.labels.Labels -import 
cromwell.engine.backend.BackendConfiguration -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffQueryParameter -import cromwell.services.metadata.MetadataService._ -import cromwell.webservice.LabelsManagerActor.{LabelsAddition, LabelsData} -import cromwell.webservice.WorkflowJsonSupport._ -import cromwell.webservice.metadata.MetadataBuilderActor +import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffActor.{BuiltCallCacheDiffResponse, CachedCallNotFoundException, CallCacheDiffActorResponse, FailedCallCacheDiffResponse} +import cromwell.engine.workflow.lifecycle.execution.callcaching.{CallCacheDiffActor, CallCacheDiffQueryParameter} +import cromwell.engine.workflow.workflowstore.WorkflowStoreEngineActor.WorkflowStoreEngineAbortResponse +import cromwell.webservice.LabelsManagerActor._ import lenthall.exception.AggregatedMessageException -import lenthall.validation.ErrorOr.ErrorOr -import org.slf4j.LoggerFactory -import spray.http.MediaTypes._ -import spray.http._ -import spray.httpx.SprayJsonSupport._ -import spray.json._ -import spray.routing._ -import wdl4s.{WdlJson, WdlSource} +import spray.json.JsObject +import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} -trait SwaggerService extends SwaggerUiResourceHttpService { - override def swaggerServiceName = "cromwell" - - override def swaggerUiVersion = "2.1.1" -} - -trait CromwellApiService extends HttpService with PerRequestCreator { - def workflowManagerActor: ActorRef - def workflowStoreActor: ActorRef - def serviceRegistryActor: ActorRef - def callCacheDiffActorProps: Props +trait CromwellApiService { + import cromwell.webservice.CromwellApiService._ - def toMap(someInput: Option[String]): Map[String, JsValue] = { - import spray.json._ - someInput match { - case Some(inputs: String) => inputs.parseJson match { - case JsObject(inputMap) => inputMap - case _ => - 
throw new RuntimeException(s"Submitted inputs couldn't be processed, please check for syntactical errors") - } - case None => Map.empty - } - } - - def mergeMaps(allInputs: Seq[Option[String]]): JsObject = { - val convertToMap = allInputs.map(x => toMap(x)) - JsObject(convertToMap reduce (_ ++ _)) - } - - def metadataBuilderProps: Props = MetadataBuilderActor.props(serviceRegistryActor) - - def labelsManagerActorProps: Props = LabelsManagerActor.props(serviceRegistryActor) - - def handleMetadataRequest(message: AnyRef): Route = { - requestContext => - perRequest(requestContext, metadataBuilderProps, message) - } + implicit def actorRefFactory: ActorRefFactory + implicit val materializer: ActorMaterializer + implicit val ec: ExecutionContext - def handleQueryMetadataRequest(parameters: Seq[(String, String)]): Route = { - requestContext => - perRequest(requestContext, metadataBuilderProps, WorkflowQuery(requestContext.request.uri, parameters)) - } - - def handleCallCachingDiffRequest(parameters: Seq[(String, String)]): Route = { - CallCacheDiffQueryParameter.fromParameters(parameters) match { - case Valid(queryParameter) => requestContext => { - perRequest(requestContext, callCacheDiffActorProps, queryParameter) - } - case Invalid(errors) => failBadRequest(AggregatedMessageException("Wrong parameters for call cache diff query", errors.toList)) - } - } - - protected def failBadRequest(t: Throwable, statusCode: StatusCode = StatusCodes.BadRequest) = respondWithMediaType(`application/json`) { - complete((statusCode, APIResponse.fail(t).toJson.prettyPrint)) - } - - val workflowRoutes = queryRoute ~ queryPostRoute ~ workflowOutputsRoute ~ submitRoute ~ submitBatchRoute ~ callCachingDiffRoute ~ - workflowLogsRoute ~ abortRoute ~ metadataRoute ~ timingRoute ~ statusRoute ~ backendRoute ~ statsRoute ~ versionRoute ~ patchLabelsRoute - - protected def withRecognizedWorkflowId(possibleWorkflowId: String)(recognizedWorkflowId: WorkflowId => Route): Route = { - def 
callback(requestContext: RequestContext) = new ValidationCallback { - // The submitted value is malformed as a UUID and therefore not possibly recognized. - override def onMalformed(possibleWorkflowId: String): Unit = { - val exception = new RuntimeException(s"Invalid workflow ID: '$possibleWorkflowId'.") - failBadRequest(exception)(requestContext) - } - - override def onUnrecognized(possibleWorkflowId: String): Unit = { - val exception = new RuntimeException(s"Unrecognized workflow ID: $possibleWorkflowId") - failBadRequest(exception, StatusCodes.NotFound)(requestContext) - } - - override def onFailure(possibleWorkflowId: String, throwable: Throwable): Unit = { - val exception = new RuntimeException(s"Failed lookup attempt for workflow ID $possibleWorkflowId", throwable) - failBadRequest(exception)(requestContext) - } - - override def onRecognized(workflowId: WorkflowId): Unit = { - recognizedWorkflowId(workflowId)(requestContext) - } - } + val workflowStoreActor: ActorRef + val workflowManagerActor: ActorRef + val serviceRegistryActor: ActorRef - requestContext => { - val message = ValidateWorkflowIdAndExecute(possibleWorkflowId, callback(requestContext)) - serviceRegistryActor ! 
message - } - } + // Derive timeouts (implicit and not) from akka http's request timeout since there's no point in being higher than that + implicit val duration = ConfigFactory.load().as[FiniteDuration]("akka.http.server.request-timeout") + implicit val timeout: Timeout = duration - def statusRoute = - path("workflows" / Segment / Segment / "status") { (version, possibleWorkflowId) => + val routes = + path("workflows" / Segment / "backends") { version => + get { complete(backendResponse) } + } ~ + path("engine" / Segment / "stats") { version => get { - withRecognizedWorkflowId(possibleWorkflowId) { id => - handleMetadataRequest(GetStatus(id)) + onComplete(workflowManagerActor.ask(WorkflowManagerActor.EngineStatsCommand).mapTo[EngineStatsActor.EngineStats]) { + case Success(stats) => complete(stats) + case Failure(_) => new RuntimeException("Unable to gather engine stats").failRequest(StatusCodes.InternalServerError) } } - } - - def queryRoute = + } ~ + path("engine" / Segment / "version") { version => + get { complete(versionResponse) } + } ~ + path("workflows" / Segment / Segment / "status") { (version, possibleWorkflowId) => + get { metadataBuilderRequest(possibleWorkflowId, (w: WorkflowId) => GetStatus(w)) } + } ~ + path("workflows" / Segment / Segment / "outputs") { (version, possibleWorkflowId) => + get { metadataBuilderRequest(possibleWorkflowId, (w: WorkflowId) => WorkflowOutputs(w)) } + } ~ + path("workflows" / Segment / Segment / "logs") { (version, possibleWorkflowId) => + get { metadataBuilderRequest(possibleWorkflowId, (w: WorkflowId) => GetLogs(w)) } + } ~ path("workflows" / Segment / "query") { version => + (post | get) { + parameterSeq { parameters => + extractUri { uri => + metadataQueryRequest(parameters, uri) + } + } + } + } ~ + encodeResponseWith(Gzip, Deflate, NoCoding) { + path("workflows" / Segment / Segment / "metadata") { (version, possibleWorkflowId) => + parameters('includeKey.*, 'excludeKey.*, 'expandSubWorkflows.as[Boolean].?) 
{ (includeKeys, excludeKeys, expandSubWorkflowsOption) => + val includeKeysOption = NonEmptyList.fromList(includeKeys.toList) + val excludeKeysOption = NonEmptyList.fromList(excludeKeys.toList) + val expandSubWorkflows = expandSubWorkflowsOption.getOrElse(false) + + (includeKeysOption, excludeKeysOption) match { + case (Some(_), Some(_)) => + val e = new IllegalArgumentException("includeKey and excludeKey may not be specified together") + e.failRequest(StatusCodes.BadRequest) + case (_, _) => metadataBuilderRequest(possibleWorkflowId, (w: WorkflowId) => GetSingleWorkflowMetadataAction(w, includeKeysOption, excludeKeysOption, expandSubWorkflows)) + } + } + } + } ~ + path("workflows" / Segment / "callcaching" / "diff") { version => parameterSeq { parameters => get { - handleQueryMetadataRequest(parameters) + CallCacheDiffQueryParameter.fromParameters(parameters) match { + case Valid(queryParameter) => + val diffActor = actorRefFactory.actorOf(CallCacheDiffActor.props(serviceRegistryActor), "CallCacheDiffActor-" + UUID.randomUUID()) + onComplete(diffActor.ask(queryParameter).mapTo[CallCacheDiffActorResponse]) { + case Success(r: BuiltCallCacheDiffResponse) => complete(r.response) + case Success(r: FailedCallCacheDiffResponse) => r.reason.errorRequest(StatusCodes.InternalServerError) + case Failure(e: CachedCallNotFoundException) => e.errorRequest(StatusCodes.NotFound) + case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) + } + case Invalid(errors) => + val e = AggregatedMessageException("Wrong parameters for call cache diff query", errors.toList) + e.errorRequest(StatusCodes.BadRequest) + } } } - } - - def queryPostRoute = - path("workflows" / Segment / "query") { version => - entity(as[Seq[Map[String, String]]]) { parameterMap => - post { - handleQueryMetadataRequest(parameterMap.flatMap(_.toSeq)) - } + } ~ + path("workflows" / Segment / Segment / "timing") { (version, possibleWorkflowId) => + onComplete(validateWorkflowId(possibleWorkflowId)) { + case 
Success(_) => getFromResource("workflowTimings/workflowTimings.html") + case Failure(e) => e.failRequest(StatusCodes.InternalServerError) } - } - - def abortRoute = + } ~ path("workflows" / Segment / Segment / "abort") { (version, possibleWorkflowId) => post { - withRecognizedWorkflowId(possibleWorkflowId) { id => - requestContext => perRequest(requestContext, CromwellApiHandler.props(workflowStoreActor), CromwellApiHandler.ApiHandlerWorkflowAbort(id, workflowManagerActor)) + val response = validateWorkflowId(possibleWorkflowId) flatMap { w => + workflowStoreActor.ask(WorkflowStoreActor.AbortWorkflow(w, workflowManagerActor)).mapTo[WorkflowStoreEngineAbortResponse] } - } - } - def callCachingDiffRoute = - path("workflows" / Segment / "callcaching" / "diff") { version => - parameterSeq { parameters => - get { - handleCallCachingDiffRequest(parameters) + onComplete(response) { + case Success(WorkflowStoreEngineActor.WorkflowAborted(id)) => complete(WorkflowAbortResponse(id.toString, WorkflowAborted.toString)) + case Success(WorkflowStoreEngineActor.WorkflowAbortFailed(_, e: IllegalStateException)) => e.errorRequest(StatusCodes.Forbidden) + case Success(WorkflowStoreEngineActor.WorkflowAbortFailed(_, e: WorkflowNotFoundException)) => e.errorRequest(StatusCodes.NotFound) + case Success(WorkflowStoreEngineActor.WorkflowAbortFailed(_, e)) => e.errorRequest(StatusCodes.InternalServerError) + case Failure(e: UnrecognizedWorkflowException) => e.failRequest(StatusCodes.NotFound) + case Failure(e: InvalidWorkflowException) => e.failRequest(StatusCodes.BadRequest) + case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) } } - } - - - def patchLabelsRoute = + } ~ path("workflows" / Segment / Segment / "labels") { (version, possibleWorkflowId) => entity(as[Map[String, String]]) { parameterMap => patch { - withRecognizedWorkflowId(possibleWorkflowId) { id => - requestContext => - Labels.validateMapOfLabels(parameterMap) match { - case Valid(labels) => - 
perRequest(requestContext, labelsManagerActorProps, LabelsAddition(LabelsData(id, labels))) - case Invalid(err) => failBadRequest(new IllegalArgumentException(err.toList.mkString(",")))(requestContext) + Labels.validateMapOfLabels(parameterMap) match { + case Valid(labels) => + val response = validateWorkflowId(possibleWorkflowId) flatMap { id => + val lma = actorRefFactory.actorOf(LabelsManagerActor.props(serviceRegistryActor).withDispatcher(ApiDispatcher)) + lma.ask(LabelsAddition(LabelsData(id, labels))).mapTo[LabelsManagerActorResponse] } - } - } - } - } - - case class PartialWorkflowSources - ( - workflowSource: Option[WdlSource], - workflowType: Option[WorkflowType], - workflowTypeVersion: Option[WorkflowTypeVersion], - workflowInputs: Vector[WdlJson], - workflowInputsAux: Map[Int, WdlJson], - workflowOptions: Option[WorkflowOptionsJson], - customLabels: Option[WdlJson], - zippedImports: Option[Array[Byte]]) - - object PartialWorkflowSources { - - val log = LoggerFactory.getLogger(classOf[PartialWorkflowSources]) - - def empty = PartialWorkflowSources( - workflowSource = None, - // TODO do not hardcode, especially not out here at the boundary layer good gravy - workflowType = Option("WDL"), - workflowTypeVersion = None, - workflowInputs = Vector.empty, - workflowInputsAux = Map.empty, - workflowOptions = None, - customLabels = None, - zippedImports = None - ) - - private def workflowInputs(bodyPart: BodyPart): Vector[WdlJson] = { - import spray.json._ - bodyPart.entity.data.asString.parseJson match { - case JsArray(Seq(x, xs@_*)) => (Vector(x) ++ xs).map(_.compactPrint) - case JsArray(_) => Vector.empty - case v: JsValue => Vector(v.compactPrint) - } - } - - def partialSourcesToSourceCollections(partialSources: ErrorOr[PartialWorkflowSources], allowNoInputs: Boolean): ErrorOr[Seq[WorkflowSourceFilesCollection]] = { - - def validateInputs(pws: PartialWorkflowSources): ErrorOr[Seq[WdlJson]] = - (pws.workflowInputs.isEmpty, allowNoInputs) match { - case (true, 
true) => Vector("{}").validNel - case (true, false) => "No inputs were provided".invalidNel - case _ => - val sortedInputAuxes = pws.workflowInputsAux.toSeq.sortBy { case (index, _) => index } map { case(_, inputJson) => Option(inputJson) } - (pws.workflowInputs map { workflowInputSet: WdlJson => mergeMaps(Seq(Option(workflowInputSet)) ++ sortedInputAuxes).toString }).validNel - } + onComplete(response) { + case Success(r: BuiltLabelsManagerResponse) => complete(r.response) + case Success(e: FailedLabelsManagerResponse) => e.reason.failRequest(StatusCodes.InternalServerError) + case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) - def validateOptions(options: Option[WorkflowOptionsJson]): ErrorOr[WorkflowOptions] = - WorkflowOptions.fromJsonString(options.getOrElse("{}")).tryToErrorOr leftMap { _ map { i => s"Invalid workflow options provided: $i" } } - - def validateWorkflowSources(partialSource: PartialWorkflowSources): ErrorOr[WdlJson] = partialSource.workflowSource match { - case Some(src) => src.validNel - case _ => s"Incomplete workflow submission: $partialSource".invalidNel - } - - partialSources match { - case Valid(partialSource) => - (validateWorkflowSources(partialSource) |@| validateInputs(partialSource) |@| validateOptions(partialSource.workflowOptions)) map { - case (wfSource, wfInputs, wfOptions) => - wfInputs.map(inputsJson => WorkflowSourceFilesCollection( - workflowSource = wfSource, - workflowType = partialSource.workflowType, - workflowTypeVersion = partialSource.workflowTypeVersion, - inputsJson = inputsJson, - workflowOptionsJson = wfOptions.asPrettyJson, - labelsJson = partialSource.customLabels.getOrElse("{}"), - importsFile = partialSource.zippedImports)) - } - case Invalid(err) => err.invalid - } - } - - def deprecationWarning(out: String, in: String): Unit = { - val warning = - s""" - |The '$out' parameter name has been deprecated in favor of '$in'. - |Support for '$out' will be removed from future versions of Cromwell. 
- |Please switch to using '$in' in future submissions. - """.stripMargin - log.warn(warning) - } - - def fromSubmitRoute(formData: MultipartFormData, allowNoInputs: Boolean): Try[Seq[WorkflowSourceFilesCollection]] = { - val partialSources = Try(formData.fields.foldLeft(PartialWorkflowSources.empty) { (partialSources: PartialWorkflowSources, bodyPart: BodyPart) => - val name = bodyPart.name - lazy val data = bodyPart.entity.data - if (name.contains("wdlSource") || name.contains("workflowSource")) { - if (name.contains("wdlSource")) deprecationWarning(out = "wdlSource", in = "workflowSource") - partialSources.copy(workflowSource = Option(data.asString)) - } else if (name.contains("workflowType")) { - partialSources.copy(workflowType = Option(data.asString)) - } else if (name.contains("workflowTypeVersion")) { - partialSources.copy(workflowTypeVersion = Option(data.asString)) - } else if (name.contains("workflowInputs")) { - partialSources.copy(workflowInputs = workflowInputs(bodyPart)) - } else if (name.forall(_.startsWith("workflowInputs_"))) { - val index = name.get.stripPrefix("workflowInputs_").toInt - partialSources.copy(workflowInputsAux = partialSources.workflowInputsAux + (index -> data.asString)) - } else if (name.contains("workflowOptions")) { - partialSources.copy(workflowOptions = Option(data.asString)) - } else if (name.contains("wdlDependencies") || name.contains("workflowDependencies")) { - if (name.contains("wdlDependencies")) deprecationWarning(out = "wdlDependencies", in = "workflowDependencies") - partialSources.copy(zippedImports = Option(data.toByteArray)) - } else if (name.contains("customLabels")) { - partialSources.copy(customLabels = Option(data.asString)) - } else { - throw new IllegalArgumentException(s"Unexpected body part name: ${name.getOrElse("None")}") - } - }) - partialSourcesToSourceCollections(partialSources.tryToErrorOr, allowNoInputs).errorOrToTry - } - } - - def submitRoute = - path("workflows" / Segment) { version => - post { - 
entity(as[MultipartFormData]) { formData => - PartialWorkflowSources.fromSubmitRoute(formData, allowNoInputs = true) match { - case Success(workflowSourceFiles) if workflowSourceFiles.size == 1 => - requestContext => { - perRequest(requestContext, CromwellApiHandler.props(workflowStoreActor), CromwellApiHandler.ApiHandlerWorkflowSubmit(workflowSourceFiles.head)) } - case Success(workflowSourceFiles) => - failBadRequest(new IllegalArgumentException("To submit more than one workflow at a time, use the batch endpoint.")) - case Failure(t) => - failBadRequest(t) + case Invalid(e) => + val iae = new IllegalArgumentException(e.toList.mkString(",")) + iae.failRequest(StatusCodes.BadRequest) } } } - } - - def submitBatchRoute = - path("workflows" / Segment / "batch") { version => + } ~ + path("workflows" / Segment) { version => post { - entity(as[MultipartFormData]) { formData => - PartialWorkflowSources.fromSubmitRoute(formData, allowNoInputs = false) match { - case Success(workflowSourceFiles) => - requestContext => { - perRequest(requestContext, CromwellApiHandler.props(workflowStoreActor), CromwellApiHandler.ApiHandlerWorkflowSubmitBatch(NonEmptyList.fromListUnsafe(workflowSourceFiles.toList))) - } - case Failure(t) => - failBadRequest(t) - } + entity(as[Multipart.FormData]) { formData => + submitRequest(formData, true) } } - } - - def workflowOutputsRoute = - path("workflows" / Segment / Segment / "outputs") { (version, possibleWorkflowId) => - get { - withRecognizedWorkflowId(possibleWorkflowId) { id => - handleMetadataRequest(WorkflowOutputs(id)) - } + } ~ + path("workflows" / Segment / "batch") { version => + post { + entity(as[Multipart.FormData]) { formData => + submitRequest(formData, false) } } + } - def workflowLogsRoute = - path("workflows" / Segment / Segment / "logs") { (version, possibleWorkflowId) => - get { - withRecognizedWorkflowId(possibleWorkflowId) { id => - handleMetadataRequest(GetLogs(id)) + private def submitRequest(formData: Multipart.FormData, 
isSingleSubmission: Boolean): Route = { + val allParts: Future[Map[String, ByteString]] = formData.parts.mapAsync[(String, ByteString)](1) { + case b: BodyPart => b.toStrict(duration).map(strict => b.name -> strict.entity.data) + }.runFold(Map.empty[String, ByteString])((map, tuple) => map + tuple) + + onComplete(allParts) { + case Success(data) => + PartialWorkflowSources.fromSubmitRoute(data, allowNoInputs = isSingleSubmission) match { + case Success(workflowSourceFiles) if isSingleSubmission && workflowSourceFiles.size == 1 => + onComplete(workflowStoreActor.ask(WorkflowStoreActor.SubmitWorkflow(workflowSourceFiles.head)).mapTo[WorkflowStoreSubmitActor.WorkflowSubmittedToStore]) { + case Success(w) => complete((StatusCodes.Created, WorkflowSubmitResponse(w.workflowId.toString, WorkflowSubmitted.toString))) + case Failure(e) => e.failRequest(StatusCodes.InternalServerError) + } + // Catches the case where someone has gone through the single submission endpoint w/ more than one workflow + case Success(workflowSourceFiles) if isSingleSubmission => + val e = new IllegalArgumentException("To submit more than one workflow at a time, use the batch endpoint.") + e.failRequest(StatusCodes.BadRequest) + case Success(workflowSourceFiles) => + onComplete(workflowStoreActor.ask(WorkflowStoreActor.BatchSubmitWorkflows(NonEmptyList.fromListUnsafe(workflowSourceFiles.toList))).mapTo[WorkflowStoreSubmitActor.WorkflowsBatchSubmittedToStore]) { + case Success(w) => + val responses = w.workflowIds map { id => WorkflowSubmitResponse(id.toString, WorkflowSubmitted.toString) } + complete((StatusCodes.Created, responses.toList)) + case Failure(e) => e.failRequest(StatusCodes.InternalServerError) + } + case Failure(t) => t.failRequest(StatusCodes.BadRequest) } - } + case Failure(e) => e.failRequest(StatusCodes.InternalServerError) } + } - def metadataRoute = compressResponse() { - path("workflows" / Segment / Segment / "metadata") { (version, possibleWorkflowId) => - parameterMultiMap { 
parameters => - val includeKeysOption = NonEmptyList.fromList(parameters.getOrElse("includeKey", List.empty)) - val excludeKeysOption = NonEmptyList.fromList(parameters.getOrElse("excludeKey", List.empty)) - val expandSubWorkflowsOption = { - parameters.get("expandSubWorkflows") match { - case Some(v :: Nil) => Try(v.toBoolean) - case _ => Success(false) - } - } - - (includeKeysOption, excludeKeysOption, expandSubWorkflowsOption) match { - case (Some(_), Some(_), _) => - failBadRequest(new IllegalArgumentException("includeKey and excludeKey may not be specified together")) - case (_, _, Success(expandSubWorkflows)) => - withRecognizedWorkflowId(possibleWorkflowId) { id => - handleMetadataRequest(GetSingleWorkflowMetadataAction(id, includeKeysOption, excludeKeysOption, expandSubWorkflows)) - } - case (_, _, Failure(ex)) => failBadRequest(new IllegalArgumentException(ex)) + private def validateWorkflowId(possibleWorkflowId: String): Future[WorkflowId] = { + Try(WorkflowId.fromString(possibleWorkflowId)) match { + case Success(w) => + serviceRegistryActor.ask(ValidateWorkflowId(w)).mapTo[WorkflowValidationResponse] map { + case RecognizedWorkflowId => w + case UnrecognizedWorkflowId => throw UnrecognizedWorkflowException(s"Unrecognized workflow ID: $w") + case FailedToCheckWorkflowId(t) => throw t } - } + case Failure(t) => Future.failed(InvalidWorkflowException(s"Invalid workflow ID: '$possibleWorkflowId'.")) } } - def timingRoute = - path("workflows" / Segment / Segment / "timing") { (version, possibleWorkflowId) => - withRecognizedWorkflowId(possibleWorkflowId) { id => - getFromResource("workflowTimings/workflowTimings.html") - } - } + private def metadataBuilderRequest(possibleWorkflowId: String, request: WorkflowId => ReadAction): Route = { + val metadataBuilderActor = actorRefFactory.actorOf(MetadataBuilderActor.props(serviceRegistryActor).withDispatcher(ApiDispatcher), MetadataBuilderActor.uniqueActorName) + val response = validateWorkflowId(possibleWorkflowId) 
flatMap { w => metadataBuilderActor.ask(request(w)).mapTo[MetadataBuilderActorResponse] } - def statsRoute = - path("engine" / Segment / "stats") { version => - get { - requestContext => - perRequest(requestContext, CromwellApiHandler.props(workflowManagerActor), CromwellApiHandler.ApiHandlerEngineStats) - } + onComplete(response) { + case Success(r: BuiltMetadataResponse) => complete(r.response) + case Success(r: FailedMetadataResponse) => r.reason.errorRequest(StatusCodes.InternalServerError) + case Failure(e: UnrecognizedWorkflowException) => e.failRequest(StatusCodes.NotFound) + case Failure(e: InvalidWorkflowException) => e.failRequest(StatusCodes.BadRequest) + case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) } + } - def versionRoute = - path("engine" / Segment / "version") { version => - get { - complete { - lazy val versionConf = ConfigFactory.load("cromwell-version.conf").getConfig("version") - versionResponse(versionConf) + protected[this] def metadataQueryRequest(parameters: Seq[(String, String)], uri: Uri): Route = { + val response = serviceRegistryActor.ask(WorkflowQuery(parameters)).mapTo[MetadataQueryResponse] + + onComplete(response) { + case Success(w: WorkflowQuerySuccess) => + val headers = WorkflowQueryPagination.generateLinkHeaders(uri, w.meta) + respondWithHeaders(headers) { + complete(w.response) } - } + case Success(w: WorkflowQueryFailure) => w.reason.failRequest(StatusCodes.BadRequest) + case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) } + } +} - def versionResponse(versionConf: Config) = JsObject(Map( - "cromwell" -> versionConf.getString("cromwell").toJson - )) +object CromwellApiService { + import spray.json._ - def backendRoute = - path("workflows" / Segment / "backends") { version => - get { - complete { - // Note that this is not using our standard per-request scheme, since the result is pre-calculated already - backendResponse - } - } - } + implicit class EnhancedThrowable(val e: Throwable) extends 
AnyVal { + def failRequest(statusCode: StatusCode): Route = complete((statusCode, APIResponse.fail(e).toJson.prettyPrint)) + def errorRequest(statusCode: StatusCode): Route = complete((statusCode, APIResponse.error(e).toJson.prettyPrint)) + } - val backendResponse = JsObject(Map( - "supportedBackends" -> BackendConfiguration.AllBackendEntries.map(_.name).sorted.toJson, - "defaultBackend" -> BackendConfiguration.DefaultBackendEntry.name.toJson - )) + final case class BackendResponse(supportedBackends: List[String], defaultBackend: String) -} + final case class UnrecognizedWorkflowException(message: String) extends Exception(message) + final case class InvalidWorkflowException(message: String) extends Exception(message) + val backendResponse = BackendResponse(BackendConfiguration.AllBackendEntries.map(_.name).sorted, BackendConfiguration.DefaultBackendEntry.name) + val versionResponse = JsObject(Map("cromwell" -> ConfigFactory.load("cromwell-version.conf").getConfig("version").getString("cromwell").toJson)) +} diff --git a/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala b/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala index be70dad26..7a27e9ce1 100644 --- a/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala +++ b/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala @@ -6,9 +6,6 @@ import cromwell.core.{Dispatcher, WorkflowId, WorkflowMetadataKeys} import cromwell.services.metadata.{MetadataEvent, MetadataKey, MetadataValue} import cromwell.services.metadata.MetadataService._ import cromwell.webservice.LabelsManagerActor._ -import cromwell.webservice.PerRequest.RequestComplete -import spray.http.StatusCodes -import spray.httpx.SprayJsonSupport._ import spray.json.{DefaultJsonProtocol, JsObject, JsString} import scala.language.postfixOps @@ -43,6 +40,10 @@ object LabelsManagerActor { def labelsToMetadataEvents(labels: Labels, workflowId: WorkflowId): Iterable[MetadataEvent] = { labels.value map { l => 
MetadataEvent(MetadataKey(workflowId, None, s"${WorkflowMetadataKeys.Labels}:${l.key}"), MetadataValue(l.value)) } } + + sealed abstract class LabelsManagerActorResponse + final case class BuiltLabelsManagerResponse(response: JsObject) extends LabelsManagerActorResponse + final case class FailedLabelsManagerResponse(reason: Throwable) extends LabelsManagerActorResponse } class LabelsManagerActor(serviceRegistryActor: ActorRef) extends Actor with ActorLogging with DefaultJsonProtocol { @@ -50,18 +51,18 @@ class LabelsManagerActor(serviceRegistryActor: ActorRef) extends Actor with Acto implicit val ec = context.dispatcher private var wfId: Option[WorkflowId] = None - - import WorkflowJsonSupport._ + private var target: ActorRef = ActorRef.noSender def receive = { case LabelsAddition(data) => wfId = Option(data.workflowId) + target = sender() serviceRegistryActor ! PutMetadataActionAndRespond(labelsToMetadataEvents(data.labels, data.workflowId), self) case MetadataWriteSuccess(events) => - val response = processLabelsResponse(wfId.get, metadataEventsToLabels(events)) - context.parent ! RequestComplete((StatusCodes.OK, response)) + target ! BuiltLabelsManagerResponse(processLabelsResponse(wfId.get, metadataEventsToLabels(events))) + context stop self case MetadataWriteFailure(failure, events) => - val response = APIResponse.fail(new RuntimeException(s"Unable to update labels for ${wfId.get} due to ${failure.getMessage}")) - context.parent ! RequestComplete((StatusCodes.InternalServerError, response)) + target ! 
FailedLabelsManagerResponse(new RuntimeException(s"Unable to update labels for ${wfId.get} due to ${failure.getMessage}")) + context stop self } } diff --git a/engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala b/engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala new file mode 100644 index 000000000..f4610fc5c --- /dev/null +++ b/engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala @@ -0,0 +1,142 @@ +package cromwell.webservice + +import akka.util.ByteString +import cromwell.core.{WorkflowOptions, WorkflowOptionsJson, WorkflowSourceFilesCollection} +import wdl4s.{WdlJson, WdlSource} +import cats.data.Validated.{Invalid, Valid} +import cats.syntax.validated._ +import cats.syntax.cartesian._ +import lenthall.validation.ErrorOr.ErrorOr +import cromwell.core._ +import org.slf4j.LoggerFactory +import spray.json.{JsObject, JsValue} + +import scala.util.Try + +final case class PartialWorkflowSources(workflowSource: Option[WdlSource], + workflowType: Option[WorkflowType], + workflowTypeVersion: Option[WorkflowTypeVersion], + workflowInputs: Vector[WdlJson], + workflowInputsAux: Map[Int, WdlJson], + workflowOptions: Option[WorkflowOptionsJson], + customLabels: Option[WdlJson], + zippedImports: Option[Array[Byte]]) + +object PartialWorkflowSources { + val log = LoggerFactory.getLogger(classOf[PartialWorkflowSources]) + + def empty = PartialWorkflowSources( + workflowSource = None, + // TODO do not hardcode, especially not out here at the boundary layer good gravy + workflowType = Option("WDL"), + workflowTypeVersion = None, + workflowInputs = Vector.empty, + workflowInputsAux = Map.empty, + workflowOptions = None, + customLabels = None, + zippedImports = None + ) + + def fromSubmitRoute(formData: Map[String, ByteString], allowNoInputs: Boolean): Try[Seq[WorkflowSourceFilesCollection]] = { + val partialSources = Try(formData.foldLeft(PartialWorkflowSources.empty) { (partialSources: PartialWorkflowSources, kv: (String, 
ByteString)) => + val name = kv._1 + val data = kv._2 + + if (name == "wdlSource" || name == "workflowSource") { + if (name == "wdlSource") deprecationWarning(out = "wdlSource", in = "workflowSource") + partialSources.copy(workflowSource = Option(data.utf8String)) + } else if (name == "workflowType") { + partialSources.copy(workflowType = Option(data.utf8String)) + } else if (name == "workflowTypeVersion") { + partialSources.copy(workflowTypeVersion = Option(data.utf8String)) + } else if (name == "workflowInputs") { + partialSources.copy(workflowInputs = workflowInputs(data.utf8String)) + } else if (name.startsWith("workflowInputs_")) { + val index = name.stripPrefix("workflowInputs_").toInt + partialSources.copy(workflowInputsAux = partialSources.workflowInputsAux + (index -> data.utf8String)) + } else if (name == "workflowOptions") { + partialSources.copy(workflowOptions = Option(data.utf8String)) + } else if (name == "wdlDependencies" || name == "workflowDependencies") { + if (name == "wdlDependencies") deprecationWarning(out = "wdlDependencies", in = "workflowDependencies") + partialSources.copy(zippedImports = Option(data.toArray)) + } else if (name == "customLabels") { + partialSources.copy(customLabels = Option(data.utf8String)) + } else { + throw new IllegalArgumentException(s"Unexpected body part name: $name") + } + }) + + partialSourcesToSourceCollections(partialSources.tryToErrorOr, allowNoInputs).errorOrToTry + } + + private def workflowInputs(data: String): Vector[WdlJson] = { + import spray.json._ + data.parseJson match { + case JsArray(Seq(x, xs@_*)) => (Vector(x) ++ xs).map(_.compactPrint) + case JsArray(_) => Vector.empty + case v: JsValue => Vector(v.compactPrint) + } + } + + private def partialSourcesToSourceCollections(partialSources: ErrorOr[PartialWorkflowSources], allowNoInputs: Boolean): ErrorOr[Seq[WorkflowSourceFilesCollection]] = { + def validateInputs(pws: PartialWorkflowSources): ErrorOr[Seq[WdlJson]] = + (pws.workflowInputs.isEmpty, 
allowNoInputs) match { + case (true, true) => Vector("{}").validNel + case (true, false) => "No inputs were provided".invalidNel + case _ => + val sortedInputAuxes = pws.workflowInputsAux.toSeq.sortBy { case (index, _) => index } map { case(_, inputJson) => Option(inputJson) } + (pws.workflowInputs map { workflowInputSet: WdlJson => mergeMaps(Seq(Option(workflowInputSet)) ++ sortedInputAuxes).toString }).validNel + } + + def validateOptions(options: Option[WorkflowOptionsJson]): ErrorOr[WorkflowOptions] = + WorkflowOptions.fromJsonString(options.getOrElse("{}")).tryToErrorOr leftMap { _ map { i => s"Invalid workflow options provided: $i" } } + + def validateWorkflowSource(partialSource: PartialWorkflowSources): ErrorOr[WdlJson] = partialSource.workflowSource match { + case Some(src) => src.validNel + case _ => s"Incomplete workflow submission: $partialSource".invalidNel + } + + partialSources match { + case Valid(partialSource) => + (validateWorkflowSource(partialSource) |@| validateInputs(partialSource) |@| validateOptions(partialSource.workflowOptions)) map { + case (wfSource, wfInputs, wfOptions) => + wfInputs.map(inputsJson => WorkflowSourceFilesCollection( + workflowSource = wfSource, + workflowType = partialSource.workflowType, + workflowTypeVersion = partialSource.workflowTypeVersion, + inputsJson = inputsJson, + workflowOptionsJson = wfOptions.asPrettyJson, + labelsJson = partialSource.customLabels.getOrElse("{}"), + importsFile = partialSource.zippedImports)) } + case Invalid(err) => err.invalid + } + } + + private def deprecationWarning(out: String, in: String): Unit = { + val warning = + s""" + |The '$out' parameter name has been deprecated in favor of '$in'. + |Support for '$out' will be removed from future versions of Cromwell. + |Please switch to using '$in' in future submissions. 
+ """.stripMargin + log.warn(warning) + } + + def mergeMaps(allInputs: Seq[Option[String]]): JsObject = { + val convertToMap = allInputs.map(x => toMap(x)) + JsObject(convertToMap reduce (_ ++ _)) + } + + private def toMap(someInput: Option[String]): Map[String, JsValue] = { + import spray.json._ + someInput match { + case Some(inputs: String) => inputs.parseJson match { + case JsObject(inputMap) => inputMap + case _ => + throw new RuntimeException(s"Submitted inputs couldn't be processed, please check for syntactical errors") + } + case None => Map.empty + } + } +} + diff --git a/engine/src/main/scala/cromwell/webservice/PerRequest.scala b/engine/src/main/scala/cromwell/webservice/PerRequest.scala deleted file mode 100644 index 415005785..000000000 --- a/engine/src/main/scala/cromwell/webservice/PerRequest.scala +++ /dev/null @@ -1,120 +0,0 @@ -package cromwell.webservice - -import java.util.UUID - -import akka.actor.SupervisorStrategy.Stop -import akka.actor.{OneForOneStrategy, _} -import cromwell.core.Dispatcher.ApiDispatcher -import cromwell.webservice.PerRequest._ -import spray.http.StatusCodes._ -import spray.http._ -import spray.httpx.marshalling.ToResponseMarshaller -import spray.routing.RequestContext - -import scala.concurrent.duration._ -import scala.language.postfixOps - -/** - * This actor controls the lifecycle of a request. It is responsible for forwarding the initial message - * to a target handling actor. This actor waits for the target actor to signal completion (via a message), - * timeout, or handle an exception. It is this actors responsibility to respond to the request and - * shutdown itself and child actors. 
- * - * Request completion can be signaled in 2 ways: - * 1) with just a response object - * 2) with a RequestComplete message which can specify http status code as well as the response - */ -trait PerRequest extends Actor { - import context._ - - def r: RequestContext - def target: ActorRef - def message: AnyRef - def timeout: Duration - - setReceiveTimeout(timeout) - target ! message - - def receive = { - // The [Any] type parameter appears to be required for version of Scala > 2.11.2, - // the @ unchecked is required to muzzle erasure warnings. - case message: RequestComplete[Any] @ unchecked => complete(message.response)(message.marshaller) - case message: RequestCompleteWithHeaders[Any] @ unchecked => complete(message.response, message.headers:_*)(message.marshaller) - case ReceiveTimeout => complete(GatewayTimeout) - case x => - system.log.error("Unsupported response message sent to PreRequest actor: " + Option(x).getOrElse("null").toString) - complete(InternalServerError) - } - - /** - * Complete the request sending the given response and status code - * @param response to send to the caller - * @param marshaller to use for marshalling the response - * @tparam T the type of the response - * @return - */ - private def complete[T](response: T, headers: HttpHeader*)(implicit marshaller: ToResponseMarshaller[T]) = { - val additionalHeaders = None - r.withHttpResponseHeadersMapped(h => h ++ headers ++ additionalHeaders).complete(response) - stop(self) - } - - override val supervisorStrategy = - OneForOneStrategy() { - case e => - system.log.error(e, "error processing request: " + r.request.uri) - r.complete((InternalServerError, e.getMessage)) - Stop - } -} - -object PerRequest { - sealed trait PerRequestMessage - /** - * Report complete, follows same pattern as spray.routing.RequestContext.complete; examples of how to call - * that method should apply here too. E.g. 
even though this method has only one parameter, it can be called - * with 2 where the first is a StatusCode: RequestComplete(StatusCode.Created, response) - */ - case class RequestComplete[T](response: T)(implicit val marshaller: ToResponseMarshaller[T]) extends PerRequestMessage - - /** - * Report complete with response headers. To response with a special status code the first parameter can be a - * tuple where the first element is StatusCode: RequestCompleteWithHeaders((StatusCode.Created, results), header). - * Note that this is here so that RequestComplete above can behave like spray.routing.RequestContext.complete. - */ - case class RequestCompleteWithHeaders[T](response: T, headers: HttpHeader*)(implicit val marshaller: ToResponseMarshaller[T]) extends PerRequestMessage - - /** allows for pattern matching with extraction of marshaller */ - object RequestComplete_ { - def unapply[T](requestComplete: RequestComplete[T]) = Option((requestComplete.response, requestComplete.marshaller)) - } - - /** allows for pattern matching with extraction of marshaller */ - object RequestCompleteWithHeaders_ { - def unapply[T](requestComplete: RequestCompleteWithHeaders[T]) = Option((requestComplete.response, requestComplete.headers, requestComplete.marshaller)) - } - - case class WithProps(r: RequestContext, props: Props, message: AnyRef, timeout: Duration, name: String) extends PerRequest { - lazy val target = context.actorOf(props.withDispatcher(ApiDispatcher), name) - } -} - -/** - * Provides factory methods for creating per request actors - */ -trait PerRequestCreator { - implicit def actorRefFactory: ActorRefFactory - - def perRequest(r: RequestContext, - props: Props, message: AnyRef, - timeout: Duration = 2 minutes, - name: String = PerRequestCreator.endpointActorName): Unit = { - actorRefFactory.actorOf(Props(WithProps(r, props, message, timeout, name)).withDispatcher(ApiDispatcher), name) - () - } -} - -object PerRequestCreator { - // This scheme was changed away from 
the Agora System.nanoTime approach due to actor naming collisions (!) - def endpointActorName = List("Endpoint", java.lang.Thread.currentThread.getStackTrace()(1).getMethodName, UUID.randomUUID()).mkString("-") -} diff --git a/engine/src/main/scala/cromwell/webservice/SprayCanHttpService.scala b/engine/src/main/scala/cromwell/webservice/SprayCanHttpService.scala deleted file mode 100644 index a42bf99a6..000000000 --- a/engine/src/main/scala/cromwell/webservice/SprayCanHttpService.scala +++ /dev/null @@ -1,177 +0,0 @@ -package cromwell.webservice - -import java.net.{InetAddress, InetSocketAddress} - -import akka.actor._ -import akka.io.Tcp.Unbound -import akka.io.{IO, Inet} -import akka.util.Timeout -import spray.can.Http -import spray.can.Http.Bound -import spray.can.server.ServerSettings -import spray.io.ServerSSLEngineProvider - -import scala.collection.immutable -import scala.concurrent._ -import scala.concurrent.duration.Duration - -/** - * Adds bind and unbind to http services running on spray-can. - * - * {{{ - * val httpServiceActorRef: ActorRef = ... 
- * httpServiceActorRef.bind(LoopbackAddress, 8080) onSuccess { - * case boundListener => - * boundListener.unbind() // Unbind the listener - * } - * }}} - */ -object SprayCanHttpService { - - val LoopbackAddress = InetAddress.getLoopbackAddress.getHostAddress - val AnyAddress = new InetSocketAddress(0).getAddress.getHostAddress - - implicit class EnhancedSprayCanActor(val service: ActorRef) extends AnyVal { - - def bind(interface: String = LoopbackAddress, port: Int = 80, backlog: Int = 100, - options: immutable.Traversable[Inet.SocketOption] = Nil, settings: Option[ServerSettings] = None) - (implicit ec: ExecutionContext, timeout: Timeout, actorSystem: ActorSystem, - sslEngineProvider: ServerSSLEngineProvider): Future[BoundListener] = { - Future(Http.Bind(service, interface, port, backlog, options, settings)) flatMap { httpBind => - val promise = Promise[BoundListener]() - actorSystem.actorOf(Props(classOf[SprayCanBindActor], httpBind, promise, timeout)) - promise.future - } - } - - def bindOrShutdown(interface: String = LoopbackAddress, port: Int = 80, backlog: Int = 100, - options: immutable.Traversable[Inet.SocketOption] = Nil, settings: Option[ServerSettings] = None) - (implicit ec: ExecutionContext, actorSystem: ActorSystem, timeout: Timeout, - sslEngineProvider: ServerSSLEngineProvider): Future[BoundListener] = { - bind(interface, port, backlog, options, settings) recover { - case throwable => - actorSystem.log.error(throwable, s"Binding failed interface $interface port $port") - actorSystem.terminate() - /* - Using scala's Await.ready due to akka recommendations: - - http://doc.akka.io/docs/akka/2.4/project/migration-guide-2.3.x-2.4.x.html#Actor_system_shutdown - - https://github.com/akka/akka/blob/v2.4.10/akka-actor/src/main/scala/akka/actor/ActorSystem.scala#L664-L665 - */ - Await.ready(actorSystem.whenTerminated, Duration.Inf) - throw throwable - } - } - - } - - case class BoundListener(bound: Bound, listener: ActorRef) { - - def unbind(duration: 
Duration = Duration.Zero) - (implicit ec: ExecutionContext, actorSystem: ActorSystem, timeout: Timeout): Future[Unbound] = { - Future(Http.Unbind(duration)) flatMap { httpUnbind => - val promise = Promise[Unbound]() - actorSystem.actorOf(Props(classOf[SprayCanUnbindActor], listener, httpUnbind, promise, timeout)) - promise.future - } - } - - def unbindAndShutdown(duration: Duration = Duration.Zero) - (implicit ec: ExecutionContext, actorSystem: ActorSystem, timeout: Timeout) - : Future[Unbound] = { - unbind(duration) andThen { - case _ => - actorSystem.terminate() - /* - Using scala's Await.ready due to akka recommendations: - - http://doc.akka.io/docs/akka/2.4/project/migration-guide-2.3.x-2.4.x.html#Actor_system_shutdown - - https://github.com/akka/akka/blob/v2.4.10/akka-actor/src/main/scala/akka/actor/ActorSystem.scala#L664-L665 - */ - Await.ready(actorSystem.whenTerminated, Duration.Inf) - } - } - - // Alias for testing - private[webservice] def port = bound.localAddress.getPort - - } - - trait BindException { - this: Exception => - val httpBind: Http.Bind - } - - class BindFailedException(message: String, val httpBind: Http.Bind) - extends RuntimeException(message) with BindException - - class BindTimeoutException(message: String, val httpBind: Http.Bind, val timeout: Timeout) - extends TimeoutException(message) with BindException - - class UnexpectedBindMessageException(message: String, val httpBind: Http.Bind, val unexpected: Any) - extends RuntimeException(message) with BindException - - class SprayCanBindActor(httpBind: Http.Bind, promise: Promise[BoundListener], timeout: Timeout) extends Actor { - - override def preStart(): Unit = { - IO(Http)(context.system) ! 
httpBind - context setReceiveTimeout timeout.duration - } - - override def receive = { - case bound: Bound => - val sentBy = sender() - promise trySuccess BoundListener(bound, sentBy) - context stop self - case failed: Http.CommandFailed => - promise tryFailure new BindFailedException(s"Failed to bind to ${httpBind.endpoint}: $failed", httpBind) - context stop self - case ReceiveTimeout => - promise tryFailure new BindTimeoutException( - s"Timeout ${timeout.duration} during bind to ${httpBind.endpoint}", httpBind, timeout) - context stop self - case unexpected => - promise tryFailure new UnexpectedBindMessageException( - s"Unexpected message during bind to ${httpBind.endpoint}: $unexpected", httpBind, unexpected) - context stop self - } - } - - trait UnbindException { - this: Exception => - val httpUnbind: Http.Unbind - } - - class UnbindFailedException(message: String, val httpUnbind: Http.Unbind) - extends RuntimeException(message) with UnbindException - - class UnbindTimeoutException(message: String, val httpUnbind: Http.Unbind, val timeout: Timeout) - extends TimeoutException(message) with UnbindException - - class UnexpectedUnbindMessageException(message: String, val httpUnbind: Http.Unbind, val unexpected: Any) - extends RuntimeException(message) with UnbindException - - class SprayCanUnbindActor(listener: ActorRef, httpUnbind: Http.Unbind, promise: Promise[Unbound], timeout: Timeout) - extends Actor { - - override def preStart(): Unit = { - listener ! 
httpUnbind - context setReceiveTimeout timeout.duration - } - - override def receive = { - case unbound: Unbound => - promise trySuccess unbound - context stop self - case failed: Http.CommandFailed => - promise tryFailure new UnbindFailedException(s"Failed to unbind: $failed", httpUnbind) - context stop self - case ReceiveTimeout => - promise tryFailure new UnbindTimeoutException(s"Timeout ${timeout.duration} during unbind", httpUnbind, timeout) - context stop self - case unexpected => - promise tryFailure new UnexpectedUnbindMessageException( - s"Unexpected message during unbind: $unexpected", httpUnbind, unexpected) - context stop self - } - } - -} diff --git a/engine/src/main/scala/cromwell/webservice/SwaggerService.scala b/engine/src/main/scala/cromwell/webservice/SwaggerService.scala new file mode 100644 index 000000000..0af4edd9b --- /dev/null +++ b/engine/src/main/scala/cromwell/webservice/SwaggerService.scala @@ -0,0 +1,7 @@ +package cromwell.webservice + +trait SwaggerService extends SwaggerUiResourceHttpService { + override def swaggerServiceName = "cromwell" + + override def swaggerUiVersion = "2.1.1" +} diff --git a/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala b/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala index f187bbfde..ea0eb98ff 100644 --- a/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala +++ b/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala @@ -1,14 +1,15 @@ package cromwell.webservice +import akka.http.scaladsl.model.StatusCodes +import akka.http.scaladsl.server.Route import com.typesafe.config.Config import net.ceedubs.ficus.Ficus._ -import spray.http.StatusCodes -import spray.routing.HttpService +import akka.http.scaladsl.server.Directives._ /** * Serves up the swagger UI from org.webjars/swagger-ui. */ -trait SwaggerUiHttpService extends HttpService { +trait SwaggerUiHttpService { /** * @return The version of the org.webjars/swagger-ui artifact. 
For example "2.1.1". */ @@ -21,12 +22,12 @@ trait SwaggerUiHttpService extends HttpService { * * @return The base URL used by the application, or the empty string if there is no base URL. For example "/myapp". */ - def swaggerUiBaseUrl = "" + def swaggerUiBaseUrl: String = "" /** * @return The path to the swagger UI html documents. For example "swagger" */ - def swaggerUiPath = "swagger" + def swaggerUiPath: String = "swagger" /** * The path to the actual swagger documentation in either yaml or json, to be rendered by the swagger UI html. @@ -34,14 +35,14 @@ trait SwaggerUiHttpService extends HttpService { * @return The path to the api documentation to render in the swagger UI. * For example "api-docs" or "swagger/lenthall.yaml". */ - def swaggerUiDocsPath = "api-docs" + def swaggerUiDocsPath: String = "api-docs" /** * @return When true, if someone requests / (or /baseUrl if setup), redirect to the swagger UI. */ - def swaggerUiFromRoot = true + def swaggerUiFromRoot: Boolean = true - private def routeFromRoot = get { + private def routeFromRoot: Route = get { pathEndOrSingleSlash { // Redirect / to the swagger UI redirect(s"$swaggerUiBaseUrl/$swaggerUiPath", StatusCodes.TemporaryRedirect) @@ -53,7 +54,7 @@ trait SwaggerUiHttpService extends HttpService { * * @return Route serving the swagger UI. */ - final def swaggerUiRoute = { + final def swaggerUiRoute: Route = { val route = get { pathPrefix(separateOnSlashes(swaggerUiPath)) { // when the user hits the doc url, redirect to the index.html with api docs specified on the url @@ -92,11 +93,11 @@ trait SwaggerUiConfigHttpService extends SwaggerUiHttpService { * directory and path on the classpath must match the path for route. The resource can be any file type supported by the * swagger UI, but defaults to "yaml". This is an alternative to spray-swagger's SwaggerHttpService. 
*/ -trait SwaggerResourceHttpService extends HttpService { +trait SwaggerResourceHttpService { /** * @return The directory for the resource under the classpath, and in the url */ - def swaggerDirectory = "swagger" + def swaggerDirectory: String = "swagger" /** * @return Name of the service, used to map the documentation resource at "/uiPath/serviceName.resourceType". @@ -106,7 +107,7 @@ trait SwaggerResourceHttpService extends HttpService { /** * @return The type of the resource, usually "yaml" or "json". */ - def swaggerResourceType = "yaml" + def swaggerResourceType: String = "yaml" /** * Swagger UI sends HTTP OPTIONS before ALL requests, and expects a status 200 / OK. When true (the default) the @@ -119,7 +120,7 @@ trait SwaggerResourceHttpService extends HttpService { * * @return True if status code 200 should be returned for HTTP OPTIONS requests for the swagger resource. */ - def swaggerAllOptionsOk = true + def swaggerAllOptionsOk: Boolean = true /** * @return The path to the swagger docs. @@ -129,7 +130,7 @@ trait SwaggerResourceHttpService extends HttpService { /** * @return A route that returns the swagger resource. */ - final def swaggerResourceRoute = { + final def swaggerResourceRoute: Route = { val swaggerDocsDirective = path(separateOnSlashes(swaggerDocsPath)) val route = get { swaggerDocsDirective { @@ -156,5 +157,5 @@ trait SwaggerUiResourceHttpService extends SwaggerUiHttpService with SwaggerReso /** * @return A route that redirects to the swagger UI and returns the swagger resource. 
*/ - final def swaggerUiResourceRoute = swaggerUiRoute ~ swaggerResourceRoute + final def swaggerUiResourceRoute: Route = swaggerUiRoute ~ swaggerResourceRoute } diff --git a/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala b/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala index a71395efd..a813278dc 100644 --- a/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala +++ b/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala @@ -10,7 +10,9 @@ import MetadataService._ import cromwell.util.JsonFormatting.WdlValueJsonFormatter import WdlValueJsonFormatter._ import better.files.File -import spray.json._ +import cromwell.webservice.CromwellApiService.BackendResponse +import cromwell.webservice.metadata.MetadataBuilderActor.BuiltMetadataResponse +import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat} object WorkflowJsonSupport extends DefaultJsonProtocol { implicit val workflowStatusResponseProtocol = jsonFormat2(WorkflowStatusResponse) @@ -19,6 +21,8 @@ object WorkflowJsonSupport extends DefaultJsonProtocol { implicit val workflowOutputResponseProtocol = jsonFormat2(WorkflowOutputResponse) implicit val callOutputResponseProtocol = jsonFormat3(CallOutputResponse) implicit val engineStatsProtocol = jsonFormat2(EngineStatsActor.EngineStats) + implicit val BackendResponseFormat = jsonFormat2(BackendResponse) + implicit val BuiltStatusResponseFormat = jsonFormat1(BuiltMetadataResponse) implicit val callAttempt = jsonFormat2(CallAttempt) implicit val workflowSourceData = jsonFormat6(WorkflowSourceFilesWithoutImports) diff --git a/engine/src/main/scala/cromwell/webservice/WrappedRoute.scala b/engine/src/main/scala/cromwell/webservice/WrappedRoute.scala index e5f221429..1b47e9e69 100644 --- a/engine/src/main/scala/cromwell/webservice/WrappedRoute.scala +++ b/engine/src/main/scala/cromwell/webservice/WrappedRoute.scala @@ -1,7 +1,7 @@ package cromwell.webservice -import spray.routing._ -import 
spray.routing.directives.PathDirectives +import akka.http.scaladsl.server.{PathMatcher0, Route} + object WrappedRoute { @@ -19,8 +19,8 @@ object WrappedRoute { * @return The wrappedRoute, followed optionally by the unwrappedRoute if routeUnwrapped is true. */ def wrapped(wrappedPathPrefix: PathMatcher0, routeUnwrapped: Boolean = false): Route = { - import PathDirectives._ - import RouteConcatenation._ + import akka.http.scaladsl.server.directives.PathDirectives._ + import akka.http.scaladsl.server.RouteConcatenation._ val route = pathPrefix(wrappedPathPrefix) { unwrappedRoute } diff --git a/engine/src/main/scala/cromwell/webservice/metadata/MetadataBuilderActor.scala b/engine/src/main/scala/cromwell/webservice/metadata/MetadataBuilderActor.scala index b0bc17e01..8da17f474 100644 --- a/engine/src/main/scala/cromwell/webservice/metadata/MetadataBuilderActor.scala +++ b/engine/src/main/scala/cromwell/webservice/metadata/MetadataBuilderActor.scala @@ -1,5 +1,7 @@ package cromwell.webservice.metadata +import java.util.UUID + import akka.actor.{ActorRef, LoggingFSM, Props} import cromwell.webservice.metadata.MetadataComponent._ import cromwell.core.Dispatcher.ApiDispatcher @@ -8,15 +10,16 @@ import cromwell.core.{WorkflowId, WorkflowMetadataKeys, WorkflowState} import cromwell.services.ServiceRegistryActor.ServiceRegistryFailure import cromwell.services.metadata.MetadataService._ import cromwell.services.metadata._ -import cromwell.webservice.PerRequest.{RequestComplete, RequestCompleteWithHeaders} -import cromwell.webservice.metadata.MetadataBuilderActor.{Idle, MetadataBuilderActorData, MetadataBuilderActorState, WaitingForMetadataService, WaitingForSubWorkflows} -import cromwell.webservice.{APIResponse, PerRequestCreator, WorkflowJsonSupport} +import cromwell.webservice.metadata.MetadataBuilderActor._ import org.slf4j.LoggerFactory -import spray.http.{StatusCodes, Uri} -import spray.httpx.SprayJsonSupport._ import spray.json._ + object MetadataBuilderActor { + sealed 
abstract class MetadataBuilderActorResponse + case class BuiltMetadataResponse(response: JsObject) extends MetadataBuilderActorResponse + case class FailedMetadataResponse(reason: Throwable) extends MetadataBuilderActorResponse + sealed trait MetadataBuilderActorState case object Idle extends MetadataBuilderActorState case object WaitingForMetadataService extends MetadataBuilderActorState @@ -128,18 +131,22 @@ object MetadataBuilderActor { private def parse(events: Seq[MetadataEvent], expandedValues: Map[String, JsValue]): JsObject = { JsObject(events.groupBy(_.key.workflowId.toString) mapValues parseWorkflowEvents(includeCallsIfEmpty = true, expandedValues)) } + + def uniqueActorName: String = List("MetadataBuilderActor", UUID.randomUUID()).mkString("-") } class MetadataBuilderActor(serviceRegistryActor: ActorRef) extends LoggingFSM[MetadataBuilderActorState, Option[MetadataBuilderActorData]] - with DefaultJsonProtocol with WorkflowQueryPagination { + with DefaultJsonProtocol { + import MetadataBuilderActor._ - import WorkflowJsonSupport._ + private var target: ActorRef = ActorRef.noSender startWith(Idle, None) val tag = self.path.name when(Idle) { case Event(action: MetadataServiceAction, _) => + target = sender() serviceRegistryActor ! action goto(WaitingForMetadataService) } @@ -150,43 +157,34 @@ class MetadataBuilderActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Me } when(WaitingForMetadataService) { - case Event(MetadataLookupResponse(query, metadata), None) => - processMetadataResponse(query, metadata) case Event(StatusLookupResponse(w, status), _) => - context.parent ! RequestComplete((StatusCodes.OK, processStatusResponse(w, status))) - allDone - case Event(_: ServiceRegistryFailure, _) => - val response = APIResponse.fail(new RuntimeException("Can't find metadata service")) - context.parent ! 
RequestComplete((StatusCodes.InternalServerError, response)) - allDone - case Event(WorkflowQuerySuccess(uri: Uri, response, metadata), _) => - context.parent ! RequestCompleteWithHeaders(response, generateLinkHeaders(uri, metadata):_*) - allDone - case Event(failure: WorkflowQueryFailure, _) => - context.parent ! RequestComplete((StatusCodes.BadRequest, APIResponse.fail(failure.reason))) + target ! BuiltMetadataResponse(processStatusResponse(w, status)) allDone case Event(WorkflowOutputsResponse(id, events), _) => // Add in an empty output event if there aren't already any output events. val hasOutputs = events exists { _.key.key.startsWith(WorkflowMetadataKeys.Outputs + ":") } val updatedEvents = if (hasOutputs) events else MetadataEvent.empty(MetadataKey(id, None, WorkflowMetadataKeys.Outputs)) +: events - context.parent ! RequestComplete((StatusCodes.OK, workflowMetadataResponse(id, updatedEvents, includeCallsIfEmpty = false, Map.empty))) + target ! BuiltMetadataResponse(workflowMetadataResponse(id, updatedEvents, includeCallsIfEmpty = false, Map.empty)) allDone case Event(LogsResponse(w, l), _) => - context.parent ! RequestComplete((StatusCodes.OK, workflowMetadataResponse(w, l, includeCallsIfEmpty = false, Map.empty))) + target ! BuiltMetadataResponse(workflowMetadataResponse(w, l, includeCallsIfEmpty = false, Map.empty)) + allDone + case Event(MetadataLookupResponse(query, metadata), None) => processMetadataResponse(query, metadata) + case Event(failure: ServiceRegistryFailure, _) => + target ! FailedMetadataResponse(new RuntimeException("Can't find metadata service")) allDone case Event(failure: MetadataServiceFailure, _) => - context.parent ! RequestComplete((StatusCodes.InternalServerError, APIResponse.error(failure.reason))) + target ! 
FailedMetadataResponse(failure.reason) allDone case Event(unexpectedMessage, stateData) => - val response = APIResponse.fail(new RuntimeException(s"MetadataBuilderActor $tag(WaitingForMetadataService, $stateData) got an unexpected message: $unexpectedMessage")) - context.parent ! RequestComplete((StatusCodes.InternalServerError, response)) + target ! FailedMetadataResponse(new RuntimeException(s"MetadataBuilderActor $tag(WaitingForMetadataService, $stateData) got an unexpected message: $unexpectedMessage")) context stop self stay() } when(WaitingForSubWorkflows) { - case Event(RequestComplete(metadata), Some(data)) => - processSubWorkflowMetadata(metadata, data) + case Event(mbr: MetadataBuilderActorResponse, Some(data)) => + processSubWorkflowMetadata(mbr, data) } whenUnhandled { @@ -195,9 +193,9 @@ class MetadataBuilderActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Me stay() } - def processSubWorkflowMetadata(metadataResponse: Any, data: MetadataBuilderActorData) = { + def processSubWorkflowMetadata(metadataResponse: MetadataBuilderActorResponse, data: MetadataBuilderActorData) = { metadataResponse match { - case (StatusCodes.OK, js: JsObject) => + case BuiltMetadataResponse(js) => js.fields.get(WorkflowMetadataKeys.Id) match { case Some(subId: JsString) => val newData = data.withSubWorkflow(subId.value, js) @@ -209,18 +207,18 @@ class MetadataBuilderActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Me } case _ => failAndDie(new RuntimeException("Received unexpected response while waiting for sub workflow metadata.")) } - case _ => failAndDie(new RuntimeException("Failed to retrieve metadata for a sub workflow.")) + case FailedMetadataResponse(e) => failAndDie(new RuntimeException("Failed to retrieve metadata for a sub workflow.", e)) } } def failAndDie(reason: Throwable) = { - context.parent ! RequestComplete((StatusCodes.InternalServerError, APIResponse.error(reason))) + target ! 
FailedMetadataResponse(reason) context stop self stay() } def buildAndStop(query: MetadataQuery, eventsList: Seq[MetadataEvent], expandedValues: Map[String, JsValue]) = { - context.parent ! RequestComplete((StatusCodes.OK, processMetadataEvents(query, eventsList, expandedValues))) + target ! BuiltMetadataResponse(processMetadataEvents(query, eventsList, expandedValues)) allDone } @@ -236,7 +234,7 @@ class MetadataBuilderActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Me else { // Otherwise spin up a metadata builder actor for each sub workflow subWorkflowIds foreach { subId => - val subMetadataBuilder = context.actorOf(MetadataBuilderActor.props(serviceRegistryActor), PerRequestCreator.endpointActorName) + val subMetadataBuilder = context.actorOf(MetadataBuilderActor.props(serviceRegistryActor), uniqueActorName) subMetadataBuilder ! GetMetadataQueryAction(query.copy(workflowId = WorkflowId.fromString(subId))) } goto(WaitingForSubWorkflows) using Option(MetadataBuilderActorData(query, eventsList, Map.empty, subWorkflowIds.size)) @@ -264,7 +262,10 @@ class MetadataBuilderActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Me )) } - private def workflowMetadataResponse(workflowId: WorkflowId, eventsList: Seq[MetadataEvent], includeCallsIfEmpty: Boolean, expandedValues: Map[String, JsValue]) = { + private def workflowMetadataResponse(workflowId: WorkflowId, + eventsList: Seq[MetadataEvent], + includeCallsIfEmpty: Boolean, + expandedValues: Map[String, JsValue]): JsObject = { JsObject(MetadataBuilderActor.parseWorkflowEvents(includeCallsIfEmpty, expandedValues)(eventsList).fields + ("id" -> JsString(workflowId.toString))) } } diff --git a/engine/src/main/scala/cromwell/webservice/metadata/WorkflowQueryPagination.scala b/engine/src/main/scala/cromwell/webservice/metadata/WorkflowQueryPagination.scala index 8f6650e88..9ae121067 100644 --- a/engine/src/main/scala/cromwell/webservice/metadata/WorkflowQueryPagination.scala +++ 
b/engine/src/main/scala/cromwell/webservice/metadata/WorkflowQueryPagination.scala @@ -1,33 +1,36 @@ package cromwell.webservice.metadata +import akka.http.scaladsl.model.Uri.Query +import akka.http.scaladsl.model.headers.{Link, LinkParams} +import akka.http.scaladsl.model.{HttpHeader, Uri} import cromwell.services.metadata.MetadataService.QueryMetadata -import spray.http.HttpHeaders.Link -import spray.http.{HttpHeader, Uri} + /** * Attempts to add query parameters for pagination. * - * NOTE: This trait is effectively broken, as the returned links are not suitable for use by cromwell clients. + * NOTE: This is effectively broken, as the returned links are not suitable for use by cromwell clients. * - * The trait discards the search parameters for GETs, for example it drops parameters such as "start" and "end". Also + * This discards the search parameters for GETs, for example it drops parameters such as "start" and "end". Also * generates links incompatible with POSTs, as the endpoints read parameters from the HTTP body during POST, __not__ * from the URI. * - * This trait may need to receive an entire `spray.http.HttpRequest` and not just the `spray.http.Uri` to ensure that - * it doesn't generate links for POST. + * This may need to receive an entire `HttpRequest` and not just the `Uri` to ensure that it doesn't generate links for POST. * * The existing `CromwellApiServiceSpec` should be updated to verify the expected behavior for both GET and POST. * * Left behind for legacy reasons, but don't believe anyone has ever used these non-functional links. 
+ * + * Note: As of 6/7/17 the above is confirmed by JG, but leaving it mostly as-is for now */ -trait WorkflowQueryPagination { +object WorkflowQueryPagination { - protected def generatePaginationParams(page: Int, pageSize: Int): String = { - s"page=$page&pagesize=$pageSize" + private def generatePaginationParams(page: Int, pageSize: Int): Query = { + Query(s"page=$page&pagesize=$pageSize") } //Generates link headers for pagination navigation https://tools.ietf.org/html/rfc5988#page-6 - protected def generateLinkHeaders(uri: Uri, metadata: Option[QueryMetadata]): Seq[HttpHeader] = { + def generateLinkHeaders(uri: Uri, metadata: Option[QueryMetadata]): List[HttpHeader] = { //strip off the query params val baseUrl = uri.scheme + ":" + uri.authority + uri.path metadata match { @@ -35,22 +38,21 @@ trait WorkflowQueryPagination { (meta.page, meta.pageSize) match { case (Some(p), Some(ps)) => - val firstLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(1, ps)), Link.first) + val firstLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(1, ps)), LinkParams.first) val prevPage = math.max(p - 1, 1) - val prevLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(prevPage, ps)), Link.prev) + val prevLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(prevPage, ps)), LinkParams.prev) val lastPage = math.ceil(meta.totalRecords.getOrElse(1).toDouble / ps.toDouble).toInt - val lastLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(lastPage, ps)), Link.last) + val lastLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(lastPage, ps)), LinkParams.last) val nextPage = math.min(p + 1, lastPage) - val nextLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(nextPage, ps)), Link.next) - - Seq(firstLink, prevLink, nextLink, lastLink) + val nextLink = Link(Uri(baseUrl).withQuery(generatePaginationParams(nextPage, ps)), LinkParams.next) - case _ => Seq() + List(firstLink, prevLink, nextLink, lastLink) + case _ => List.empty } - case 
None => Seq() + case None => List.empty } } } diff --git a/engine/src/test/scala/cromwell/CromwellTestKitSpec.scala b/engine/src/test/scala/cromwell/CromwellTestKitSpec.scala index 3a1173503..2a59744c0 100644 --- a/engine/src/test/scala/cromwell/CromwellTestKitSpec.scala +++ b/engine/src/test/scala/cromwell/CromwellTestKitSpec.scala @@ -25,17 +25,15 @@ import cromwell.engine.workflow.workflowstore.{InMemoryWorkflowStore, WorkflowSt import cromwell.jobstore.JobStoreActor.{JobStoreWriteSuccess, JobStoreWriterCommand} import cromwell.server.{CromwellRootActor, CromwellSystem} import cromwell.services.ServiceRegistryActor -import cromwell.services.metadata.MetadataQuery import cromwell.services.metadata.MetadataService._ import cromwell.subworkflowstore.EmptySubWorkflowStoreActor import cromwell.util.SampleWdl -import cromwell.webservice.PerRequest.RequestComplete import cromwell.webservice.metadata.MetadataBuilderActor +import cromwell.webservice.metadata.MetadataBuilderActor.{BuiltMetadataResponse, FailedMetadataResponse, MetadataBuilderActorResponse} import org.scalactic.Equality import org.scalatest._ import org.scalatest.concurrent.{Eventually, ScalaFutures} import org.scalatest.time.{Millis, Seconds, Span} -import spray.http.StatusCode import spray.json._ import wdl4s.TaskCall import wdl4s.expression.{NoFunctions, WdlStandardLibraryFunctions} @@ -408,22 +406,6 @@ abstract class CromwellTestKitSpec(val twms: TestWorkflowManagerSystem = default workflowId } - def getWorkflowMetadata(workflowId: WorkflowId, serviceRegistryActor: ActorRef, key: Option[String] = None)(implicit ec: ExecutionContext): JsObject = { - // MetadataBuilderActor sends its response to context.parent, so we can't just use an ask to talk to it here - val message = GetMetadataQueryAction(MetadataQuery(workflowId, None, key, None, None, expandSubWorkflows = false)) - val parentProbe = TestProbe() - - TestActorRef(MetadataBuilderActor.props(serviceRegistryActor), parentProbe.ref, 
s"MetadataActor-${UUID.randomUUID()}") ! message - val metadata = parentProbe.expectMsgPF(TimeoutDuration) { - // Because of type erasure the scala compiler can't check that the RequestComplete generic type will be (StatusCode, JsObject), which would generate a warning - // As long as Metadata sends back a JsObject this is safe - case response: RequestComplete[(StatusCode, JsObject)] @unchecked => response.response._2 - } - - system.stop(parentProbe.ref) - metadata - } - /** * Verifies that a state is correct. // TODO: There must be a better way...? */ @@ -441,7 +423,16 @@ abstract class CromwellTestKitSpec(val twms: TestWorkflowManagerSystem = default } private def getWorkflowOutputsFromMetadata(id: WorkflowId, serviceRegistryActor: ActorRef): Map[FullyQualifiedName, WdlValue] = { - getWorkflowMetadata(id, serviceRegistryActor, None).getFields(WorkflowMetadataKeys.Outputs).toList match { + val mba = system.actorOf(MetadataBuilderActor.props(serviceRegistryActor)) + val response = mba.ask(WorkflowOutputs(id)).mapTo[MetadataBuilderActorResponse] collect { + case BuiltMetadataResponse(r) => r + case FailedMetadataResponse(e) => throw e + } + val jsObject = Await.result(response, TimeoutDuration) + + system.stop(mba) + + jsObject.getFields(WorkflowMetadataKeys.Outputs).toList match { case head::_ => head.asInstanceOf[JsObject].fields.map( x => (x._1, jsValueToWdlValue(x._2))) case _ => Map.empty } diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala index ec40fa4b5..a5e9c73fe 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala @@ -3,15 +3,12 @@ package cromwell.engine.workflow.lifecycle.execution.callcaching 
import akka.testkit.{ImplicitSender, TestFSMRef, TestProbe} import cats.data.NonEmptyList import cromwell.core.{TestKitSuite, WorkflowId} -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffActor.{CallCacheDiffWithRequest, WaitingForMetadata} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffQueryParameter.CallCacheDiffQueryCall import cromwell.services.metadata.MetadataService.{GetMetadataQueryAction, MetadataLookupResponse, MetadataServiceKeyLookupFailed} import cromwell.services.metadata._ -import cromwell.webservice.FailureResponse -import cromwell.webservice.PerRequest.RequestComplete import org.scalatest.concurrent.Eventually import org.scalatest.{FlatSpecLike, Matchers} -import spray.http.{StatusCode, StatusCodes} class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matchers with ImplicitSender with Eventually { @@ -113,7 +110,7 @@ class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matcher actor ! MetadataLookupResponse(queryA, eventsA) - expectMsgClass(classOf[RequestComplete[_]]) + expectMsgClass(classOf[CallCacheDiffActorResponse]) expectTerminated(actor) } @@ -127,16 +124,12 @@ class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matcher actor ! 
MetadataLookupResponse(queryB, eventsB) - expectMsgClass(classOf[RequestComplete[_]]) + expectMsgClass(classOf[CallCacheDiffActorResponse]) expectTerminated(actor) } it should "build a correct response" in { import cromwell.services.metadata.MetadataService.MetadataLookupResponse - import cromwell.webservice.PerRequest.RequestComplete - import cromwell.webservice.WorkflowJsonSupport._ - import spray.http.StatusCodes - import spray.httpx.SprayJsonSupport._ import spray.json._ val mockServiceRegistryActor = TestProbe() @@ -187,7 +180,7 @@ class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matcher |} """.stripMargin.parseJson.asJsObject - val expectedResponse = RequestComplete((StatusCodes.OK, expectedJson)) + val expectedResponse = BuiltCallCacheDiffResponse(expectedJson) expectMsg(expectedResponse) expectTerminated(actor) @@ -208,18 +201,17 @@ class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matcher actor ! responseA expectMsgPF(1 second) { - case RequestComplete((StatusCodes.InternalServerError, response: FailureResponse)) => - response.status shouldBe "error" - response.message shouldBe "Query lookup failed - but it's ok ! this is a test !" + case FailedCallCacheDiffResponse(e: Throwable) => + e.getMessage shouldBe "Query lookup failed - but it's ok ! this is a test !" } expectTerminated(actor) } - it should "respond with 404 if hashes are missing" in { + it should "Respond with a CachedCallNotFoundException if hashes are missing" in { import scala.concurrent.duration._ import scala.language.postfixOps - + val mockServiceRegistryActor = TestProbe() val actor = TestFSMRef(new CallCacheDiffActor(mockServiceRegistryActor.ref)) watch(actor) @@ -230,12 +222,9 @@ class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matcher actor ! 
MetadataLookupResponse(queryA, eventsA.filterNot(_.key.key.contains("hashes"))) expectMsgPF(1 second) { - case response: RequestComplete[(StatusCode, FailureResponse)]@unchecked => - response.response._1 shouldBe StatusCodes.NotFound - response.response._2.status shouldBe "error" - response.response._2.message shouldBe "callA and callB were run on a previous version of Cromwell on which this endpoint was not supported." + case FailedCallCacheDiffResponse(e: CachedCallNotFoundException) => + e.getMessage shouldBe "callA and callB were run on a previous version of Cromwell on which this endpoint was not supported." } expectTerminated(actor) } - } diff --git a/engine/src/test/scala/cromwell/server/CromwellServerSpec.scala b/engine/src/test/scala/cromwell/server/CromwellServerSpec.scala deleted file mode 100644 index 9bf6180e3..000000000 --- a/engine/src/test/scala/cromwell/server/CromwellServerSpec.scala +++ /dev/null @@ -1,33 +0,0 @@ -package cromwell.server - -import akka.pattern.ask -import akka.util.Timeout -import com.typesafe.config.ConfigFactory -import org.scalatest.concurrent.{PatienceConfiguration, ScalaFutures} -import org.scalatest.{FlatSpec, Matchers} -import org.specs2.mock.Mockito -import spray.http.{ContentTypes, HttpRequest, HttpResponse, Timedout} - -import scala.concurrent.duration._ - -class CromwellServerSpec extends FlatSpec with Matchers with Mockito with ScalaFutures { - implicit val timeout: Timeout = 5.seconds - - it should "return 500 errors as Json" in { - val cromwellSystem = new CromwellSystem {} - - val cromwellServerActor = cromwellSystem.actorSystem.actorOf(CromwellServerActor.props(ConfigFactory.empty())(cromwellSystem.materializer)) - val response = cromwellServerActor.ask(Timedout(mock[HttpRequest])) - - response.futureValue(PatienceConfiguration.Timeout(timeout.duration)) match { - case h: HttpResponse => - h.entity.toOption match { - case Some(e) => e.contentType.toString() shouldBe 
ContentTypes.`application/json`.mediaType.value.toString - case None => fail() - } - case _ => fail() - } - - cromwellSystem.shutdownActorSystem() - } -} diff --git a/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala index cd32f00e6..2acfa1f89 100644 --- a/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala @@ -2,50 +2,53 @@ package cromwell.webservice import akka.actor.{Actor, ActorSystem, Props} import cromwell.core.{WorkflowId, WorkflowMetadataKeys, WorkflowSubmitted, WorkflowSucceeded} -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffQueryParameter - -import scala.util.{Failure, Success, Try} +import akka.http.scaladsl.coding.{Decoder, Gzip} +import akka.http.scaladsl.server.Route +import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ +import spray.json.DefaultJsonProtocol._ import cromwell.engine.workflow.workflowstore.WorkflowStoreActor.{AbortWorkflow, BatchSubmitWorkflows, SubmitWorkflow} import cromwell.engine.workflow.workflowstore.WorkflowStoreEngineActor import cromwell.engine.workflow.workflowstore.WorkflowStoreEngineActor.WorkflowAbortFailed -import cromwell.services.metadata.MetadataService._ -import org.scalatest.{FlatSpec, Matchers} -import spray.http._ -import spray.json.DefaultJsonProtocol._ -import spray.json._ -import spray.routing._ import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.{WorkflowSubmittedToStore, WorkflowsBatchSubmittedToStore} +import cromwell.services.metadata.MetadataService._ +import akka.http.scaladsl.model._ +import akka.http.scaladsl.testkit.{RouteTestTimeout, ScalatestRouteTest} +import akka.http.scaladsl.unmarshalling.Unmarshal +import akka.stream.ActorMaterializer +import cromwell.services.metadata._ import cromwell.util.SampleWdl.HelloWorld -import 
cromwell.webservice.PerRequest.RequestComplete -import spray.httpx.ResponseTransformation -import spray.httpx.SprayJsonSupport._ -import spray.httpx.encoding.Gzip -import spray.testkit.ScalatestRouteTest -import spray.routing.Directives._ - -class CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Matchers with ResponseTransformation { +import org.scalatest.{AsyncFlatSpec, Matchers} +import spray.json._ + +import scala.concurrent.Await +import scala.concurrent.duration._ + +class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with Matchers { import CromwellApiServiceSpec._ - val cromwellApiService = new MockApiService() + val akkaHttpService = new MockApiService() val version = "v1" - behavior of "REST API /status endpoint" + implicit def default(implicit system: ActorSystem) = RouteTestTimeout(5.seconds) + + + behavior of "REST API /status endpoint" it should "return 200 for get of a known workflow id" in { - val workflowId = MockApiService.ExistingWorkflowId + val workflowId = CromwellApiServiceSpec.ExistingWorkflowId + Get(s"/workflows/$version/$workflowId/status") ~> - cromwellApiService.statusRoute ~> + akkaHttpService.routes ~> check { status should be(StatusCodes.OK) - val result = responseAs[JsObject] - result.fields(WorkflowMetadataKeys.Status) should be(JsString("Submitted")) + // Along w/ checking value, ensure it is valid JSON despite the requested content type + responseAs[JsObject].fields(WorkflowMetadataKeys.Status) should be(JsString("Submitted")) } } it should "return 404 for get of unknown workflow" in { - val workflowId = MockApiService.UnrecognizedWorkflowId - + val workflowId = CromwellApiServiceSpec.UnrecognizedWorkflowId Get(s"/workflows/$version/$workflowId/status") ~> - cromwellApiService.statusRoute ~> + akkaHttpService.routes ~> check { assertResult(StatusCodes.NotFound) { status @@ -55,7 +58,7 @@ class CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Match it should "return 400 
for get of a malformed workflow id's status" in { Get(s"/workflows/$version/foobar/status") ~> - cromwellApiService.statusRoute ~> + akkaHttpService.routes ~> check { assertResult(StatusCodes.BadRequest) { status @@ -71,12 +74,12 @@ class CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Match } } - behavior of "REST API /abort endpoint" + behavior of "REST API /abort endpoint" it should "return 404 for abort of unknown workflow" in { - val workflowId = MockApiService.UnrecognizedWorkflowId + val workflowId = CromwellApiServiceSpec.UnrecognizedWorkflowId Post(s"/workflows/$version/$workflowId/abort") ~> - cromwellApiService.abortRoute ~> + akkaHttpService.routes ~> check { assertResult(StatusCodes.NotFound) { status @@ -86,7 +89,7 @@ class CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Match it should "return 400 for abort of a malformed workflow id" in { Post(s"/workflows/$version/foobar/abort") ~> - cromwellApiService.abortRoute ~> + akkaHttpService.routes ~> check { assertResult(StatusCodes.BadRequest) { status @@ -103,8 +106,8 @@ class CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Match } it should "return 403 for abort of a workflow in a terminal state" in { - Post(s"/workflows/$version/${MockApiService.AbortedWorkflowId}/abort") ~> - cromwellApiService.abortRoute ~> + Post(s"/workflows/$version/${CromwellApiServiceSpec.AbortedWorkflowId}/abort") ~> + akkaHttpService.routes ~> check { assertResult(StatusCodes.Forbidden) { status @@ -112,24 +115,19 @@ class CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Match assertResult( s"""{ | "status": "error", - | "message": "Workflow ID '${MockApiService.AbortedWorkflowId}' is in terminal state 'Aborted' and cannot be aborted." + | "message": "Workflow ID '${CromwellApiServiceSpec.AbortedWorkflowId}' is in terminal state 'Aborted' and cannot be aborted." 
|}""".stripMargin ) { responseAs[String] } } } - it should "return 200 for abort of a known workflow id" in { - Post(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/abort") ~> - cromwellApiService.abortRoute ~> + Post(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/abort") ~> + akkaHttpService.routes ~> check { assertResult( - s"""{ - | "id": "${MockApiService.ExistingWorkflowId.toString}", - | "status": "Aborted" - |}""" - .stripMargin) { + s"""{"id":"${CromwellApiServiceSpec.ExistingWorkflowId.toString}","status":"Aborted"}""") { responseAs[String] } assertResult(StatusCodes.OK) { @@ -138,226 +136,222 @@ class CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Match } } - behavior of "REST API submission endpoint" - it should "return 201 for a successful workflow submission " in { - val bodyParts: Map[String, BodyPart] = Map("workflowSource" -> BodyPart(HelloWorld.workflowSource()), "workflowInputs" -> BodyPart(HelloWorld.rawInputs.toJson.toString())) - Post(s"/workflows/$version", MultipartFormData(bodyParts)) ~> - cromwellApiService.submitRoute ~> - check { - assertResult( - s"""{ - | "id": "${MockApiService.ExistingWorkflowId.toString}", - | "status": "Submitted" - |}""".stripMargin) { - responseAs[String] - } - assertResult(StatusCodes.Created) { - status + behavior of "REST API submission endpoint" + it should "return 201 for a successful workflow submission " in { + val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) + val workflowInputs = Multipart.FormData.BodyPart("workflowInputs", HttpEntity(MediaTypes.`application/json`, HelloWorld.rawInputs.toJson.toString())) + val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() + Post(s"/workflows/$version", formData) ~> + akkaHttpService.routes ~> + check { + assertResult( + s"""{ + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | 
"status": "Submitted" + |}""".stripMargin) { + responseAs[String].parseJson.prettyPrint + } + assertResult(StatusCodes.Created) { + status + } } - } - } + } - it should "return 400 for an unrecognized form data request parameter " in { - val bodyParts: Map[String, BodyPart] = Map("incorrectParameter" -> BodyPart(HelloWorld.workflowSource())) - Post(s"/workflows/$version", MultipartFormData(bodyParts)) ~> - cromwellApiService.submitRoute ~> - check { - assertResult( - s"""{ - | "status": "fail", - | "message": "Error(s): Unexpected body part name: incorrectParameter" - |}""".stripMargin) { - responseAs[String] + it should "return 400 for an unrecognized form data request parameter " in { + val formData = Multipart.FormData(Multipart.FormData.BodyPart("incorrectParameter", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()))).toEntity() + Post(s"/workflows/$version", formData) ~> + akkaHttpService.routes ~> + check { + assertResult( + s"""{ + | "status": "fail", + | "message": "Error(s): Unexpected body part name: incorrectParameter" + |}""".stripMargin) { + responseAs[String] + } + assertResult(StatusCodes.BadRequest) { + status + } } - assertResult(StatusCodes.BadRequest) { - status + } + + it should "return 400 for a workflow submission with unsupported workflow option keys" in { + val options = """ + |{ + | "defaultRuntimeOptions": { + | "cpu":1 + | } + |} + |""".stripMargin + + val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) + val workflowInputs = Multipart.FormData.BodyPart("workflowOptions", HttpEntity(MediaTypes.`application/json`, options)) + val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() + + Post(s"/workflows/$version", formData) ~> + akkaHttpService.routes ~> + check { + assertResult(StatusCodes.BadRequest) { + status + } } - } - } + } - it should "return 400 for a workflow submission with unsupported workflow option 
keys" in { - val options = """ - |{ - | "defaultRuntimeOptions": { - | "cpu":1 - | } - |} - |""".stripMargin + it should "return 400 for a workflow submission with malformed workflow options json" in { + val options = s""" + |{"read_from_cache": "true" + |""".stripMargin - val bodyParts = Map("workflowSource" -> BodyPart(HelloWorld.workflowSource()), "workflowOptions" -> BodyPart(options)) + val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) + val workflowInputs = Multipart.FormData.BodyPart("workflowOptions", HttpEntity(MediaTypes.`application/json`, options)) + val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() - Post(s"/workflows/$version", MultipartFormData(bodyParts)) ~> - cromwellApiService.submitRoute ~> - check { - assertResult(StatusCodes.BadRequest) { - status + Post(s"/workflows/$version", formData) ~> + akkaHttpService.routes ~> + check { + assertResult(StatusCodes.BadRequest) { + status + } } - } - } - - it should "return 400 for a workflow submission with malformed workflow options json" in { - val options = s""" - |{"read_from_cache": "true" - |""".stripMargin + } - val bodyParts = Map("workflowSource" -> BodyPart(HelloWorld.workflowSource()), "workflowOptions" -> BodyPart(options)) + behavior of "REST API batch submission endpoint" + it should "return 200 for a successful workflow submission " in { + val inputs = HelloWorld.rawInputs.toJson + val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) + val workflowInputs = Multipart.FormData.BodyPart("workflowInputs", HttpEntity(MediaTypes.`application/json`, s"[$inputs, $inputs]")) + val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() - Post(s"/workflows/$version", MultipartFormData(bodyParts)) ~> - cromwellApiService.submitRoute ~> - check { - assertResult(StatusCodes.BadRequest) { 
- status + Post(s"/workflows/$version/batch", formData) ~> + akkaHttpService.routes ~> + check { + assertResult( + s"""[{ + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "Submitted" + |}, { + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "Submitted" + |}]""".stripMargin) { + responseAs[String].parseJson.prettyPrint + } + assertResult(StatusCodes.Created) { + status + } } - } - } - - it should "succesfully merge and override multiple input files" in { + } - val input1 = Map("wf.a1" -> "hello", "wf.a2" -> "world").toJson.toString - val input2 = Map.empty[String, String].toJson.toString - val overrideInput1 = Map("wf.a2" -> "universe").toJson.toString - val allInputs = cromwellApiService.mergeMaps(Seq(Option(input1), Option(input2), Option(overrideInput1))) + it should "return 400 for an submission with no inputs" in { + val formData = Multipart.FormData(Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()))).toEntity() - check { - allInputs.fields.keys should contain allOf("wf.a1", "wf.a2") - allInputs.fields("wf.a2") should be(JsString("universe")) + Post(s"/workflows/$version/batch", formData) ~> + akkaHttpService.routes ~> + check { + assertResult( + s"""{ + | "status": "fail", + | "message": "Error(s): No inputs were provided" + |}""".stripMargin) { + responseAs[String] + } + assertResult(StatusCodes.BadRequest) { + status + } + } } - } - behavior of "REST API batch submission endpoint" - it should "return 200 for a successful workflow submission " in { - val inputs = HelloWorld.rawInputs.toJson - val bodyParts = Map("workflowSource" -> BodyPart(HelloWorld.workflowSource()), "workflowInputs" -> BodyPart(s"[$inputs, $inputs]")) + behavior of "REST API /outputs endpoint" + it should "return 200 with GET of outputs on successful execution of workflow" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/outputs") 
~> + akkaHttpService.routes ~> + check { + status should be(StatusCodes.OK) + responseAs[JsObject].fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.Outputs) + } + } - Post(s"/workflows/$version/batch", MultipartFormData(bodyParts)) ~> - cromwellApiService.submitBatchRoute ~> + it should "return 404 with outputs on unknown workflow" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.UnrecognizedWorkflowId}/outputs") ~> + akkaHttpService.routes ~> check { - assertResult( - s"""[{ - | "id": "${MockApiService.ExistingWorkflowId.toString}", - | "status": "Submitted" - |}, { - | "id": "${MockApiService.ExistingWorkflowId.toString}", - | "status": "Submitted" - |}]""".stripMargin) { - responseAs[String] - } - assertResult(StatusCodes.OK) { + assertResult(StatusCodes.NotFound) { status } } - } - - it should "return 400 for an submission with no inputs" in { - val bodyParts = Map("workflowSource" -> BodyPart(HelloWorld.workflowSource())) + } - Post(s"/workflows/$version/batch", MultipartFormData(bodyParts)) ~> - cromwellApiService.submitBatchRoute ~> - check { - assertResult( - s"""{ - | "status": "fail", - | "message": "Error(s): No inputs were provided" - |}""".stripMargin) { - responseAs[String] - } - assertResult(StatusCodes.BadRequest) { - status + it should "return 405 with POST of outputs on successful execution of workflow" in { + Post(s"/workflows/$version/${CromwellApiServiceSpec.UnrecognizedWorkflowId}/outputs") ~> + Route.seal(akkaHttpService.routes) ~> + check { + assertResult(StatusCodes.MethodNotAllowed) { + status + } } - } - } + } - behavior of "REST API /outputs endpoint" - it should "return 200 with GET of outputs on successful execution of workflow" in { - Get(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/outputs") ~> - cromwellApiService.workflowOutputsRoute ~> - check { - status should be(StatusCodes.OK) - val result = responseAs[JsObject] - result.fields.keys should contain 
allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.Outputs) - } - } + behavior of "REST API /logs endpoint" + it should "return 200 with paths to stdout/stderr/backend log" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/logs") ~> + akkaHttpService.routes ~> + check { + status should be(StatusCodes.OK) - it should "return 404 with outputs on unknown workflow" in { - Get(s"/workflows/$version/${MockApiService.UnrecognizedWorkflowId}/outputs") ~> - cromwellApiService.workflowOutputsRoute ~> - check { - assertResult(StatusCodes.NotFound) { - status - } + val call = responseAs[JsObject].fields("calls").convertTo[JsObject].fields("mycall").convertTo[Seq[JsObject]].head + call.fields("stdout") should be(JsString("stdout.txt")) + call.fields("stderr") should be(JsString("stderr.txt")) + call.fields("stdout") should be(JsString("stdout.txt")) + call.fields("backendLogs").convertTo[JsObject].fields("log") should be (JsString("backend.log")) + } } - } - it should "return 405 with POST of outputs on successful execution of workflow" in { - Post(s"/workflows/$version/${MockApiService.UnrecognizedWorkflowId}/outputs") ~> - cromwellApiService.sealRoute(cromwellApiService.workflowOutputsRoute) ~> - check { - assertResult(StatusCodes.MethodNotAllowed) { - status + it should "return 404 with logs on unknown workflow" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.UnrecognizedWorkflowId}/logs") ~> + akkaHttpService.routes ~> + check { + assertResult(StatusCodes.NotFound) { + status + } } - } - } - - behavior of "REST API /logs endpoint" - it should "return 200 with paths to stdout/stderr/backend log" in { - Get(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/logs") ~> - cromwellApiService.workflowLogsRoute ~> - check { - status should be(StatusCodes.OK) - val result = responseAs[JsObject] - - val call = result.fields("calls").convertTo[JsObject].fields("mycall").convertTo[Seq[JsObject]].head - call.fields("stdout") should 
be(JsString("stdout.txt")) - call.fields("stderr") should be(JsString("stderr.txt")) - call.fields("stdout") should be(JsString("stdout.txt")) - call.fields("backendLogs").convertTo[JsObject].fields("log") should be (JsString("backend.log")) - } - } + } - it should "return 404 with logs on unknown workflow" in { - Get(s"/workflows/$version/${MockApiService.UnrecognizedWorkflowId}/logs") ~> - cromwellApiService.workflowLogsRoute ~> - check { - assertResult(StatusCodes.NotFound) { - status + behavior of "REST API /metadata endpoint" + it should "return with full metadata from the metadata route" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata") ~> + akkaHttpService.routes ~> + check { + status should be(StatusCodes.OK) + val decoder: Decoder = Gzip + val result = Await.result(Unmarshal(decoder.decode(response)).to[JsObject], 1.second) + result.fields.keys should contain allOf("testKey1", "testKey2") + result.fields.keys shouldNot contain("testKey3") + result.fields("testKey1") should be(JsString("myValue1")) + result.fields("testKey2") should be(JsString("myValue2")) } - } - } - - behavior of "REST API /metadata endpoint" - it should "return with full metadata from the metadata route" in { - Get(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/metadata") ~> - mapHttpResponse(decode(Gzip))(cromwellApiService.metadataRoute) ~> - check { - status should be(StatusCodes.OK) - val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1", "testKey2") - result.fields.keys shouldNot contain("testKey3") - result.fields("testKey1") should be(JsString("myValue1")) - result.fields("testKey2") should be(JsString("myValue2")) - } - } + } - it should "return with included metadata from the metadata route" in { - Get(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/metadata?includeKey=testKey1&includeKey=testKey2a") ~> - mapHttpResponse(decode(Gzip))(cromwellApiService.metadataRoute) ~> - check { - 
status should be(StatusCodes.OK) - val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") - result.fields.keys should contain noneOf("testKey2b", "testKey3") - result.fields("testKey1a") should be(JsString("myValue1a")) - result.fields("testKey1b") should be(JsString("myValue1b")) - result.fields("testKey2a") should be(JsString("myValue2a")) - } - } + it should "return with included metadata from the metadata route" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?includeKey=testKey1&includeKey=testKey2a") ~> + akkaHttpService.routes ~> + check { + status should be(StatusCodes.OK) + val decoder: Decoder = Gzip + val result = Await.result(Unmarshal(decoder.decode(response)).to[JsObject], 1.second) + result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") + result.fields.keys should contain noneOf("testKey2b", "testKey3") + result.fields("testKey1a") should be(JsString("myValue1a")) + result.fields("testKey1b") should be(JsString("myValue1b")) + result.fields("testKey2a") should be(JsString("myValue2a")) + } + } it should "return with excluded metadata from the metadata route" in { - Get(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/metadata?excludeKey=testKey2b&excludeKey=testKey3") ~> - mapHttpResponse(decode(Gzip))(cromwellApiService.metadataRoute) ~> + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?excludeKey=testKey2b&excludeKey=testKey3") ~> + akkaHttpService.routes ~> check { status should be(StatusCodes.OK) - val result = responseAs[JsObject] + val decoder: Decoder = Gzip + val result = Await.result(Unmarshal(decoder.decode(response)).to[JsObject], 1.second) result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") result.fields.keys should contain noneOf("testKey2b", "testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) @@ -366,341 +360,176 @@ class 
CromwellApiServiceSpec extends FlatSpec with ScalatestRouteTest with Match } } - it should "return an error when included and excluded metadata requested from the metadata route" in { - Get(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/metadata?includeKey=testKey1&excludeKey=testKey2") ~> - mapHttpResponse(decode(Gzip))(cromwellApiService.metadataRoute) ~> - check { - assertResult(StatusCodes.BadRequest) { - status - } - assertResult( - s"""{ - | "status": "fail", - | "message": "includeKey and excludeKey may not be specified together" - |}""".stripMargin - ) { - responseAs[String] - } - } - } + it should "return an error when included and excluded metadata requested from the metadata route" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?includeKey=testKey1&excludeKey=testKey2") ~> + akkaHttpService.routes ~> + check { + assertResult(StatusCodes.BadRequest) { + status + } - behavior of "REST API /timing endpoint" - it should "return 200 with an HTML document for the timings route" in { - Get(s"/workflows/$version/${MockApiService.ExistingWorkflowId}/timing") ~> - cromwellApiService.timingRoute ~> - check { - assertResult(StatusCodes.OK) { status } - assertResult("") { - responseAs[String].substring(0, 6) + val decoder: Decoder = Gzip + Unmarshal(decoder.decode(response)).to[String] map { r => + assertResult( + s"""{ + | "status": "fail", + | "message": "includeKey and excludeKey may not be specified together" + |}""".stripMargin + ) { r } + } } - } - } - - behavior of "REST API /query GET endpoint" - it should "return good results for a good query" in { - Get(s"/workflows/$version/query?status=Succeeded&id=${MockApiService.ExistingWorkflowId}") ~> - cromwellApiService.queryRoute ~> - check { - status should be(StatusCodes.OK) - val results = responseAs[JsObject].fields("results").convertTo[Seq[JsObject]] - - results.head.fields("id") should be(JsString(MockApiService.ExistingWorkflowId.toString)) - 
results.head.fields("status") should be(JsString("Succeeded")) - } - } + } - behavior of "REST API /query POST endpoint" - it should "return good results for a good query map body" in { - Post(s"/workflows/$version/query", HttpEntity(ContentTypes.`application/json`, """[{"status":"Succeeded"}]""")) ~> - cromwellApiService.queryPostRoute ~> - check { - assertResult(StatusCodes.OK) { - status - } - assertResult(true) { - body.asString.contains("\"status\": \"Succeeded\"") + behavior of "REST API /timing endpoint" + it should "return 200 with an HTML document for the timings route" in { + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/timing") ~> + akkaHttpService.routes ~> + check { + assertResult(StatusCodes.OK) { status } + assertResult("") { + responseAs[String].substring(0, 6) + } } - } - } + } - behavior of "REST API /callcaching/diff GET endpoint" - it should "return good results for a good query" in { - Get(s"/workflows/$version/callcaching/diff?workflowA=85174842-4a44-4355-a3a9-3a711ce556f1&callA=wf_hello.hello&workflowB=7479f8a8-efa4-46e4-af0d-802addc66e5d&callB=wf_hello.hello") ~> - cromwellApiService.callCachingDiffRoute ~> - check { - assertResult(StatusCodes.OK) { - status - } - assertResult( - """{ - | "callA": { - | "workflowId": "85174842-4a44-4355-a3a9-3a711ce556f1", - | "callFqn": "wf_hello.hello", - | "jobIndex": -1, - | "allowResultReuse": true - | }, - | "callB": { - | "workflowId": "85174842-4a44-4355-a3a9-3a711ce556f1", - | "callFqn": "wf_hello.hello", - | "jobIndex": -1, - | "allowResultReuse": false - | }, - | "hashDifferential": [{ - | "key1": { - | "callA": "somehash", - | "callB": "someotherhash" - | } - | }, { - | "key2": { - | "callA": "somehash", - | "callB": null - | } - | }, { - | "key3": { - | "callA": null, - | "callB": "someotherhash" - | } - | }] - |}""".stripMargin - ) { - responseAs[String] + behavior of "REST API /query GET endpoint" + it should "return good results for a good query" in { + 
Get(s"/workflows/$version/query?status=Succeeded&id=${CromwellApiServiceSpec.ExistingWorkflowId}") ~> + akkaHttpService.routes ~> + check { + status should be(StatusCodes.OK) + contentType should be(ContentTypes.`application/json`) + val results = responseAs[JsObject].fields("results").convertTo[Seq[JsObject]] + results.head.fields("id") should be(JsString(CromwellApiServiceSpec.ExistingWorkflowId.toString)) + results.head.fields("status") should be(JsString("Succeeded")) } - } - } + } - it should "return an error for a bad query" in { - Get(s"/workflows/$version/callcaching/diff?missingStuff") ~> - cromwellApiService.callCachingDiffRoute ~> - check { - assertResult(StatusCodes.BadRequest) { - status - } - assertResult( - """{ - | "status": "fail", - | "message": "Wrong parameters for call cache diff query:\nmissing workflowA query parameter\nmissing callA query parameter\nmissing workflowB query parameter\nmissing callB query parameter", - | "errors": ["missing workflowA query parameter", "missing callA query parameter", "missing workflowB query parameter", "missing callB query parameter"] - |}""".stripMargin - ) { - responseAs[String] + behavior of "REST API /query POST endpoint" + it should "return good results for a good query map body" in { + Post(s"/workflows/$version/query", HttpEntity(ContentTypes.`application/json`, """[{"status":"Succeeded"}]""")) ~> + akkaHttpService.routes ~> + check { + assertResult(StatusCodes.OK) { + status + } + assertResult(true) { + entityAs[String].contains("\"status\":\"Succeeded\"") + } } - } - } - - behavior of "REST API /labels PATCH endpoint" - it should "return successful status response when assigning valid labels to an existing workflow ID" in { - - val validLabelsJson = - """ - |{ - | "label-key-1":"label-value-1", - | "label-key-2":"label-value-2" - |} - """.stripMargin - - val workflowId = MockApiService.ExistingWorkflowId - - Patch(s"/workflows/$version/$workflowId/labels", HttpEntity(ContentTypes.`application/json`, 
validLabelsJson)) ~> - cromwellApiService.patchLabelsRoute ~> - check { - status shouldBe StatusCodes.OK - val actualResult = responseAs[JsObject] - val expectedResults = - s""" - |{ - | "id": "${workflowId}", - | "labels": { - | "label-key-1":"label-value-1", - | "label-key-2":"label-value-2" - | } - |} - """.stripMargin.parseJson - - actualResult shouldBe expectedResults - } - } + } - it should "return failed response when simulating a write metadata failure" in { + behavior of "REST API /labels PATCH endpoint" + it should "return successful status response when assigning valid labels to an existing workflow ID" in { - val validLabelsJson = - """ - |{ - | "label-key-1":"label-value-1", - | "label-key-2":"label-value-2" - |} - """.stripMargin + val validLabelsJson = + """ + |{ + | "label-key-1":"label-value-1", + | "label-key-2":"label-value-2" + |} + """.stripMargin - val workflowId = MockApiService.AbortedWorkflowId + val workflowId = CromwellApiServiceSpec.ExistingWorkflowId - Patch(s"/workflows/$version/$workflowId/labels", HttpEntity(ContentTypes.`application/json`, validLabelsJson)) ~> - cromwellApiService.patchLabelsRoute ~> - check { - status shouldBe StatusCodes.InternalServerError - val actualResult = responseAs[JsObject] - val expectedResult = - s"""{ - | "status": "fail", - | "message": "Unable to update labels for ${MockApiService.AbortedWorkflowId} due to mock exception of db failure" + Patch(s"/workflows/$version/$workflowId/labels", HttpEntity(ContentTypes.`application/json`, validLabelsJson)) ~> + akkaHttpService.routes ~> + check { + status shouldBe StatusCodes.OK + val actualResult = responseAs[JsObject] + val expectedResults = + s""" + |{ + | "id": "$workflowId", + | "labels": { + | "label-key-1":"label-value-1", + | "label-key-2":"label-value-2" + | } |} """.stripMargin.parseJson - actualResult shouldBe expectedResult - } - } + actualResult shouldBe expectedResults + } + } } object CromwellApiServiceSpec { - class MockApiService()(implicit val 
system: ActorSystem) extends CromwellApiService { - import MockApiService._ + val ExistingWorkflowId = WorkflowId.fromString("c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5") + val AbortedWorkflowId = WorkflowId.fromString("0574111c-c7d3-4145-8190-7a7ed8e8324a") + val UnrecognizedWorkflowId = WorkflowId.fromString("2bdd06cc-e794-46c8-a897-4c86cedb6a06") + val RecognizedWorkflowIds = Set(ExistingWorkflowId, AbortedWorkflowId) + class MockApiService()(implicit val system: ActorSystem) extends CromwellApiService { override def actorRefFactory = system + + override val materializer = ActorMaterializer() + override val ec = system.dispatcher override val workflowStoreActor = actorRefFactory.actorOf(Props(new MockWorkflowStoreActor())) - override val serviceRegistryActor = actorRefFactory.actorOf(Props(new MockServiceRegistryActor)) + override val serviceRegistryActor = actorRefFactory.actorOf(Props(new MockServiceRegistryActor())) override val workflowManagerActor = actorRefFactory.actorOf(Props.empty) - override val callCacheDiffActorProps = Props(new MockCallCacheDiffActor()) - - - override def handleMetadataRequest(message: AnyRef): Route = { - message match { - case GetStatus(w) => complete(submittedStatusResponse(w)) - case WorkflowOutputs(w) => complete(outputResponse(w)) - case GetLogs(w) => complete(logsResponse(w)) - case GetSingleWorkflowMetadataAction(w, None, None, _) => complete(fullMetadataResponse(w)) - case GetSingleWorkflowMetadataAction(w, Some(i), None, _) => complete(filteredMetadataResponse(w)) - case GetSingleWorkflowMetadataAction(w, None, Some(_), _) => complete(filteredMetadataResponse(w)) - case _ => throw new IllegalArgumentException("Woopsie!") - } - } + } - override def handleQueryMetadataRequest(parameters: Seq[(String, String)]): Route = { - complete(QueryResponse) + object MockServiceRegistryActor { + def fullMetadataResponse(workflowId: WorkflowId) = { + List(MetadataEvent(MetadataKey(workflowId, None, "testKey1"), MetadataValue("myValue1", 
MetadataString)), + MetadataEvent(MetadataKey(workflowId, None, "testKey2"), MetadataValue("myValue2", MetadataString))) + } + def filteredMetadataResponse(workflowId: WorkflowId) = { + List(MetadataEvent(MetadataKey(workflowId, None, "testKey1a"), MetadataValue("myValue1a", MetadataString)), + MetadataEvent(MetadataKey(workflowId, None, "testKey1b"), MetadataValue("myValue1b", MetadataString)), + MetadataEvent(MetadataKey(workflowId, None, "testKey2a"), MetadataValue("myValue2a", MetadataString))) } - override def withRecognizedWorkflowId(possibleWorkflowId: String)(recognizedWorkflowId: WorkflowId => Route): Route = { - requestContext => - Try(WorkflowId.fromString(possibleWorkflowId)) match { - case Success(workflowId) => - if (RecognizedWorkflowIds.contains(workflowId)) recognizedWorkflowId(workflowId)(requestContext) - else failBadRequest(new NoSuchElementException("A hollow voice says 'fool'"), StatusCodes.NotFound)(requestContext) - case Failure(e) => - failBadRequest(new RuntimeException(s"Invalid workflow ID: '$possibleWorkflowId'."))(requestContext) - } + def metadataQuery(workflowId: WorkflowId) = MetadataQuery(workflowId, None, None, None, None, false) + + def logsEvents(id: WorkflowId) = { + val stdout = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), CallMetadataKeys.Stdout), MetadataValue("stdout.txt", MetadataString)) + val stderr = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), CallMetadataKeys.Stderr), MetadataValue("stderr.txt", MetadataString)) + val backend = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), s"${CallMetadataKeys.BackendLogsPrefix}:log"), MetadataValue("backend.log", MetadataString)) + Vector(stdout, stderr, backend) } } - object MockApiService { - val ExistingWorkflowId = WorkflowId.fromString("c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5") - val AbortedWorkflowId = WorkflowId.fromString("0574111c-c7d3-4145-8190-7a7ed8e8324a") - val UnrecognizedWorkflowId = 
WorkflowId.fromString("2bdd06cc-e794-46c8-a897-4c86cedb6a06") - val RecognizedWorkflowIds = Set(ExistingWorkflowId, AbortedWorkflowId) - - val QueryResponse = JsObject(Map( - "results" -> JsArray(JsObject(Map( - WorkflowMetadataKeys.Id -> JsString(ExistingWorkflowId.toString), - WorkflowMetadataKeys.Status -> JsString(WorkflowSucceeded.toString) - ))) - )) - - def fullMetadataResponse(workflowId: WorkflowId) = JsObject(Map( - "testKey1" -> JsString("myValue1"), - "testKey2" -> JsString("myValue2") - )) - - def filteredMetadataResponse(workflowId: WorkflowId) = JsObject(Map( - "testKey1a" -> JsString("myValue1a"), - "testKey1b" -> JsString("myValue1b"), - "testKey2a" -> JsString("myValue2a") - )) - - def submittedStatusResponse(workflowId: WorkflowId) = JsObject(Map( - WorkflowMetadataKeys.Status -> JsString(WorkflowSubmitted.toString), - WorkflowMetadataKeys.Id -> JsString(workflowId.toString) - )) - - def outputResponse(workflowId: WorkflowId) = JsObject(Map( - WorkflowMetadataKeys.Id -> JsString(workflowId.toString), - WorkflowMetadataKeys.Outputs -> JsString("Some random stuff") - )) - - def logsResponse(workflowId: WorkflowId) = JsObject(Map( - "calls" -> JsObject(Map( - "mycall" -> JsArray(JsObject(Map( - "stdout" -> JsString("stdout.txt"), - "stderr" -> JsString("stderr.txt"), - "backendLogs" -> JsObject(Map( - "log" -> JsString("backend.log") - )) - ))) - )) - )) + class MockServiceRegistryActor extends Actor { + import MockServiceRegistryActor._ + override def receive = { + case WorkflowQuery(params) => + val response = WorkflowQuerySuccess(WorkflowQueryResponse(List(WorkflowQueryResult(ExistingWorkflowId.toString, + None, Some(WorkflowSucceeded.toString), None, None))), None) + sender ! response + case ValidateWorkflowId(id) => + if (RecognizedWorkflowIds.contains(id)) sender ! MetadataService.RecognizedWorkflowId + else sender ! MetadataService.UnrecognizedWorkflowId + case GetStatus(id) => sender ! 
StatusLookupResponse(id, WorkflowSubmitted) + case WorkflowOutputs(id) => + val event = Vector(MetadataEvent(MetadataKey(id, None, "outputs:test.hello.salutation"), MetadataValue("Hello foo!", MetadataString))) + sender ! WorkflowOutputsResponse(id, event) + case GetLogs(id) => sender ! LogsResponse(id, logsEvents(id)) + case GetSingleWorkflowMetadataAction(id, None, None, _) => sender ! MetadataLookupResponse(metadataQuery(id), fullMetadataResponse(id)) + case GetSingleWorkflowMetadataAction(id, Some(i), None, _) => sender ! MetadataLookupResponse(metadataQuery(id), filteredMetadataResponse(id)) + case GetSingleWorkflowMetadataAction(id, None, Some(_), _) => sender ! MetadataLookupResponse(metadataQuery(id), filteredMetadataResponse(id)) + case PutMetadataActionAndRespond(events, _) => + events.head.key.workflowId match { + case CromwellApiServiceSpec.ExistingWorkflowId => sender ! MetadataWriteSuccess(events) + case CromwellApiServiceSpec.AbortedWorkflowId => sender ! MetadataWriteFailure(new Exception("mock exception of db failure"), events) + } + } } class MockWorkflowStoreActor extends Actor { override def receive = { - case SubmitWorkflow(source) => sender ! WorkflowSubmittedToStore(MockApiService.ExistingWorkflowId) + case SubmitWorkflow(source) => sender ! WorkflowSubmittedToStore(ExistingWorkflowId) case BatchSubmitWorkflows(sources) => - val response = WorkflowsBatchSubmittedToStore(sources map { _ => MockApiService.ExistingWorkflowId }) + val response = WorkflowsBatchSubmittedToStore(sources map { _ => ExistingWorkflowId }) sender ! response case AbortWorkflow(id, manager) => val message = id match { - case MockApiService.ExistingWorkflowId => + case ExistingWorkflowId => WorkflowStoreEngineActor.WorkflowAborted(id) - case MockApiService.AbortedWorkflowId => + case AbortedWorkflowId => WorkflowAbortFailed(id, new IllegalStateException(s"Workflow ID '$id' is in terminal state 'Aborted' and cannot be aborted.")) } - sender ! 
message } } - - class MockServiceRegistryActor extends Actor { - override def receive = { - case PutMetadataActionAndRespond(events, _) => - events.head.key.workflowId match { - case MockApiService.ExistingWorkflowId => - sender ! MetadataWriteSuccess(events) - case MockApiService.AbortedWorkflowId => - sender ! MetadataWriteFailure(new Exception("mock exception of db failure"), events) - } - } - } - - class MockCallCacheDiffActor extends Actor { - override def receive = { - case _: CallCacheDiffQueryParameter => - val json = """{ - | "callA": { - | "workflowId": "85174842-4a44-4355-a3a9-3a711ce556f1", - | "callFqn": "wf_hello.hello", - | "jobIndex": -1, - | "allowResultReuse": true - | }, - | "callB": { - | "workflowId": "85174842-4a44-4355-a3a9-3a711ce556f1", - | "callFqn": "wf_hello.hello", - | "jobIndex": -1, - | "allowResultReuse": false - | }, - | "hashDifferential": [{ - | "key1": { - | "callA": "somehash", - | "callB": "someotherhash" - | } - | }, { - | "key2": { - | "callA": "somehash", - | "callB": null - | } - | }, { - | "key3": { - | "callA": null, - | "callB": "someotherhash" - | } - | }] - |}""".stripMargin.parseJson.asJsObject - - val response = RequestComplete((StatusCodes.OK, json)) - sender() ! 
response - } - } } diff --git a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala index 9d4c7b66f..9fb608dcf 100644 --- a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala @@ -4,43 +4,42 @@ import java.time.OffsetDateTime import java.util.UUID import akka.testkit._ +import akka.pattern.ask +import akka.util.Timeout import cromwell.core.{TestKitSuite, WorkflowId} import cromwell.services.metadata.MetadataService._ import cromwell.services.metadata._ -import cromwell.webservice.PerRequest.RequestComplete import cromwell.webservice.metadata.MetadataBuilderActor +import cromwell.webservice.metadata.MetadataBuilderActor.{BuiltMetadataResponse, MetadataBuilderActorResponse} import org.scalatest.prop.TableDrivenPropertyChecks -import org.scalatest.{FlatSpecLike, Matchers} +import org.scalatest.{Assertion, AsyncFlatSpecLike, Matchers, Succeeded} import org.specs2.mock.Mockito -import spray.http.{StatusCode, StatusCodes} import spray.json._ +import scala.concurrent.Future import scala.concurrent.duration._ import scala.language.postfixOps -class MetadataBuilderActorSpec extends TestKitSuite("Metadata") with FlatSpecLike with Matchers with Mockito +class MetadataBuilderActorSpec extends TestKitSuite("Metadata") with AsyncFlatSpecLike with Matchers with Mockito with TableDrivenPropertyChecks with ImplicitSender { behavior of "MetadataParser" val defaultTimeout = 200 millis + implicit val timeout: Timeout = defaultTimeout + val mockServiceRegistry = TestProbe() def assertMetadataResponse(action: MetadataServiceAction, queryReply: MetadataQuery, events: Seq[MetadataEvent], - expectedRes: String) = { - val parentProbe = TestProbe() - val metadataBuilder = TestActorRef(MetadataBuilderActor.props(mockServiceRegistry.ref), parentProbe.ref, s"MetadataActor-${UUID.randomUUID()}") - 
metadataBuilder ! action // Ask for everything - mockServiceRegistry.expectMsg(defaultTimeout, action) // TestActor runs on CallingThreadDispatcher + expectedRes: String): Future[Assertion] = { + val mba = system.actorOf(MetadataBuilderActor.props(mockServiceRegistry.ref)) + val response = mba.ask(action).mapTo[MetadataBuilderActorResponse] + mockServiceRegistry.expectMsg(defaultTimeout, action) mockServiceRegistry.reply(MetadataLookupResponse(queryReply, events)) - - parentProbe.expectMsgPF(defaultTimeout) { - case response: RequestComplete[(StatusCode, JsObject)] @unchecked => - response.response._1 shouldBe StatusCodes.OK - response.response._2 shouldBe expectedRes.parseJson - } + response map { r => r shouldBe a [BuiltMetadataResponse] } + response.mapTo[BuiltMetadataResponse] map { b => b.response shouldBe expectedRes.parseJson} } it should "build workflow scope tree from metadata events" in { @@ -292,15 +291,16 @@ class MetadataBuilderActorSpec extends TestKitSuite("Metadata") with FlatSpecLik val kisv3 = ("key[0]:subkey", "value3", OffsetDateTime.now.plusSeconds(2)) val kiv4 = ("key[0]", "value4", OffsetDateTime.now.plusSeconds(3)) - val t = Table( - ("list", "res"), + val t = List( (List(kv), """"key": "value""""), (List(kv, ksv2), """"key": { "subkey": "value2" }"""), (List(kv, ksv2, kisv3), """"key": [ { "subkey": "value3" } ]"""), (List(kv, ksv2, kisv3, kiv4), """"key": [ "value4" ]""") ) - forAll(t) { (l, r) => assertMetadataKeyStructure(l, r) } + Future.sequence(t map { case (l, r) => assertMetadataKeyStructure(l, r) }) map { assertions => + assertions should contain only Succeeded + } } it should "coerce values to supported types" in { @@ -445,7 +445,7 @@ class MetadataBuilderActorSpec extends TestKitSuite("Metadata") with FlatSpecLik val parentProbe = TestProbe() val metadataBuilder = TestActorRef(MetadataBuilderActor.props(mockServiceRegistry.ref), parentProbe.ref, s"MetadataActor-${UUID.randomUUID()}") - metadataBuilder ! 
mainQueryAction + val response = metadataBuilder.ask(mainQueryAction).mapTo[MetadataBuilderActorResponse] mockServiceRegistry.expectMsg(defaultTimeout, mainQueryAction) mockServiceRegistry.reply(MetadataLookupResponse(mainQuery, mainEvents)) mockServiceRegistry.expectMsg(defaultTimeout, subQueryAction) @@ -471,11 +471,9 @@ class MetadataBuilderActorSpec extends TestKitSuite("Metadata") with FlatSpecLik |} """.stripMargin - parentProbe.expectMsgPF(defaultTimeout) { - case response: RequestComplete[(StatusCode, JsObject)] @unchecked => - response.response._1 shouldBe StatusCodes.OK - response.response._2 shouldBe expandedRes.parseJson - } + response map { r => r shouldBe a [BuiltMetadataResponse] } + val bmr = response.mapTo[BuiltMetadataResponse] + bmr map { b => b.response shouldBe expandedRes.parseJson} } it should "NOT expand sub workflow metadata when NOT asked for" in { @@ -491,7 +489,7 @@ class MetadataBuilderActorSpec extends TestKitSuite("Metadata") with FlatSpecLik val parentProbe = TestProbe() val metadataBuilder = TestActorRef(MetadataBuilderActor.props(mockServiceRegistry.ref), parentProbe.ref, s"MetadataActor-${UUID.randomUUID()}") - metadataBuilder ! 
queryNoExpandAction + val response = metadataBuilder.ask(queryNoExpandAction).mapTo[MetadataBuilderActorResponse] mockServiceRegistry.expectMsg(defaultTimeout, queryNoExpandAction) mockServiceRegistry.reply(MetadataLookupResponse(queryNoExpand, mainEvents)) @@ -511,11 +509,10 @@ class MetadataBuilderActorSpec extends TestKitSuite("Metadata") with FlatSpecLik | "id": "$mainWorkflowId" |} """.stripMargin - - parentProbe.expectMsgPF(defaultTimeout) { - case response: RequestComplete[(StatusCode, JsObject)] @unchecked => - response.response._1 shouldBe StatusCodes.OK - response.response._2 shouldBe nonExpandedRes.parseJson - } + + response map { r => r shouldBe a [BuiltMetadataResponse] } + val bmr = response.mapTo[BuiltMetadataResponse] + bmr map { b => b.response shouldBe nonExpandedRes.parseJson} + } } diff --git a/engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala b/engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala new file mode 100644 index 000000000..cd7422ffc --- /dev/null +++ b/engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala @@ -0,0 +1,17 @@ +package cromwell.webservice + +import org.scalatest.{FlatSpec, Matchers} +import spray.json._ +import spray.json.DefaultJsonProtocol._ + +class PartialWorkflowSourcesSpec extends FlatSpec with Matchers { + it should "succesfully merge and override multiple input files" in { + val input1 = Map("wf.a1" -> "hello", "wf.a2" -> "world").toJson.toString + val input2 = Map.empty[String, String].toJson.toString + val overrideInput1 = Map("wf.a2" -> "universe").toJson.toString + val allInputs = PartialWorkflowSources.mergeMaps(Seq(Option(input1), Option(input2), Option(overrideInput1))) + + allInputs.fields.keys should contain allOf("wf.a1", "wf.a2") + allInputs.fields("wf.a2") should be(JsString("universe")) + } +} diff --git a/engine/src/test/scala/cromwell/webservice/SprayCanHttpServiceSpec.scala 
b/engine/src/test/scala/cromwell/webservice/SprayCanHttpServiceSpec.scala deleted file mode 100644 index 753f0da3f..000000000 --- a/engine/src/test/scala/cromwell/webservice/SprayCanHttpServiceSpec.scala +++ /dev/null @@ -1,252 +0,0 @@ -package cromwell.webservice - -import akka.actor.{Actor, Props} -import akka.io.Tcp.Unbound -import akka.testkit._ -import akka.util.Timeout -import cromwell.webservice.SprayCanHttpService._ -import lenthall.test.actor.TestActorSystem._ -import org.scalatest.concurrent.ScalaFutures -import org.scalatest.time.{Millis, Seconds, Span} -import org.scalatest.{Assertions, FlatSpec, Matchers} -import spray.can.Http -import spray.routing.HttpService - -import scala.concurrent.duration._ -import scala.concurrent.{ExecutionContext, Promise} - -class SprayCanHttpServiceSpec extends FlatSpec with Matchers with ScalaFutures with Assertions { - - implicit val defaultPatience = PatienceConfig(timeout = Span(5, Seconds), interval = Span(100, Millis)) - - import ExecutionContext.Implicits.global - - behavior of "SprayCanHttpService" - - it should "bind and unbind to a random port" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val (port, unbound) = (for { - boundListener <- service.bind(LoopbackAddress, 0) - port = boundListener.port - unbound <- boundListener.unbind() - } yield (port, unbound)).futureValue - - port should be > 0 - unbound should be(Http.Unbound) - - assert(!system.whenTerminated.isCompleted) - () - } - } - - it should "bind and unbind to a random port without shutting down" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val (port, unbound) = (for { - boundListener <- service.bindOrShutdown(LoopbackAddress, 0) - port = boundListener.port - unbound <- boundListener.unbind() - } yield (port, unbound)).futureValue 
- - port should be > 0 - unbound should be(Http.Unbound) - - assert(!system.whenTerminated.isCompleted) - () - } - } - - it should "bind and unbind to a random port and then shut down" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val (port, unbound) = (for { - boundListener <- service.bind(LoopbackAddress, 0) - port = boundListener.port - unbound <- boundListener.unbindAndShutdown() - } yield (port, unbound)).futureValue - - port should be > 0 - unbound should be(Http.Unbound) - - assert(system.whenTerminated.isCompleted) - () - } - } - - it should "fail to bind to an invalid endpoint" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val exception = (for { - boundListener <- service.bind(LoopbackAddress, -1) - _ <- boundListener.unbind() - } yield Unit).failed.futureValue - - exception should be(an[IllegalArgumentException]) - exception.getMessage should be("port out of range:-1") - - assert(!system.whenTerminated.isCompleted) - () - } - } - - it should "fail to bind to an invalid endpoint and shutdown" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val exception = (for { - boundListener <- service.bindOrShutdown(LoopbackAddress, -1) - _ <- boundListener.unbind() - } yield Unit).failed.futureValue - - exception should be(an[IllegalArgumentException]) - exception.getMessage should be("port out of range:-1") - - assert(system.whenTerminated.isCompleted) - () - } - } - - it should "fail to bind to an open endpoint" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val (port, exception, unbound) = (for { - boundListener <- 
service.bind(LoopbackAddress, 0) - port = boundListener.port - exception <- service.bind(LoopbackAddress, port).failed - unbound <- boundListener.unbind() - } yield (port, exception, unbound)).futureValue - - port should be > 0 - exception should be(a[BindFailedException]) - unbound should be(Http.Unbound) - - assert(!system.whenTerminated.isCompleted) - () - } - } - - it should "return with a bind timeout exception" in { - withActorSystem { implicit system => - val promise = Promise[BoundListener]() - system.actorOf(Props(classOf[TestSprayCanBindActor], promise, Timeout(100.milliseconds.dilated))) - promise.future.failed.futureValue should be(a[BindTimeoutException]) - () - } - } - - it should "return with an unexpected bind message exception" in { - withActorSystem { implicit system => - val promise = Promise[BoundListener]() - val actorRef = system.actorOf(Props(classOf[TestSprayCanBindActor], promise, Timeout(2.seconds.dilated))) - actorRef ! "unexpected" - promise.future.failed.futureValue should be(an[UnexpectedBindMessageException]) - () - } - } - - it should "timeout when unbind called twice" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val (port, unbound, exception) = (for { - boundListener <- service.bind(LoopbackAddress, 0) - port = boundListener.port - unbound <- boundListener.unbind() - exception <- boundListener.unbind().failed // sends a dead letter - } yield (port, unbound, exception)).futureValue - - port should be > 0 - unbound should be(Http.Unbound) - exception should be(an[UnbindTimeoutException]) - - assert(!system.whenTerminated.isCompleted) - () - } - } - - it should "timeout when unbind called twice and shutdown" in { - withActorSystem { implicit system => - val service = system.actorOf(Props[TestHttpServiceActor]) - implicit val timeout = Timeout(2.seconds.dilated) - - val (port, unbound, exception) = (for { - boundListener <- 
service.bind(LoopbackAddress, 0) - port = boundListener.port - unbound <- boundListener.unbind() - exception <- boundListener.unbindAndShutdown().failed // sends a dead letter - } yield (port, unbound, exception)).futureValue - - port should be > 0 - unbound should be(Http.Unbound) - exception should be(an[UnbindTimeoutException]) - - assert(system.whenTerminated.isCompleted) - () - } - } - - it should "return with an unbind failed exception" in { - withActorSystem { implicit system => - val promise = Promise[Unbound]() - val actorRef = system.actorOf(Props(classOf[TestSprayCanUnbindActor], promise, Timeout(2.seconds.dilated))) - actorRef ! Http.CommandFailed(Http.Unbind) - promise.future.failed.futureValue should be(an[UnbindFailedException]) - () - } - } - - it should "return with an unbind timeout exception" in { - withActorSystem { implicit system => - val promise = Promise[Unbound]() - system.actorOf(Props(classOf[TestSprayCanUnbindActor], promise, Timeout(100.milliseconds.dilated))) - promise.future.failed.futureValue should be(an[UnbindTimeoutException]) - () - } - } - - it should "return with an unexpected unbind message exception" in { - withActorSystem { implicit system => - val promise = Promise[Unbound]() - val actorRef = system.actorOf(Props(classOf[TestSprayCanUnbindActor], promise, Timeout(2.seconds.dilated))) - actorRef ! 
"unexpected" - promise.future.failed.futureValue should be(an[UnexpectedUnbindMessageException]) - () - } - } - -} - -class TestHttpServiceActor extends Actor with HttpService { - override implicit def actorRefFactory = context - - override def receive = { - case _ => - } -} - -class TestSprayCanBindActor(promise: Promise[BoundListener], timeout: Timeout) - extends SprayCanBindActor(Http.Bind(null, LoopbackAddress), promise, timeout) { - override def preStart(): Unit = { - context setReceiveTimeout timeout.duration - } -} - -class TestSprayCanUnbindActor(promise: Promise[Unbound], timeout: Timeout) - extends SprayCanUnbindActor(null, Http.Unbind(Duration.Zero), promise, timeout) { - override def preStart(): Unit = { - context setReceiveTimeout timeout.duration - } -} diff --git a/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala index 18a8f81af..20c7c3e6d 100644 --- a/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala @@ -1,5 +1,7 @@ package cromwell.webservice +import akka.http.scaladsl.model.StatusCodes +import akka.http.scaladsl.testkit.ScalatestRouteTest import io.swagger.models.properties.RefProperty import io.swagger.parser.SwaggerParser import org.scalatest.prop.TableDrivenPropertyChecks @@ -8,8 +10,6 @@ import org.yaml.snakeyaml.constructor.Constructor import org.yaml.snakeyaml.error.YAMLException import org.yaml.snakeyaml.nodes.MappingNode import org.yaml.snakeyaml.{Yaml => SnakeYaml} -import spray.http._ -import spray.testkit.ScalatestRouteTest import scala.collection.JavaConverters._ diff --git a/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala index 11759f4e3..292d5ad73 100644 --- a/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala +++ 
b/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala @@ -1,23 +1,21 @@ package cromwell.webservice +import akka.http.scaladsl.model.{StatusCodes, Uri} +import akka.http.scaladsl.model.headers.Location +import akka.http.scaladsl.server.Route +import akka.http.scaladsl.testkit.ScalatestRouteTest import com.typesafe.config.ConfigFactory import cromwell.webservice.SwaggerUiHttpServiceSpec._ import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.{FlatSpec, Matchers} -import spray.http.HttpHeaders.Location -import spray.http.{StatusCodes, Uri} -import spray.testkit.ScalatestRouteTest -trait SwaggerUiHttpServiceSpec extends FlatSpec with Matchers with ScalatestRouteTest with SwaggerUiHttpService { - override def actorRefFactory = system +trait SwaggerUiHttpServiceSpec extends FlatSpec with Matchers with ScalatestRouteTest with SwaggerUiHttpService { override def swaggerUiVersion = TestSwaggerUiVersion } trait SwaggerResourceHttpServiceSpec extends FlatSpec with Matchers with ScalatestRouteTest with TableDrivenPropertyChecks with SwaggerResourceHttpService { - override def actorRefFactory = system - val testPathsForOptions = Table("endpoint", "/", "/swagger", "/swagger/index.html", "/api", "/api/example", "/api/example?with=param", "/api/example/path") } @@ -47,7 +45,7 @@ class BasicSwaggerUiHttpServiceSpec extends SwaggerUiHttpServiceSpec { } it should "not return options for /" in { - Options() ~> sealRoute(swaggerUiRoute) ~> check { + Options() ~> Route.seal(swaggerUiRoute) ~> check { status should be(StatusCodes.MethodNotAllowed) } } @@ -72,13 +70,13 @@ class NoRedirectRootSwaggerUiHttpServiceSpec extends SwaggerUiHttpServiceSpec { behavior of "SwaggerUiHttpService" it should "not redirect / to /swagger" in { - Get() ~> sealRoute(swaggerUiRoute) ~> check { + Get() ~> Route.seal(swaggerUiRoute) ~> check { status should be(StatusCodes.NotFound) } } it should "not return options for /" in { - Options() ~> sealRoute(swaggerUiRoute) 
~> check { + Options() ~> Route.seal(swaggerUiRoute) ~> check { status should be(StatusCodes.MethodNotAllowed) } } @@ -156,13 +154,13 @@ class YamlSwaggerResourceHttpServiceSpec extends SwaggerResourceHttpServiceSpec } it should "not service swagger json" in { - Get("/swagger/testservice.json") ~> sealRoute(swaggerResourceRoute) ~> check { + Get("/swagger/testservice.json") ~> Route.seal(swaggerResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } it should "not service /swagger" in { - Get("/swagger") ~> sealRoute(swaggerResourceRoute) ~> check { + Get("/swagger") ~> Route.seal(swaggerResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } @@ -192,13 +190,13 @@ class JsonSwaggerResourceHttpServiceSpec extends SwaggerResourceHttpServiceSpec } it should "not service swagger yaml" in { - Get("/swagger/testservice.yaml") ~> sealRoute(swaggerResourceRoute) ~> check { + Get("/swagger/testservice.yaml") ~> Route.seal(swaggerResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } it should "not service /swagger" in { - Get("/swagger") ~> sealRoute(swaggerResourceRoute) ~> check { + Get("/swagger") ~> Route.seal(swaggerResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } @@ -228,20 +226,20 @@ class NoOptionsSwaggerResourceHttpServiceSpec extends SwaggerResourceHttpService } it should "not service swagger json" in { - Get("/swagger/testservice.json") ~> sealRoute(swaggerResourceRoute) ~> check { + Get("/swagger/testservice.json") ~> Route.seal(swaggerResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } it should "not service /swagger" in { - Get("/swagger") ~> sealRoute(swaggerResourceRoute) ~> check { + Get("/swagger") ~> Route.seal(swaggerResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } it should "not return options for all routes" in { forAll(testPathsForOptions) { path => - Options(path) ~> sealRoute(swaggerResourceRoute) ~> check { + Options(path) ~> 
Route.seal(swaggerResourceRoute) ~> check { status should be(StatusCodes.MethodNotAllowed) } } @@ -268,7 +266,7 @@ class YamlSwaggerUiResourceHttpServiceSpec extends SwaggerUiResourceHttpServiceS } it should "not service swagger json" in { - Get("/swagger/testservice.json") ~> sealRoute(swaggerUiResourceRoute) ~> check { + Get("/swagger/testservice.json") ~> Route.seal(swaggerUiResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } @@ -306,7 +304,7 @@ class JsonSwaggerUiResourceHttpServiceSpec extends SwaggerUiResourceHttpServiceS } it should "not service swagger yaml" in { - Get("/swagger/testservice.yaml") ~> sealRoute(swaggerUiResourceRoute) ~> check { + Get("/swagger/testservice.yaml") ~> Route.seal(swaggerUiResourceRoute) ~> check { status should be(StatusCodes.NotFound) } } diff --git a/engine/src/test/scala/cromwell/webservice/WrappedRouteSpec.scala b/engine/src/test/scala/cromwell/webservice/WrappedRouteSpec.scala index 9b576d1a0..0dcdf85c1 100644 --- a/engine/src/test/scala/cromwell/webservice/WrappedRouteSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/WrappedRouteSpec.scala @@ -1,13 +1,13 @@ package cromwell.webservice +import akka.http.scaladsl.model.StatusCodes +import akka.http.scaladsl.testkit.ScalatestRouteTest import cromwell.webservice.WrappedRoute._ import org.scalatest.{FlatSpec, Matchers} -import spray.http.StatusCodes -import spray.routing.HttpService -import spray.testkit.ScalatestRouteTest +import akka.http.scaladsl.server.Directives._ +import akka.http.scaladsl.server.Route -class WrappedRouteSpec extends FlatSpec with Matchers with HttpService with ScalatestRouteTest { - override def actorRefFactory = system +class WrappedRouteSpec extends FlatSpec with Matchers with ScalatestRouteTest { def unwrappedRoute = path("hello") { get { @@ -36,21 +36,21 @@ class WrappedRouteSpec extends FlatSpec with Matchers with HttpService with Scal } it should "not service unwrapped routes" in { - Get("/hello") ~> 
sealRoute(wrappedRoute) ~> check { + Get("/hello") ~> Route.seal(wrappedRoute) ~> check { status should be(StatusCodes.NotFound) } - Post("/hello") ~> sealRoute(wrappedRoute) ~> check { + Post("/hello") ~> Route.seal(wrappedRoute) ~> check { status should be(StatusCodes.NotFound) } } it should "not service other routes" in { - Delete("/hello") ~> sealRoute(wrappedRoute) ~> check { + Delete("/hello") ~> Route.seal(wrappedRoute) ~> check { status should be(StatusCodes.NotFound) } - Get("/swagger") ~> sealRoute(wrappedRoute) ~> check { + Get("/swagger") ~> Route.seal(wrappedRoute) ~> check { status should be(StatusCodes.NotFound) } } @@ -80,11 +80,11 @@ class WrappedRouteSpec extends FlatSpec with Matchers with HttpService with Scal } it should "not service other routes when routeUnwrapped is true" in { - Delete("/hello") ~> sealRoute(legacyWrappedRoute) ~> check { + Delete("/hello") ~> Route.seal(legacyWrappedRoute) ~> check { status should be(StatusCodes.MethodNotAllowed) } - Get("/swagger") ~> sealRoute(legacyWrappedRoute) ~> check { + Get("/swagger") ~> Route.seal(legacyWrappedRoute) ~> check { status should be(StatusCodes.NotFound) } } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 227fbfbf8..c02a48d5d 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -3,17 +3,10 @@ import sbt._ object Dependencies { lazy val lenthallV = "0.25" lazy val wdl4sV = "0.13" - lazy val sprayV = "1.3.3" - /* - spray-json is an independent project from the "spray suite" - - https://github.com/spray/spray - - https://github.com/spray/spray-json - - http://spray.io/documentation/1.2.2/spray-httpx/spray-json-support/ - - http://doc.akka.io/docs/akka/2.4/scala/http/common/json-support.html#akka-http-spray-json - */ - lazy val sprayJsonV = "1.3.2" + lazy val akkaV = "2.4.16" - lazy val akkaHttpV = "2.4.11.2" + lazy val akkaHttpV = "10.0.5" + lazy val slickV = "3.2.0" // TODO: Re-combine these when cromwell is 2.12: lazy val 
cromwellApiClientAkkaV = "2.4.17" @@ -70,14 +63,13 @@ object Dependencies { "com.mattbertolini" % "liquibase-slf4j" % "2.0.0" ) - private val sprayServerDependencies = List( - "org.webjars" % "swagger-ui" % "2.1.1", - "io.spray" %% "spray-can" % sprayV, - "io.spray" %% "spray-routing-shapeless2" % sprayV, - "io.spray" %% "spray-http" % sprayV, - "io.spray" %% "spray-testkit" % sprayV % Test + val akkaHttpDependencies = List( + "com.typesafe.akka" %% "akka-http" % akkaHttpV, + "com.typesafe.akka" %% "akka-http-testkit" % akkaHttpV % Test ) + val akkaHttpServerDependencies = akkaHttpDependencies :+ "org.webjars" % "swagger-ui" % "2.1.1" + private val googleApiClientDependencies = List( // Used by swagger, but only in tests. This overrides an older 2.1.3 version of jackson-core brought in by // these Google dependencies, but which isn't properly evicted by IntelliJ's sbt integration. @@ -126,17 +118,15 @@ object Dependencies { "com.typesafe.scala-logging" %% "scala-logging" % "3.4.0", "org.broadinstitute" %% "wdl4s" % wdl4sV, "org.apache.commons" % "commons-lang3" % "3.4", - "io.spray" %% "spray-json" % sprayJsonV, + "com.typesafe.akka" %% "akka-http-spray-json" % akkaHttpV, "com.typesafe" % "config" % "1.3.0", "com.typesafe.akka" %% "akka-actor" % akkaV, "com.typesafe.akka" %% "akka-slf4j" % akkaV, "com.typesafe.akka" %% "akka-testkit" % akkaV % Test, "com.google.guava" % "guava" % "20.0", "com.google.auth" % "google-auth-library-oauth2-http" % "0.6.0", - "com.typesafe.akka" %% "akka-http-core" % akkaHttpV, - "com.typesafe.akka" %% "akka-stream-testkit" % akkaHttpV, - "com.chuusai" %% "shapeless" % "2.3.2", - "com.typesafe.akka" %% "akka-http-spray-json-experimental" % akkaHttpV + "com.typesafe.akka" %% "akka-stream-testkit" % akkaV, + "com.chuusai" %% "shapeless" % "2.3.2" ) ++ baseDependencies ++ googleApiClientDependencies ++ // TODO: We're not using the "F" in slf4j. Core only supports logback, specifically the WorkflowLogger. 
slf4jBindingDependencies @@ -145,6 +135,7 @@ object Dependencies { "com.github.pathikrit" %% "better-files" % betterFilesV % Test ) ++ liquibaseDependencies ++ dbmsDependencies + // FIXME: this needs to be cleaned up w/ 2.12 move val cromwellApiClientDependencies = List( "com.typesafe.akka" %% "akka-actor" % cromwellApiClientAkkaV, "com.typesafe.akka" %% "akka-http" % cromwellApiClientAkkaHttpV, @@ -154,27 +145,21 @@ object Dependencies { "org.pegdown" % "pegdown" % "1.6.0" % Test ) - val jesBackendDependencies = refinedTypeDependenciesList - - val tesBackendDependencies = List( - "io.spray" %% "spray-client" % sprayV - ) ++ sprayServerDependencies - - val sparkBackendDependencies = List( - "io.spray" %% "spray-client" % sprayV - ) ++ sprayServerDependencies - val engineDependencies = List( "commons-codec" % "commons-codec" % "1.10", "commons-io" % "commons-io" % "2.5", - "com.storm-enroute" %% "scalameter" % "0.8.2" + "com.storm-enroute" %% "scalameter" % "0.8.2" exclude("com.fasterxml.jackson.core", "jackson-databind") exclude("com.fasterxml.jackson.module", "jackson-module-scala"), "com.fasterxml.jackson.core" % "jackson-databind" % "2.7.9.1", "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.7.9", "io.swagger" % "swagger-parser" % "1.0.22" % Test, "org.yaml" % "snakeyaml" % "1.17" % Test - ) ++ sprayServerDependencies + ) ++ akkaHttpServerDependencies val rootDependencies = slf4jBindingDependencies + + val jesBackendDependencies = refinedTypeDependenciesList + val tesBackendDependencies = akkaHttpDependencies + val sparkBackendDependencies = akkaHttpDependencies } diff --git a/services/src/main/scala/cromwell/services/metadata/MetadataService.scala b/services/src/main/scala/cromwell/services/metadata/MetadataService.scala index 6106b6f04..be5f5dc42 100644 --- a/services/src/main/scala/cromwell/services/metadata/MetadataService.scala +++ b/services/src/main/scala/cromwell/services/metadata/MetadataService.scala @@ -16,11 +16,11 @@ object 
MetadataService { final val MetadataServiceName = "MetadataService" - case class WorkflowQueryResult(id: String, name: Option[String], status: Option[String], start: Option[OffsetDateTime], end: Option[OffsetDateTime]) + final case class WorkflowQueryResult(id: String, name: Option[String], status: Option[String], start: Option[OffsetDateTime], end: Option[OffsetDateTime]) - case class WorkflowQueryResponse(results: Seq[WorkflowQueryResult]) + final case class WorkflowQueryResponse(results: Seq[WorkflowQueryResult]) - case class QueryMetadata(page: Option[Int], pageSize: Option[Int], totalRecords: Option[Int]) + final case class QueryMetadata(page: Option[Int], pageSize: Option[Int], totalRecords: Option[Int]) trait MetadataServiceMessage /** @@ -70,7 +70,7 @@ object MetadataService { extends ReadAction case class GetMetadataQueryAction(key: MetadataQuery) extends ReadAction case class GetStatus(workflowId: WorkflowId) extends ReadAction - case class WorkflowQuery[A](uri: A, parameters: Seq[(String, String)]) extends ReadAction + case class WorkflowQuery(parameters: Seq[(String, String)]) extends ReadAction case class WorkflowOutputs(workflowId: WorkflowId) extends ReadAction case class GetLogs(workflowId: WorkflowId) extends ReadAction case object RefreshSummary extends MetadataServiceAction @@ -80,8 +80,8 @@ object MetadataService { def onUnrecognized(possibleWorkflowId: String): Unit def onFailure(possibleWorkflowId: String, throwable: Throwable): Unit } - final case class ValidateWorkflowIdAndExecute(possibleWorkflowId: String, - validationCallback: ValidationCallback) extends MetadataServiceAction + + final case class ValidateWorkflowId(possibleWorkflowId: WorkflowId) extends MetadataServiceAction /** * Responses @@ -91,15 +91,11 @@ object MetadataService { def reason: Throwable } - case class MetadataLookupResponse(query: MetadataQuery, eventList: Seq[MetadataEvent]) extends MetadataServiceResponse - case class MetadataServiceKeyLookupFailed(query: 
MetadataQuery, reason: Throwable) extends MetadataServiceFailure - - case class StatusLookupResponse(workflowId: WorkflowId, status: WorkflowState) extends MetadataServiceResponse - case class StatusLookupFailed(workflowId: WorkflowId, reason: Throwable) extends MetadataServiceFailure + final case class MetadataLookupResponse(query: MetadataQuery, eventList: Seq[MetadataEvent]) extends MetadataServiceResponse + final case class MetadataServiceKeyLookupFailed(query: MetadataQuery, reason: Throwable) extends MetadataServiceFailure - final case class WorkflowQuerySuccess[A](uri: A, response: WorkflowQueryResponse, meta: Option[QueryMetadata]) - extends MetadataServiceResponse - final case class WorkflowQueryFailure(reason: Throwable) extends MetadataServiceFailure + final case class StatusLookupResponse(workflowId: WorkflowId, status: WorkflowState) extends MetadataServiceResponse + final case class StatusLookupFailed(workflowId: WorkflowId, reason: Throwable) extends MetadataServiceFailure final case class WorkflowOutputsResponse(id: WorkflowId, outputs: Seq[MetadataEvent]) extends MetadataServiceResponse final case class WorkflowOutputsFailure(id: WorkflowId, reason: Throwable) extends MetadataServiceFailure @@ -110,6 +106,15 @@ object MetadataService { final case class MetadataWriteSuccess(events: Iterable[MetadataEvent]) extends MetadataServiceResponse final case class MetadataWriteFailure(reason: Throwable, events: Iterable[MetadataEvent]) extends MetadataServiceFailure + sealed abstract class WorkflowValidationResponse extends MetadataServiceResponse + case object RecognizedWorkflowId extends WorkflowValidationResponse + case object UnrecognizedWorkflowId extends WorkflowValidationResponse + final case class FailedToCheckWorkflowId(cause: Throwable) extends WorkflowValidationResponse + + sealed abstract class MetadataQueryResponse extends MetadataServiceResponse + final case class WorkflowQuerySuccess(response: WorkflowQueryResponse, meta: Option[QueryMetadata]) 
extends MetadataQueryResponse + final case class WorkflowQueryFailure(reason: Throwable) extends MetadataQueryResponse + def wdlValueToMetadataEvents(metadataKey: MetadataKey, wdlValue: WdlValue): Iterable[MetadataEvent] = wdlValue match { case WdlArray(_, valueSeq) => if (valueSeq.isEmpty) { diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala index 313a839db..6541a51ff 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala @@ -1,6 +1,5 @@ package cromwell.services.metadata.impl -import java.util.UUID import akka.actor.SupervisorStrategy.{Decider, Directive, Escalate, Resume} import akka.actor.{Actor, ActorContext, ActorInitializationException, ActorLogging, ActorRef, OneForOneStrategy, Props} @@ -16,7 +15,7 @@ import net.ceedubs.ficus.Ficus._ import scala.concurrent.duration._ import scala.language.postfixOps -import scala.util.{Failure, Success, Try} +import scala.util.{Failure, Success} object MetadataServiceActor { @@ -70,21 +69,11 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config) actor } - private def validateWorkflowId(validation: ValidateWorkflowIdAndExecute): Unit = { - val possibleWorkflowId = validation.possibleWorkflowId - val callback = validation.validationCallback - - Try(UUID.fromString(possibleWorkflowId)) match { - case Failure(t) => callback.onMalformed(possibleWorkflowId) - case Success(uuid) => - workflowExistsWithId(possibleWorkflowId) onComplete { - case Success(true) => - callback.onRecognized(WorkflowId(uuid)) - case Success(false) => - callback.onUnrecognized(possibleWorkflowId) - case Failure(t) => - callback.onFailure(possibleWorkflowId, t) - } + private def validateWorkflowId(possibleWorkflowId: WorkflowId, sender: ActorRef): Unit = { + 
workflowExistsWithId(possibleWorkflowId.toString) onComplete { + case Success(true) => sender ! RecognizedWorkflowId + case Success(false) => sender ! UnrecognizedWorkflowId + case Failure(e) => sender ! FailedToCheckWorkflowId(new RuntimeException(s"Failed lookup attempt for workflow ID $possibleWorkflowId", e)) } } @@ -92,7 +81,7 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config) case action@PutMetadataAction(events) => writeActor forward action case action@PutMetadataActionAndRespond(events, replyTo) => writeActor forward action case CheckPendingWrites => writeActor forward CheckPendingWrites - case v: ValidateWorkflowIdAndExecute => validateWorkflowId(v) + case v: ValidateWorkflowId => validateWorkflowId(v.possibleWorkflowId, sender()) case action: ReadAction => readActor forward action case RefreshSummary => summaryActor foreach { _ ! SummarizeMetadata(sender()) } case MetadataSummarySuccess => scheduleSummary() diff --git a/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataActor.scala index 5308d69f2..2ad5a9d5c 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataActor.scala @@ -27,7 +27,7 @@ class ReadMetadataActor extends Actor with ActorLogging with MetadataDatabaseAcc case GetMetadataQueryAction(query@MetadataQuery(_, _, _, _, _, _)) => queryAndRespond(query) case GetStatus(workflowId) => queryStatusAndRespond(workflowId) case GetLogs(workflowId) => queryLogsAndRespond(workflowId) - case query: WorkflowQuery[_] => queryWorkflowsAndRespond(query.uri, query.parameters) + case query: WorkflowQuery => queryWorkflowsAndRespond(query.parameters) case WorkflowOutputs(id) => queryWorkflowOutputsAndRespond(id) } @@ -50,7 +50,7 @@ class ReadMetadataActor extends Actor with ActorLogging with MetadataDatabaseAcc } } - private 
def queryWorkflowsAndRespond[A](uri: A, rawParameters: Seq[(String, String)]): Unit = { + private def queryWorkflowsAndRespond(rawParameters: Seq[(String, String)]): Unit = { def queryWorkflows: Future[(WorkflowQueryResponse, Option[QueryMetadata])] = { for { // Future/Try to wrap the exception that might be thrown from WorkflowQueryParameters.apply. @@ -62,7 +62,7 @@ class ReadMetadataActor extends Actor with ActorLogging with MetadataDatabaseAcc val sndr = sender() queryWorkflows onComplete { - case Success((response, metadata)) => sndr ! WorkflowQuerySuccess(uri, response, metadata) + case Success((response, metadata)) => sndr ! WorkflowQuerySuccess(response, metadata) case Failure(t) => sndr ! WorkflowQueryFailure(t) } } diff --git a/src/main/resources/application.conf b/src/main/resources/application.conf index a27527934..07b9a07eb 100644 --- a/src/main/resources/application.conf +++ b/src/main/resources/application.conf @@ -4,12 +4,11 @@ akka { actor.guardian-supervisor-strategy = "cromwell.core.CromwellUserGuardianStrategy" } -spray.can { +akka.http { server { request-timeout = 40s } client { - request-timeout = 40s connecting-timeout = 40s } } diff --git a/supportedBackends/spark/src/main/scala/cromwell/backend/impl/spark/SparkClusterProcess.scala b/supportedBackends/spark/src/main/scala/cromwell/backend/impl/spark/SparkClusterProcess.scala index 0edffad09..701cb21a2 100644 --- a/supportedBackends/spark/src/main/scala/cromwell/backend/impl/spark/SparkClusterProcess.scala +++ b/supportedBackends/spark/src/main/scala/cromwell/backend/impl/spark/SparkClusterProcess.scala @@ -1,14 +1,17 @@ package cromwell.backend.impl.spark import akka.actor.ActorSystem +import akka.http.scaladsl.Http +import akka.http.scaladsl.model.{HttpRequest, HttpResponse, StatusCodes} +import akka.http.scaladsl.unmarshalling.Unmarshal +import akka.stream.ActorMaterializer import com.typesafe.scalalogging.Logger import 
cromwell.backend.impl.spark.SparkClusterProcess.{SparkJobSubmissionResponse, TerminalStatus} import cromwell.core.path.Obsolete._ import cromwell.core.path.Path import org.slf4j.LoggerFactory -import spray.client.pipelining._ -import spray.http.{HttpRequest, HttpResponse, StatusCodes} import spray.json.{DefaultJsonProtocol, JsonParser} +import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future, Promise} @@ -34,12 +37,6 @@ object SparkClusterProcess { } -trait SparkClusterRestClient { - def sendAndReceive: SendReceive - - def makeHttpRequest(httpRequest: HttpRequest): Future[HttpResponse] -} - trait SparkClusterProcessMonitor { def startMonitoringSparkClusterJob(jobPath: Path, jsonFile: String): Future[TerminalStatus] @@ -55,20 +52,18 @@ trait SparkClusterJobParser { } class SparkClusterProcess(implicit system: ActorSystem) extends SparkProcess - with SparkClusterRestClient with SparkClusterJobParser with SparkClusterProcessMonitor { + with SparkClusterJobParser with SparkClusterProcessMonitor { import SparkClusterProcess._ import SparkClusterJsonProtocol._ - import spray.httpx.SprayJsonSupport._ implicit lazy val ec: ExecutionContext = system.dispatcher + implicit val materializer = ActorMaterializer() lazy val completionPromise = Promise[TerminalStatus]() lazy val monitorPromise = Promise[Unit]() val tag = this.getClass.getSimpleName lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName)) - override def sendAndReceive: SendReceive = sendReceive - override def startMonitoringSparkClusterJob(jobPath: Path, jsonFile: String): Future[TerminalStatus] = { Future(parseJsonForSubmissionIdAndStatus(jobPath.resolve(jsonFile))) onComplete { case Success(resp: SparkJobSubmissionResponse) => @@ -130,15 +125,13 @@ class SparkClusterProcess(implicit system: ActorSystem) extends SparkProcess } val request = sparkClusterMasterHostName match { - case Some(master) => - 
Get(s"http://$master:6066/v1/submissions/status/$subId") - case None => - Get(s"http://spark-master:6066/v1/submissions/status/$subId") + case Some(master) => HttpRequest(uri = s"http://$master:6066/v1/submissions/status/$subId") + case None => HttpRequest(uri = s"http://spark-master:6066/v1/submissions/status/$subId") } makeHttpRequest(request) flatMap { v => v.status match { - case StatusCodes.OK => Future(v ~> unmarshal[SparkDriverStateQueryResponse]) + case StatusCodes.OK => Unmarshal(v).to[SparkDriverStateQueryResponse] case _ => val msg = s"Unexpected response received in response from Spark rest api. Response: $v" logger.error("{} reason: {}", tag, msg) @@ -163,8 +156,8 @@ class SparkClusterProcess(implicit system: ActorSystem) extends SparkProcess JsonParser(line).convertTo[SparkJobSubmissionResponse] } - override def makeHttpRequest(httpRequest: HttpRequest): Future[HttpResponse] = { + def makeHttpRequest(httpRequest: HttpRequest): Future[HttpResponse] = { val headers = httpRequest.headers - sendAndReceive(httpRequest.withHeaders(headers)) + Http().singleRequest(httpRequest.withHeaders(headers)) } } diff --git a/supportedBackends/spark/src/test/scala/cromwell/backend/impl/spark/SparkClusterProcessSpec.scala b/supportedBackends/spark/src/test/scala/cromwell/backend/impl/spark/SparkClusterProcessSpec.scala index d03779f48..07301fad5 100644 --- a/supportedBackends/spark/src/test/scala/cromwell/backend/impl/spark/SparkClusterProcessSpec.scala +++ b/supportedBackends/spark/src/test/scala/cromwell/backend/impl/spark/SparkClusterProcessSpec.scala @@ -1,5 +1,6 @@ package cromwell.backend.impl.spark +import akka.http.scaladsl.model._ import akka.testkit.ImplicitSender import cromwell.backend.impl.spark.SparkClusterProcess.SparkClusterJsonProtocol._ import cromwell.backend.impl.spark.SparkClusterProcess._ @@ -13,7 +14,6 @@ import org.scalatest.concurrent.PatienceConfiguration.Timeout import org.scalatest.concurrent.ScalaFutures import 
org.scalatest.mockito.MockitoSugar import org.scalatest.{BeforeAndAfter, Matchers, WordSpecLike} -import spray.http._ import spray.json._ import scala.concurrent.duration._ @@ -73,9 +73,9 @@ class SparkClusterProcessSpec extends TestKitSuite("SparkClusterProcess") private val mockRunningClusterResponse = SparkDriverStateQueryResponse(action = "SubmissionStatusResponse", driverState = "RUNNING", serverSparkVersion = "1.6.1", submissionId = "driver-20160803181054-0000", success = true, workerHostPort = "10.0.1.55:43834", workerId = "worker-20160801162431-10.0.1.55-43834") - private val mockSuccessHttpResponse = HttpResponse(StatusCodes.OK, HttpEntity(ContentTypes.`application/json`, mockSuccessClusterResponse.toJson.toString)) - private val mockRunningHttpResponse = HttpResponse(StatusCodes.OK, HttpEntity(ContentTypes.`application/json`, mockRunningClusterResponse.toJson.toString)) - private val mockFailedHttpResponse = HttpResponse(StatusCodes.OK, HttpEntity(ContentTypes.`application/json`, mockFailedClusterResponse.toJson.toString)) + private val mockSuccessHttpResponse = HttpResponse(StatusCodes.OK, entity = HttpEntity(ContentTypes.`application/json`, mockSuccessClusterResponse.toJson.toString)) + private val mockRunningHttpResponse = HttpResponse(StatusCodes.OK, entity = HttpEntity(ContentTypes.`application/json`, mockRunningClusterResponse.toJson.toString)) + private val mockFailedHttpResponse = HttpResponse(StatusCodes.OK, entity = HttpEntity(ContentTypes.`application/json`, mockFailedClusterResponse.toJson.toString)) private val mockBadHttpResponse = HttpResponse(StatusCodes.BadRequest) "SparkCluster process" should { diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala index b7db6411d..774b54b88 100644 --- 
a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala @@ -2,18 +2,20 @@ package cromwell.backend.impl.tes import java.nio.file.FileAlreadyExistsException +import akka.http.scaladsl.Http +import akka.http.scaladsl.model._ import cromwell.backend.BackendJobLifecycleActor import cromwell.backend.async.{ExecutionHandle, FailedNonRetryableExecutionHandle, PendingExecutionHandle} import cromwell.backend.impl.tes.TesResponseJsonFormatter._ import cromwell.backend.standard.{StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} import cromwell.core.path.{DefaultPathBuilder, Path} import cromwell.core.retry.SimpleExponentialBackoff -import spray.client.pipelining._ -import spray.http.HttpRequest -import spray.httpx.SprayJsonSupport._ -import spray.httpx.unmarshalling._ import wdl4s.expression.NoFunctions import wdl4s.values.WdlFile +import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ +import akka.http.scaladsl.marshalling.Marshal +import akka.http.scaladsl.unmarshalling.{Unmarshal, Unmarshaller} +import akka.stream.ActorMaterializer import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} @@ -42,6 +44,8 @@ object TesAsyncBackendJobExecutionActor { class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) extends BackendJobLifecycleActor with StandardAsyncExecutionActor with TesJobCachingActorHelper { + implicit val actorSystem = context.system + implicit val materializer = ActorMaterializer() override type StandardAsyncRunInfo = Any @@ -66,8 +70,6 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn override lazy val jobTag: String = jobDescriptor.key.tag - private def pipeline[T: FromResponseUnmarshaller]: HttpRequest => Future[T] = sendReceive ~> unmarshal[T] - // Utility 
for converting a WdlValue so that the path is localized to the // container's filesystem. override def mapCommandLineWdlFile(wdlFile: WdlFile): WdlFile = { @@ -118,14 +120,10 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn tesJobPaths.callExecutionRoot.createPermissionedDirectories() val taskMessage = createTaskMessage() - val submitTask = pipeline[CreateTaskResponse] - .apply(Post(tesEndpoint, taskMessage)) - - submitTask.map { - response => - val jobID = response.id - PendingExecutionHandle(jobDescriptor, StandardAsyncJob(jobID), None, previousStatus = None) - } + for { + entity <- Marshal(taskMessage).to[RequestEntity] + ctr <- makeRequest[CreateTaskResponse](HttpRequest(method = HttpMethods.POST, uri = tesEndpoint, entity = entity)) + } yield PendingExecutionHandle(jobDescriptor, StandardAsyncJob(ctr.id), None, previousStatus = None) } override def recoverAsync(jobId: StandardAsyncJob)(implicit ec: ExecutionContext) = executeAsync() @@ -142,19 +140,16 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn returnCodeTmp.delete(true) } - val abortRequest = pipeline[CancelTaskResponse] - .apply(Post(s"$tesEndpoint/${job.jobId}:cancel")) - abortRequest onComplete { + makeRequest[CancelTaskResponse](HttpRequest(method = HttpMethods.POST, uri = s"$tesEndpoint/${job.jobId}:cancel")) onComplete { case Success(_) => jobLogger.info("{} Aborted {}", tag: Any, job.jobId) case Failure(ex) => jobLogger.warn("{} Failed to abort {}: {}", tag, job.jobId, ex.getMessage) } + () } override def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle)(implicit ec: ExecutionContext): Future[TesRunStatus] = { - val pollTask = pipeline[MinimalTaskView].apply(Get(s"$tesEndpoint/${handle.pendingJob.jobId}?view=MINIMAL")) - - pollTask.map { + makeRequest[MinimalTaskView](HttpRequest(uri = s"$tesEndpoint/${handle.pendingJob.jobId}?view=MINIMAL")) map { response => val state = response.state state match { @@ -205,4 +200,11 
@@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn case _ => WdlFile(absPath.pathAsString) } } + + private def makeRequest[A](request: HttpRequest)(implicit um: Unmarshaller[ResponseEntity, A]): Future[A] = { + for { + response <- Http().singleRequest(request) + data <- Unmarshal(response.entity).to[A] + } yield data + } } From 3e9808241969ae1d13d09827ec09430648d00e23 Mon Sep 17 00:00:00 2001 From: Jeff Gentry Date: Wed, 5 Jul 2017 16:23:47 -0400 Subject: [PATCH 04/41] Scala 2.12 support for Cromwell (#2412) --- .travis.yml | 2 +- README.md | 6 +- .../standard/StandardAsyncExecutionActor.scala | 13 +--- .../standard/StandardSyncExecutionActor.scala | 1 - core/src/main/scala/cromwell/core/ConfigUtil.scala | 4 +- .../cromwell/core/path/BetterFileMethods.scala | 4 +- .../test/scala/cromwell/core/retry/RetrySpec.scala | 2 +- .../main/scala/cromwell/api/CromwellClient.scala | 2 +- .../src/main/scala/cromwell/api/model/Label.scala | 1 + .../migration/custom/BatchedTaskChange.scala | 3 +- .../slick/SummaryStatusSlickDatabase.scala | 3 +- .../cromwell/docker/local/DockerCliFlow.scala | 5 +- .../registryv2/flows/HttpFlowWithRetry.scala | 5 +- .../scala/cromwell/engine/io/gcs/GcsResponse.scala | 2 - .../execution/EngineJobExecutionActor.scala | 2 +- .../execution/callcaching/CallCache.scala | 3 +- .../scala/cromwell/server/CromwellRootActor.scala | 2 +- .../cromwell/webservice/CromwellApiService.scala | 2 +- .../cromwell/webservice/LabelsManagerActor.scala | 2 +- .../webservice/CromwellApiServiceSpec.scala | 15 ++--- .../filesystems/gcs/GoogleConfiguration.scala | 2 +- project/Dependencies.scala | 73 +++++++++++----------- project/Settings.scala | 47 +++++++++----- project/build.properties | 2 +- release/release_workflow.wdl | 6 +- .../metadata/impl/MetadataDatabaseAccess.scala | 1 + .../cromwell/services/ServicesStoreSpec.scala | 4 +- src/bin/travis/testCentaurJes.sh | 4 +- src/bin/travis/testCentaurLocal.sh | 2 +- 
src/bin/travis/testCentaurTes.sh | 2 +- .../jes/JesAsyncBackendJobExecutionActor.scala | 17 ++--- .../cromwell/backend/impl/jes/JesAttributes.scala | 4 +- .../cromwell/backend/impl/jes/JesJobPaths.scala | 3 +- .../jes/statuspolling/JesApiQueryManager.scala | 3 +- .../impl/jes/statuspolling/JesPollingActor.scala | 4 +- .../tes/TesAsyncBackendJobExecutionActor.scala | 8 +-- 36 files changed, 128 insertions(+), 133 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9c5941c5c..7fc3911b5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,7 @@ sudo: required dist: trusty language: scala scala: - - 2.11.8 + - 2.12.2 jdk: - oraclejdk8 cache: diff --git a/README.md b/README.md index 362c5e2fe..402c52896 100644 --- a/README.md +++ b/README.md @@ -120,13 +120,13 @@ There is a [Cromwell gitter channel](https://gitter.im/broadinstitute/cromwell) The following is the toolchain used for development of Cromwell. Other versions may work, but these are recommended. -* [Scala 2.11.8](http://www.scala-lang.org/news/2.11.8/) +* [Scala 2.12.2](http://www.scala-lang.org/news/2.12.1#scala-212-notes) * [SBT 0.13.12](https://github.com/sbt/sbt/releases/tag/v0.13.12) * [Java 8](http://www.oracle.com/technetwork/java/javase/overview/java8-2100321.html) # Building -`sbt assembly` will build a runnable JAR in `target/scala-2.11/` +`sbt assembly` will build a runnable JAR in `target/scala-2.12/` Tests are run via `sbt test`. Note that the tests do require Docker to be running. 
To test this out while downloading the Ubuntu image that is required for tests, run `docker pull ubuntu:latest` prior to running `sbt test` @@ -3788,7 +3788,7 @@ Essentially run `sbt doc` then commit the generated code into the `gh-pages` bra ``` $ sbt doc $ git co gh-pages -$ mv target/scala-2.11/api scaladoc +$ mv target/scala-2.12/api scaladoc $ git add scaladoc $ git commit -m "API Docs" $ git push origin gh-pages diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala index c33c1733f..19b09fdff 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala @@ -251,9 +251,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta * * @return the execution handle for the job. */ - def executeAsync()(implicit ec: ExecutionContext): Future[ExecutionHandle] = { - Future.fromTry(Try(execute())) - } + def executeAsync(): Future[ExecutionHandle] = Future.fromTry(Try(execute())) /** * Recovers the specified job id, or starts a new job. The default implementation simply calls execute(). @@ -269,9 +267,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta * @param jobId The previously recorded job id. * @return the execution handle for the job. */ - def recoverAsync(jobId: StandardAsyncJob)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { - Future.fromTry(Try(recover(jobId))) - } + def recoverAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = Future.fromTry(Try(recover(jobId))) /** * Returns the run status for the job. @@ -289,10 +285,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta * @param handle The handle of the running job. * @return The status of the job. 
*/ - def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle) - (implicit ec: ExecutionContext): Future[StandardAsyncRunStatus] = { - Future.fromTry(Try(pollStatus(handle))) - } + def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[StandardAsyncRunStatus] = Future.fromTry(Try(pollStatus(handle))) /** * Adds custom behavior invoked when polling fails due to some exception. By default adds nothing. diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala index c3cb05143..180f4d345 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala @@ -10,7 +10,6 @@ import cromwell.core.Dispatcher import cromwell.services.keyvalue.KeyValueServiceActor._ import scala.concurrent.{Future, Promise} -import scala.language.existentials trait StandardSyncExecutionActorParams extends StandardJobExecutionActorParams { /** The class for creating an async backend. 
*/ diff --git a/core/src/main/scala/cromwell/core/ConfigUtil.scala b/core/src/main/scala/cromwell/core/ConfigUtil.scala index 881fec686..4098432c1 100644 --- a/core/src/main/scala/cromwell/core/ConfigUtil.scala +++ b/core/src/main/scala/cromwell/core/ConfigUtil.scala @@ -7,7 +7,7 @@ import cats.syntax.validated._ import com.typesafe.config.{Config, ConfigException, ConfigValue} import org.slf4j.LoggerFactory -import scala.collection.JavaConversions._ +import scala.collection.JavaConverters._ import scala.reflect.{ClassTag, classTag} object ConfigUtil { @@ -15,7 +15,7 @@ object ConfigUtil { val validationLogger = LoggerFactory.getLogger("ConfigurationValidation") implicit class EnhancedConfig(val config: Config) extends AnyVal { - def keys = config.entrySet().toSet map { v: java.util.Map.Entry[String, ConfigValue] => v.getKey } + def keys = config.entrySet().asScala.toSet map { v: java.util.Map.Entry[String, ConfigValue] => v.getKey } /** * For keys that are in the configuration but not in the reference keySet, log a warning. 
diff --git a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala index 5346ec70c..94c780a84 100644 --- a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala +++ b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala @@ -214,11 +214,11 @@ trait BetterFileMethods { betterFile.bufferedReader(codec) final def newBufferedWriter(implicit codec: Codec, openOptions: OpenOptions = OpenOptions.default): BufferedWriter = - betterFile.newBufferedWriter(codec) + betterFile.newBufferedWriter(codec, openOptions) final def bufferedWriter(implicit codec: Codec, openOptions: OpenOptions = OpenOptions.default): ManagedResource[BufferedWriter] = - betterFile.bufferedWriter(codec) + betterFile.bufferedWriter(codec, openOptions) final def newFileReader: FileReader = betterFile.newFileReader diff --git a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala index 27f24076c..f62b49d47 100644 --- a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala +++ b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala @@ -34,7 +34,7 @@ class RetrySpec extends TestKitSuite("retry-spec") with FlatSpecLike with Matche isFatal: Throwable => Boolean = Retry.throwableToFalse): Future[Int] = { withRetry( - f = work.doIt, + f = () => work.doIt(), maxRetries = Option(retries), isTransient = isTransient, isFatal = isFatal diff --git a/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala index b8bbb4dd8..f9e3511e5 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala @@ -128,7 +128,7 @@ class CromwellClient(val cromwellUrl: URL, val apiVersion: String)(implicit acto private def decodeResponse(response: HttpResponse): Try[HttpResponse] = { decoders.get(response.encoding) 
map { decoder => - Try(decoder.decode(response)) + Try(decoder.decodeMessage(response)) } getOrElse Failure(UnsuccessfulRequestException(s"No decoder for ${response.encoding}", response)) } } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala index fb5e97669..fd9d88d21 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala @@ -1,6 +1,7 @@ package cromwell.api.model import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, RootJsonFormat} +import scala.language.postfixOps object LabelsJsonFormatter extends DefaultJsonProtocol { implicit object LabelJsonFormat extends RootJsonFormat[List[Label]] { diff --git a/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala b/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala index 17e844479..49db7e444 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala @@ -118,7 +118,8 @@ trait BatchedTaskChange extends MigrationTaskChange { resultBatch.close() val progress = Math.min((page + 1) * 100 / pageCount, 100) - logger.info(s"[$migrationName] $progress%") + val progressMessage = s"[$migrationName] $progress%" + logger.info(progressMessage) } if (batchMigrationCounter != 0) { diff --git a/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala index 6903a29e6..cc75511e2 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala @@ -9,8 +9,7 @@ trait 
SummaryStatusSlickDatabase { import dataAccess.driver.api._ - private[slick] def getSummaryStatusEntryMaximumId(summaryTableName: String, summarizedTableName: String) - (implicit ec: ExecutionContext): DBIO[Option[Long]] = { + private[slick] def getSummaryStatusEntryMaximumId(summaryTableName: String, summarizedTableName: String): DBIO[Option[Long]] = { dataAccess. maximumIdForSummaryTableNameSummarizedTableName((summaryTableName, summarizedTableName)). result.headOption diff --git a/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala b/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala index 055ef311e..94088beb4 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala @@ -4,7 +4,7 @@ import java.util.concurrent.TimeoutException import akka.actor.Scheduler import akka.stream.scaladsl.{Flow, GraphDSL, Merge, Partition} -import akka.stream.{ActorMaterializer, FlowShape} +import akka.stream.FlowShape import cromwell.docker.DockerHashActor._ import cromwell.docker.{DockerFlow, DockerHashActor, DockerHashResult, DockerImageIdentifierWithoutHash} @@ -15,8 +15,7 @@ import scala.util.{Failure, Success} /** * A docker flow using the CLI to return docker hashes. */ -class DockerCliFlow(implicit ec: ExecutionContext, materializer: ActorMaterializer, scheduler: Scheduler) - extends DockerFlow { +class DockerCliFlow(implicit ec: ExecutionContext, scheduler: Scheduler) extends DockerFlow { // If the docker cli hangs it would be difficult to debug. So timeout the first request after a short duration. 
// https://github.com/docker/docker/issues/18279 diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/HttpFlowWithRetry.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/HttpFlowWithRetry.scala index 29aa39ede..d5d1d5776 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/HttpFlowWithRetry.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/HttpFlowWithRetry.scala @@ -13,7 +13,6 @@ import org.slf4j.LoggerFactory import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} -import scala.language.postfixOps import scala.util.Try object HttpFlowWithRetry { @@ -37,8 +36,6 @@ object HttpFlowWithRetry { } } - def defaultRequestBackoff(): Backoff = SimpleExponentialBackoff(1 second, 2 minutes, 3D) - /** * In order to allow for retries, the http context object needs to encapsulate the original request, * so that it can be re-submitted if necessary. @@ -69,7 +66,7 @@ object HttpFlowWithRetry { case class HttpFlowWithRetry[T]( httpClientFlow: RetryableHttpFlow[T], retryBufferSize: Int = 100, - requestBackoff: () => Backoff = defaultRequestBackoff, + requestBackoff: () => Backoff = () => SimpleExponentialBackoff(1 second, 2 minutes, 3D), maxAttempts: Int = 3 )(implicit val scheduler: Scheduler, ec: ExecutionContext, mat: ActorMaterializer) { diff --git a/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala b/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala index ca0a7e83e..937e2b660 100644 --- a/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala +++ b/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala @@ -2,8 +2,6 @@ package cromwell.engine.io.gcs import cromwell.engine.io.IoActor._ -import scala.language.existentials - /** * ADT used only inside the batch stream * @tparam T final type of the result of the Command diff --git 
a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/EngineJobExecutionActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/EngineJobExecutionActor.scala index c683b5d10..2d440ca34 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/EngineJobExecutionActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/EngineJobExecutionActor.scala @@ -570,7 +570,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, case AbortedResponse(_: BackendJobDescriptorKey) => log.debug("{}: Won't save aborted job response to JobStore", jobTag) forwardAndStop(updatedData.response) - case JobFailedNonRetryableResponse(jobKey: BackendJobDescriptorKey, throwable: Throwable, returnCode: Option[Int]) => + case JobFailedNonRetryableResponse(jobKey, throwable: Throwable, returnCode: Option[Int]) => publishHashesToMetadata(updatedData.hashes) writeToMetadata(Map(callCachingAllowReuseMetadataKey -> false)) saveUnsuccessfulJobResults(jobKey, returnCode, throwable, retryable = false) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala index 7b069509d..0afff7212 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala @@ -41,8 +41,7 @@ class CallCache(database: CallCachingSqlDatabase) { } private def buildCallCachingJoin(callCachingEntry: CallCachingEntry, callCacheHashes: CallCacheHashes, - result: Iterable[WdlValueSimpleton], jobDetritus: Map[String, Path]) - (implicit ec: ExecutionContext): CallCachingJoin = { + result: Iterable[WdlValueSimpleton], jobDetritus: Map[String, Path]): CallCachingJoin = { val hashesToInsert: Iterable[CallCachingHashEntry] = { callCacheHashes.hashes map { hash => 
CallCachingHashEntry(hash.hashKey.key, hash.hashValue.value) } diff --git a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala index 3520ce029..21b66b639 100644 --- a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala +++ b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala @@ -94,7 +94,7 @@ import scala.language.postfixOps lazy val googleFlow = new GoogleFlow(dockerHttpPool, dockerConf.gcrApiQueriesPer100Seconds)(ioEc, materializer, system.scheduler) lazy val dockerHubFlow = new DockerHubFlow(dockerHttpPool)(ioEc, materializer, system.scheduler) lazy val quayFlow = new QuayFlow(dockerHttpPool)(ioEc, materializer, system.scheduler) - lazy val dockerCliFlow = new DockerCliFlow()(ioEc, materializer, system.scheduler) + lazy val dockerCliFlow = new DockerCliFlow()(ioEc, system.scheduler) lazy val dockerFlows = dockerConf.method match { case DockerLocalLookup => Seq(dockerCliFlow) case DockerRemoteLookup => Seq(dockerHubFlow, googleFlow, quayFlow) diff --git a/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala b/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala index f59ee64d3..2d29bb4c5 100644 --- a/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala +++ b/engine/src/main/scala/cromwell/webservice/CromwellApiService.scala @@ -89,7 +89,7 @@ trait CromwellApiService { } ~ encodeResponseWith(Gzip, Deflate, NoCoding) { path("workflows" / Segment / Segment / "metadata") { (version, possibleWorkflowId) => - parameters('includeKey.*, 'excludeKey.*, 'expandSubWorkflows.as[Boolean].?) 
{ (includeKeys, excludeKeys, expandSubWorkflowsOption) => + parameters(('includeKey.*, 'excludeKey.*, 'expandSubWorkflows.as[Boolean].?)) { (includeKeys, excludeKeys, expandSubWorkflowsOption) => val includeKeysOption = NonEmptyList.fromList(includeKeys.toList) val excludeKeysOption = NonEmptyList.fromList(excludeKeys.toList) val expandSubWorkflows = expandSubWorkflowsOption.getOrElse(false) diff --git a/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala b/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala index 7a27e9ce1..e8659a04d 100644 --- a/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala +++ b/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala @@ -34,7 +34,7 @@ object LabelsManagerActor { } def metadataEventsToLabels(events: Iterable[MetadataEvent]): Map[String, String] = { - events map { case MetadataEvent(MetadataKey(_, _, key), Some(MetadataValue(value, _)), _) => key.split("\\:").last -> value } toMap + events collect { case MetadataEvent(MetadataKey(_, _, key), Some(MetadataValue(value, _)), _) => key.split("\\:").last -> value } toMap } def labelsToMetadataEvents(labels: Labels, workflowId: WorkflowId): Iterable[MetadataEvent] = { diff --git a/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala index 2acfa1f89..654a333aa 100644 --- a/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/CromwellApiServiceSpec.scala @@ -29,7 +29,7 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with val akkaHttpService = new MockApiService() val version = "v1" - implicit def default(implicit system: ActorSystem) = RouteTestTimeout(5.seconds) + implicit def default = RouteTestTimeout(5.seconds) behavior of "REST API /status endpoint" @@ -322,7 +322,7 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with 
ScalatestRouteTest with check { status should be(StatusCodes.OK) val decoder: Decoder = Gzip - val result = Await.result(Unmarshal(decoder.decode(response)).to[JsObject], 1.second) + val result = Await.result(Unmarshal(decoder.decodeMessage(response)).to[JsObject], 1.second) result.fields.keys should contain allOf("testKey1", "testKey2") result.fields.keys shouldNot contain("testKey3") result.fields("testKey1") should be(JsString("myValue1")) @@ -336,7 +336,7 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with check { status should be(StatusCodes.OK) val decoder: Decoder = Gzip - val result = Await.result(Unmarshal(decoder.decode(response)).to[JsObject], 1.second) + val result = Await.result(Unmarshal(decoder.decodeMessage(response)).to[JsObject], 1.second) result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") result.fields.keys should contain noneOf("testKey2b", "testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) @@ -351,7 +351,7 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with check { status should be(StatusCodes.OK) val decoder: Decoder = Gzip - val result = Await.result(Unmarshal(decoder.decode(response)).to[JsObject], 1.second) + val result = Await.result(Unmarshal(decoder.decodeMessage(response)).to[JsObject], 1.second) result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") result.fields.keys should contain noneOf("testKey2b", "testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) @@ -369,7 +369,7 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with } val decoder: Decoder = Gzip - Unmarshal(decoder.decode(response)).to[String] map { r => + Unmarshal(decoder.decodeMessage(response)).to[String] map { r => assertResult( s"""{ | "status": "fail", @@ -512,6 +512,7 @@ object CromwellApiServiceSpec { events.head.key.workflowId match { case CromwellApiServiceSpec.ExistingWorkflowId => 
sender ! MetadataWriteSuccess(events) case CromwellApiServiceSpec.AbortedWorkflowId => sender ! MetadataWriteFailure(new Exception("mock exception of db failure"), events) + case WorkflowId(_) => throw new Exception("Something untoward happened, this situation is not believed to be possible at this time") } } } @@ -524,10 +525,10 @@ object CromwellApiServiceSpec { sender ! response case AbortWorkflow(id, manager) => val message = id match { - case ExistingWorkflowId => - WorkflowStoreEngineActor.WorkflowAborted(id) + case ExistingWorkflowId => WorkflowStoreEngineActor.WorkflowAborted(id) case AbortedWorkflowId => WorkflowAbortFailed(id, new IllegalStateException(s"Workflow ID '$id' is in terminal state 'Aborted' and cannot be aborted.")) + case WorkflowId(_) => throw new Exception("Something untoward happened") } sender ! message } diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleConfiguration.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleConfiguration.scala index a183ceae0..34c1bff53 100644 --- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleConfiguration.scala +++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleConfiguration.scala @@ -57,7 +57,7 @@ object GoogleConfiguration { private val log = LoggerFactory.getLogger("GoogleConfiguration") - case class GoogleConfigurationException(errorMessages: List[String]) extends MessageAggregation { + final case class GoogleConfigurationException(errorMessages: List[String]) extends MessageAggregation { override val exceptionContext = "Google configuration" } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index c02a48d5d..86a34562e 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -4,18 +4,19 @@ object Dependencies { lazy val lenthallV = "0.25" lazy val wdl4sV = "0.13" - lazy val akkaV = "2.4.16" - lazy val akkaHttpV = "10.0.5" + lazy val akkaV = "2.4.17" + lazy val akkaHttpV = "10.0.9" lazy val 
slickV = "3.2.0" - // TODO: Re-combine these when cromwell is 2.12: - lazy val cromwellApiClientAkkaV = "2.4.17" - lazy val cromwellApiClientAkkaHttpV = "10.0.6" + lazy val googleClientApiV = "1.22.0" lazy val googleGenomicsServicesApiV = "1.22.0" lazy val betterFilesV = "2.17.1" lazy val catsV = "0.9.0" - lazy val fs2V = "0.9.6" + lazy val fs2V = "0.9.7" + + lazy val pegdownV = "1.6.0" + lazy val scalatestV = "3.0.2" // Internal collections of dependencies @@ -23,32 +24,32 @@ object Dependencies { private val catsDependencies = List( "org.typelevel" %% "cats" % catsV, - "com.github.benhutchison" %% "mouse" % "0.6" + "com.github.benhutchison" %% "mouse" % "0.9" ) map (_ /* Exclude test framework cats-laws and its transitive dependency scalacheck. If sbt detects scalacheck, it tries to run it. Explicitly excluding the two problematic artifacts instead of including the three (or four?). https://github.com/typelevel/cats/tree/v0.7.2#getting-started - Re "_2.11", see also: https://github.com/sbt/sbt/issues/1518 + Re "_2.12", see also: https://github.com/sbt/sbt/issues/1518 */ - exclude("org.typelevel", "cats-laws_2.11") - exclude("org.typelevel", "cats-kernel-laws_2.11") + exclude("org.typelevel", "cats-laws_2.12") + exclude("org.typelevel", "cats-kernel-laws_2.12") ) private val baseDependencies = List( "org.broadinstitute" %% "lenthall" % lenthallV, - "com.iheart" %% "ficus" % "1.3.0", - "org.scalatest" %% "scalatest" % "3.0.0" % Test, - "org.pegdown" % "pegdown" % "1.6.0" % Test, - "org.specs2" %% "specs2-mock" % "3.8.5" % Test + "com.iheart" %% "ficus" % "1.4.1", + "org.scalatest" %% "scalatest" % scalatestV % Test, + "org.pegdown" % "pegdown" % pegdownV % Test, + "org.specs2" %% "specs2-mock" % "3.8.9" % Test // 3.9.X doesn't enjoy the spark backend or refined ) ++ catsDependencies :+ fs2Test private val slf4jBindingDependencies = List( // http://logback.qos.ch/dependencies.html - "ch.qos.logback" % "logback-classic" % "1.2.1", - "ch.qos.logback" % 
"logback-access" % "1.2.1", - "org.codehaus.janino" % "janino" % "3.0.1" + "ch.qos.logback" % "logback-classic" % "1.2.3", + "ch.qos.logback" % "logback-access" % "1.2.3", + "org.codehaus.janino" % "janino" % "3.0.7" ) private val slickDependencies = List( @@ -73,7 +74,7 @@ object Dependencies { private val googleApiClientDependencies = List( // Used by swagger, but only in tests. This overrides an older 2.1.3 version of jackson-core brought in by // these Google dependencies, but which isn't properly evicted by IntelliJ's sbt integration. - "com.fasterxml.jackson.core" % "jackson-core" % "2.8.2", + "com.fasterxml.jackson.core" % "jackson-core" % "2.8.9", // The exclusions prevent guava 13 from colliding at assembly time with guava 18 brought in elsewhere. "com.google.api-client" % "google-api-client-java6" % googleClientApiV exclude("com.google.guava", "guava-jdk5"), "com.google.api-client" % "google-api-client-jackson2" % googleClientApiV exclude("com.google.guava", "guava-jdk5") @@ -81,11 +82,11 @@ object Dependencies { private val googleCloudDependencies = List( "com.google.apis" % "google-api-services-genomics" % ("v1alpha2-rev64-" + googleGenomicsServicesApiV), - "com.google.cloud" % "google-cloud-nio" % "0.17.2-alpha" + "com.google.cloud" % "google-cloud-nio" % "0.20.1-alpha" exclude("com.google.api.grpc", "grpc-google-common-protos") exclude("com.google.cloud.datastore", "datastore-v1-protos") exclude("org.apache.httpcomponents", "httpclient"), - "org.apache.httpcomponents" % "httpclient" % "4.5.2" + "org.apache.httpcomponents" % "httpclient" % "4.5.3" ) private val dbmsDependencies = List( @@ -103,7 +104,7 @@ object Dependencies { private val refinedTypeDependenciesList = List( "org.scala-lang" % "scala-compiler" % Settings.ScalaVersion, - "eu.timepit" %% "refined" % "0.7.0" + "eu.timepit" %% "refined" % "0.8.2" ) // Sub-project dependencies, added in addition to any dependencies inherited from .dependsOn(). 
@@ -115,16 +116,16 @@ object Dependencies { val databaseSqlDependencies = baseDependencies ++ slickDependencies ++ dbmsDependencies ++ refinedTypeDependenciesList val coreDependencies = List( - "com.typesafe.scala-logging" %% "scala-logging" % "3.4.0", + "com.typesafe.scala-logging" %% "scala-logging" % "3.6.0", "org.broadinstitute" %% "wdl4s" % wdl4sV, - "org.apache.commons" % "commons-lang3" % "3.4", + "org.apache.commons" % "commons-lang3" % "3.6", "com.typesafe.akka" %% "akka-http-spray-json" % akkaHttpV, - "com.typesafe" % "config" % "1.3.0", + "com.typesafe" % "config" % "1.3.1", "com.typesafe.akka" %% "akka-actor" % akkaV, "com.typesafe.akka" %% "akka-slf4j" % akkaV, "com.typesafe.akka" %% "akka-testkit" % akkaV % Test, - "com.google.guava" % "guava" % "20.0", - "com.google.auth" % "google-auth-library-oauth2-http" % "0.6.0", + "com.google.guava" % "guava" % "22.0", + "com.google.auth" % "google-auth-library-oauth2-http" % "0.7.0", "com.typesafe.akka" %% "akka-stream-testkit" % akkaV, "com.chuusai" %% "shapeless" % "2.3.2" ) ++ baseDependencies ++ googleApiClientDependencies ++ @@ -135,14 +136,13 @@ object Dependencies { "com.github.pathikrit" %% "better-files" % betterFilesV % Test ) ++ liquibaseDependencies ++ dbmsDependencies - // FIXME: this needs to be cleaned up w/ 2.12 move val cromwellApiClientDependencies = List( - "com.typesafe.akka" %% "akka-actor" % cromwellApiClientAkkaV, - "com.typesafe.akka" %% "akka-http" % cromwellApiClientAkkaHttpV, - "com.typesafe.akka" %% "akka-http-spray-json" % cromwellApiClientAkkaHttpV, - "com.github.pathikrit" %% "better-files" % "3.0.0", - "org.scalatest" %% "scalatest" % "3.0.1" % Test, - "org.pegdown" % "pegdown" % "1.6.0" % Test + "com.typesafe.akka" %% "akka-actor" % akkaV, + "com.typesafe.akka" %% "akka-http" % akkaHttpV, + "com.typesafe.akka" %% "akka-http-spray-json" % akkaHttpV, + "com.github.pathikrit" %% "better-files" % betterFilesV, + "org.scalatest" %% "scalatest" % scalatestV % Test, + "org.pegdown" % 
"pegdown" % pegdownV % Test ) val engineDependencies = List( @@ -150,9 +150,10 @@ object Dependencies { "commons-io" % "commons-io" % "2.5", "com.storm-enroute" %% "scalameter" % "0.8.2" exclude("com.fasterxml.jackson.core", "jackson-databind") - exclude("com.fasterxml.jackson.module", "jackson-module-scala"), - "com.fasterxml.jackson.core" % "jackson-databind" % "2.7.9.1", - "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.7.9", + exclude("com.fasterxml.jackson.module", "jackson-module-scala") + exclude("org.scala-tools.testing", "test-interface"), + "com.fasterxml.jackson.core" % "jackson-databind" % "2.8.9", + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.8.9", "io.swagger" % "swagger-parser" % "1.0.22" % Test, "org.yaml" % "snakeyaml" % "1.17" % Test ) ++ akkaHttpServerDependencies diff --git a/project/Settings.scala b/project/Settings.scala index 3a1dc01b6..b59c23041 100644 --- a/project/Settings.scala +++ b/project/Settings.scala @@ -25,17 +25,16 @@ object Settings { https://github.com/sbt/sbt-assembly/issues/69 https://github.com/scala/pickling/issues/10 - Other fancy flags from - - http://blog.threatstack.com/useful-scalac-options-for-better-scala-development-part-1 - - and - - https://tpolecat.github.io/2014/04/11/scalac-flags.html + Other fancy flags from https://tpolecat.github.io/2017/04/25/scalac-flags.html. + The following aren't used (yet), and in general are an exercise in pain for 2.12 with Cromwell. They'd + certainly be nice to have, but params causes a world of hurt and patvars is just going to be a big time sink. + Interested parties are encouraged to take a stab at it. + "-Ywarn-unused:params", // Warn if a value parameter is unused. + "-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused. 
*/ val compilerSettings = List( - "-Xlint", + "-explaintypes", "-feature", "-Xmax-classfile-name", "200", "-target:jvm-1.8", @@ -43,12 +42,32 @@ object Settings { "-unchecked", "-deprecation", "-Xfuture", + "-Xlint:adapted-args", + "-Xlint:by-name-right-associative", + "-Xlint:constant", + "-Xlint:delayedinit-select", + "-Xlint:doc-detached", + "-Xlint:inaccessible", + "-Xlint:infer-any", + "-Xlint:missing-interpolator", + "-Xlint:nullary-override", + "-Xlint:nullary-unit", + "-Xlint:option-implicit", + "-Xlint:package-object-classes", + "-Xlint:poly-implicit-overload", + "-Xlint:private-shadow", + "-Xlint:stars-align", + "-Xlint:type-parameter-shadow", + "-Xlint:unsound-match", "-Yno-adapted-args", "-Ywarn-dead-code", "-Ywarn-numeric-widen", "-Ywarn-value-discard", - "-Ywarn-unused", - "-Ywarn-unused-import", + "-Ywarn-inaccessible", + "-Ywarn-unused:implicits", + "-Ywarn-unused:imports", + "-Ywarn-unused:privates", + "-Ywarn-unused:locals", "-Xfatal-warnings" ) @@ -100,7 +119,7 @@ object Settings { ) ) - val ScalaVersion = "2.11.8" + val ScalaVersion = "2.12.2" val commonSettings = ReleasePlugin.projectSettings ++ testSettings ++ assemblySettings ++ dockerSettings ++ cromwellVersionWithGit ++ publishingSettings ++ List( organization := "org.broadinstitute", @@ -139,11 +158,9 @@ object Settings { name := "cromwell-api-client", libraryDependencies ++= cromwellApiClientDependencies, organization := "org.broadinstitute", - scalaVersion := "2.12.1", + scalaVersion := ScalaVersion, + scalacOptions ++= compilerSettings, resolvers ++= commonResolvers - // scalacOptions ++= compilerSettings, - // scalacOptions in (Compile, doc) ++= docSettings, - // parallelExecution := false ) ++ ReleasePlugin.projectSettings ++ testSettings ++ assemblySettings ++ cromwellVersionWithGit ++ publishingSettings diff --git a/project/build.properties b/project/build.properties index 35c88bab7..64317fdae 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ 
-sbt.version=0.13.12 +sbt.version=0.13.15 diff --git a/release/release_workflow.wdl b/release/release_workflow.wdl index 5bd0b004e..49a490465 100644 --- a/release/release_workflow.wdl +++ b/release/release_workflow.wdl @@ -316,8 +316,8 @@ workflow release_cromwell { dependencyCommands = cromwellDependencyCommands.updateCommand } - call wait_for_artifactory as waitForLenthall { input: repo = "lenthall_2.11", version = release_lenthall.version } - call wait_for_artifactory as waitForWdl4s { input: repo = "wdl4s_2.11", version = release_wdl4s.version } + call wait_for_artifactory as waitForLenthall { input: repo = "lenthall_2.12", version = release_lenthall.version } + call wait_for_artifactory as waitForWdl4s { input: repo = "wdl4s_2.12", version = release_wdl4s.version } # Generates commands to update wdl4s dependencies scatter(wdl4sDependency in wdl4sDependencies) { @@ -355,7 +355,7 @@ workflow release_cromwell { } } - File cromwellJar = release_cromwell.executionDir + "/target/scala-2.11/cromwell-" + cromwellPrep.version + ".jar" + File cromwellJar = release_cromwell.executionDir + "/target/scala-2.12/cromwell-" + cromwellPrep.version + ".jar" # Version that was just released Int cromwellVersionAsInt = cromwellPrep.version # Previous version diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala index 6ad24c9e9..6e1b356df 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala @@ -125,6 +125,7 @@ trait MetadataDatabaseAccess { case MetadataQuery(_, None, None, Some(includeKeys), Some(excludeKeys), _) => Future.failed( new IllegalArgumentException( s"Include/Exclude keys may not be mixed: include = $includeKeys, exclude = $excludeKeys")) + case _ => Future.failed(new IllegalArgumentException(s"Invalid 
MetadataQuery: $query")) } futureMetadata map metadataToMetadataEvents(query.workflowId) diff --git a/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala b/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala index 603bf2906..b2a50ae6f 100644 --- a/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala +++ b/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala @@ -482,9 +482,7 @@ object ServicesStoreSpec { (referenceProfile: ReferenceProfile, referenceDatabase: ReferenceProfile#Backend#Database, comparisonProfile: ComparisonProfile, - comparisonDatabase: ComparisonProfile#Backend#Database)(block: DiffResult => T) - (implicit executor: ExecutionContext): T = { - + comparisonDatabase: ComparisonProfile#Backend#Database)(block: DiffResult => T): T = { withConnections(referenceProfile, referenceDatabase, comparisonProfile, comparisonDatabase) { LiquibaseUtils.compare(_, _)(block) } diff --git a/src/bin/travis/testCentaurJes.sh b/src/bin/travis/testCentaurJes.sh index 0d46b7694..aa398e2b2 100755 --- a/src/bin/travis/testCentaurJes.sh +++ b/src/bin/travis/testCentaurJes.sh @@ -108,9 +108,9 @@ fi # Upload the built Cromwell jar to GCS so we can use it in our centaur test. 
Set an exit trap to clean it up on failure JAR_GCS_PATH=gs://cloud-cromwell-dev/travis-centaur/${CROMWELL_JAR} -gsutil cp target/scala-2.11/cromwell-*.jar "${JAR_GCS_PATH}" +gsutil cp target/scala-2.12/cromwell-*.jar "${JAR_GCS_PATH}" -java -Dconfig.file=./jes.conf -jar target/scala-2.11/cromwell-*.jar run src/bin/travis/resources/centaur.wdl src/bin/travis/resources/centaur.inputs | tee log.txt +java -Dconfig.file=./jes.conf -jar target/scala-2.12/cromwell-*.jar run src/bin/travis/resources/centaur.wdl src/bin/travis/resources/centaur.inputs | tee log.txt EXIT_CODE="${PIPESTATUS[0]}" # The perl code below is to remove our lovely color highlighting diff --git a/src/bin/travis/testCentaurLocal.sh b/src/bin/travis/testCentaurLocal.sh index c7802b30f..ba8138f2c 100755 --- a/src/bin/travis/testCentaurLocal.sh +++ b/src/bin/travis/testCentaurLocal.sh @@ -87,7 +87,7 @@ mysql -u root -e "CREATE USER 'travis'@'localhost' IDENTIFIED BY '';" mysql -u root -e "GRANT ALL PRIVILEGES ON cromwell_test . * TO 'travis'@'localhost';" sbt assembly -CROMWELL_JAR=$(find "$(pwd)/target/scala-2.11" -name "cromwell-*.jar") +CROMWELL_JAR=$(find "$(pwd)/target/scala-2.12" -name "cromwell-*.jar") LOCAL_CONF="$(pwd)/src/bin/travis/resources/local_centaur.conf" git clone https://github.com/broadinstitute/centaur.git cd centaur diff --git a/src/bin/travis/testCentaurTes.sh b/src/bin/travis/testCentaurTes.sh index bce555e12..166a7be61 100755 --- a/src/bin/travis/testCentaurTes.sh +++ b/src/bin/travis/testCentaurTes.sh @@ -42,7 +42,7 @@ mysql -u root -e "GRANT ALL PRIVILEGES ON cromwell_test . 
* TO 'travis'@'localho WORKDIR=$(pwd) sbt assembly -CROMWELL_JAR=$(find "$(pwd)/target/scala-2.11" -name "cromwell-*.jar") +CROMWELL_JAR=$(find "$(pwd)/target/scala-2.12" -name "cromwell-*.jar") TES_CENTAUR_CONF="$(pwd)/src/bin/travis/resources/tes_centaur.conf" git clone https://github.com/broadinstitute/centaur.git cd centaur diff --git a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAsyncBackendJobExecutionActor.scala b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAsyncBackendJobExecutionActor.scala index f617e89ac..5e4869854 100644 --- a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAsyncBackendJobExecutionActor.scala @@ -31,7 +31,7 @@ import wdl4s.values._ import scala.collection.JavaConverters._ import scala.concurrent.duration._ -import scala.concurrent.{ExecutionContext, Future} +import scala.concurrent.Future import scala.language.postfixOps import scala.util.{Success, Try} @@ -285,15 +285,11 @@ class JesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn override def isTransient(throwable: Throwable): Boolean = isTransientJesException(throwable) - override def executeAsync()(implicit ec: ExecutionContext): Future[ExecutionHandle] = { - runWithJes(None) - } + override def executeAsync(): Future[ExecutionHandle] = runWithJes(None) val futureKvJobKey = KvJobKey(jobDescriptor.key.call.fullyQualifiedName, jobDescriptor.key.index, jobDescriptor.key.attempt + 1) - override def recoverAsync(jobId: StandardAsyncJob)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { - runWithJes(Option(jobId)) - } + override def recoverAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = runWithJes(Option(jobId)) private def runWithJes(jobForResumption: Option[StandardAsyncJob]): Future[ExecutionHandle] = { // Want to force runtimeAttributes to evaluate so we can fail quickly now 
if we need to: @@ -329,12 +325,7 @@ class JesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn } } - - override def pollStatusAsync(handle: JesPendingExecutionHandle) - (implicit ec: ExecutionContext): Future[RunStatus] = { - super[JesStatusRequestClient].pollStatus(handle.runInfo.get) - } - + override def pollStatusAsync(handle: JesPendingExecutionHandle): Future[RunStatus] = super[JesStatusRequestClient].pollStatus(handle.runInfo.get) override def customPollStatusFailure: PartialFunction[(ExecutionHandle, Exception), ExecutionHandle] = { case (oldHandle: JesPendingExecutionHandle@unchecked, e: GoogleJsonResponseException) if e.getStatusCode == 404 => diff --git a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAttributes.scala b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAttributes.scala index 08e57d87c..3bd1a2c04 100644 --- a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAttributes.scala +++ b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesAttributes.scala @@ -18,7 +18,7 @@ import net.ceedubs.ficus.Ficus._ import net.ceedubs.ficus.readers.{StringReader, ValueReader} import org.slf4j.{Logger, LoggerFactory} -import scala.collection.JavaConversions._ +import scala.collection.JavaConverters._ case class JesAttributes(project: String, computeServiceAccount: String, @@ -60,7 +60,7 @@ object JesAttributes { implicit val urlReader: ValueReader[URL] = StringReader.stringValueReader.map { URI.create(_).toURL } def apply(googleConfig: GoogleConfiguration, backendConfig: Config): JesAttributes = { - val configKeys = backendConfig.entrySet().toSet map { entry: java.util.Map.Entry[String, ConfigValue] => entry.getKey } + val configKeys = backendConfig.entrySet().asScala.toSet map { entry: java.util.Map.Entry[String, ConfigValue] => entry.getKey } warnNotRecognized(configKeys, jesKeys, context, Logger) def warnDeprecated(keys: Set[String], deprecated: Map[String, String], 
context: String, logger: Logger) = { diff --git a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesJobPaths.scala b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesJobPaths.scala index 3ce09f9e4..e3c349ec0 100644 --- a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesJobPaths.scala +++ b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/JesJobPaths.scala @@ -1,6 +1,5 @@ package cromwell.backend.impl.jes -import akka.actor.ActorSystem import cromwell.backend.BackendJobDescriptorKey import cromwell.backend.io.JobPaths import cromwell.core.path.Path @@ -11,7 +10,7 @@ object JesJobPaths { val GcsExecPathKey = "gcsExec" } -case class JesJobPaths(override val workflowPaths: JesWorkflowPaths, jobKey: BackendJobDescriptorKey)(implicit actorSystem: ActorSystem) extends JobPaths { +final case class JesJobPaths(override val workflowPaths: JesWorkflowPaths, jobKey: BackendJobDescriptorKey) extends JobPaths { val jesLogBasename = { val index = jobKey.index.map(s => s"-$s").getOrElse("") diff --git a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesApiQueryManager.scala b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesApiQueryManager.scala index 53a55b368..dd65d8225 100644 --- a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesApiQueryManager.scala +++ b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesApiQueryManager.scala @@ -78,7 +78,8 @@ class JesApiQueryManager(val qps: Int Refined Positive) extends Actor with Actor workQueue = beheaded.newWorkQueue } - private final case class BeheadedWorkQueue(workToDo: Option[NonEmptyList[JesApiQuery]], newWorkQueue: Queue[JesApiQuery]) + // Intentionally not final, this runs afoul of SI-4440 (I believe) + private case class BeheadedWorkQueue(workToDo: Option[NonEmptyList[JesApiQuery]], newWorkQueue: Queue[JesApiQuery]) private def 
beheadWorkQueue(maxBatchSize: Int): BeheadedWorkQueue = { val head = workQueue.take(maxBatchSize).toList diff --git a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesPollingActor.scala b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesPollingActor.scala index 4f0aff53d..0d953d251 100644 --- a/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesPollingActor.scala +++ b/supportedBackends/jes/src/main/scala/cromwell/backend/impl/jes/statuspolling/JesPollingActor.scala @@ -14,7 +14,7 @@ import eu.timepit.refined.numeric._ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} import scala.concurrent.duration._ -import scala.collection.JavaConversions._ +import scala.collection.JavaConverters._ /** * Sends batched requests to JES as a worker to the JesApiQueryManager @@ -91,7 +91,7 @@ class JesPollingActor(val pollingManager: ActorRef, val qps: Int Refined Positiv () } - private[statuspolling] def mkErrorString(e: GoogleJsonError) = e.getErrors.toList.mkString(", ") + private[statuspolling] def mkErrorString(e: GoogleJsonError) = e.getErrors.asScala.toList.mkString(", ") } object JesPollingActor { diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala index 774b54b88..58a8a0753 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala @@ -18,7 +18,7 @@ import akka.http.scaladsl.unmarshalling.{Unmarshal, Unmarshaller} import akka.stream.ActorMaterializer import scala.concurrent.duration._ -import scala.concurrent.{ExecutionContext, Future} +import scala.concurrent.Future import scala.language.postfixOps import 
scala.util.{Failure, Success} @@ -115,7 +115,7 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn ) } - override def executeAsync()(implicit ec: ExecutionContext): Future[ExecutionHandle] = { + override def executeAsync(): Future[ExecutionHandle] = { // create call exec dir tesJobPaths.callExecutionRoot.createPermissionedDirectories() val taskMessage = createTaskMessage() @@ -126,7 +126,7 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn } yield PendingExecutionHandle(jobDescriptor, StandardAsyncJob(ctr.id), None, previousStatus = None) } - override def recoverAsync(jobId: StandardAsyncJob)(implicit ec: ExecutionContext) = executeAsync() + override def recoverAsync(jobId: StandardAsyncJob) = executeAsync() override def tryAbort(job: StandardAsyncJob): Unit = { @@ -148,7 +148,7 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn () } - override def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle)(implicit ec: ExecutionContext): Future[TesRunStatus] = { + override def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[TesRunStatus] = { makeRequest[MinimalTaskView](HttpRequest(uri = s"$tesEndpoint/${handle.pendingJob.jobId}?view=MINIMAL")) map { response => val state = response.state From a30c701fee1b8ec4a5ac7676f6df330b3977e4e1 Mon Sep 17 00:00:00 2001 From: Thib Date: Wed, 5 Jul 2017 20:37:17 -0400 Subject: [PATCH 05/41] Rebased (#2406) --- README.md | 4 +- .../execution/callcaching/CallCacheDiffActor.scala | 63 +++++++++++++++++----- .../callcaching/CallCacheDiffActorSpec.scala | 44 ++++++++++++++- 3 files changed, 93 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index 402c52896..57a03b777 100644 --- a/README.md +++ b/README.md @@ -3627,7 +3627,7 @@ Server: spray-can/1.3.3 { "status": "error", - "message": "Cannot find a cache entry for 479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1" + "message": "Cannot find 
call 479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1" } ``` @@ -3643,7 +3643,7 @@ Server: spray-can/1.3.3 { "status": "error", - "message": "Cannot find cache entries for 5174842-4a44-4355-a3a9-3a711ce556f1:wf_hello.hello:-1, 479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1" + "message": "Cannot find calls 5174842-4a44-4355-a3a9-3a711ce556f1:wf_hello.hello:-1, 479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1" } ``` diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala index 19f02bc54..9a484dba3 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala @@ -24,9 +24,10 @@ object CallCacheDiffActor { override def getMessage = message } - private val CallAAndBNotFoundException = CachedCallNotFoundException("callA and callB were run on a previous version of Cromwell on which this endpoint was not supported.") - private val CallANotFoundException = CachedCallNotFoundException("callA was run on a previous version of Cromwell on which this endpoint was not supported.") - private val CallBNotFoundException = CachedCallNotFoundException("callB was run on a previous version of Cromwell on which this endpoint was not supported.") + // Exceptions when calls exist but have no hashes in their metadata, indicating they were run pre-28 + private val HashesForCallAAndBNotFoundException = new Exception("callA and callB were run on a previous version of Cromwell on which this endpoint was not supported.") + private val HashesForCallANotFoundException = new Exception("callA was run on a previous version of Cromwell on which this endpoint was not supported.") + private val HashesForCallBNotFoundException = new Exception("callB was run on a 
previous version of Cromwell on which this endpoint was not supported.") sealed trait CallCacheDiffActorState case object Idle extends CallCacheDiffActorState @@ -109,15 +110,23 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call responseB: MetadataLookupResponse, replyTo: ActorRef) = { - val response = diffHashes(responseA.eventList, responseB.eventList) match { - case Success(diff) => - val diffObject = MetadataObject(Map( - "callA" -> makeCallInfo(queryA, responseA.eventList), - "callB" -> makeCallInfo(queryB, responseB.eventList), - "hashDifferential" -> diff + lazy val buildResponse = { + diffHashes(responseA.eventList, responseB.eventList) match { + case Success(diff) => + val diffObject = MetadataObject(Map( + "callA" -> makeCallInfo(queryA, responseA.eventList), + "callB" -> makeCallInfo(queryB, responseB.eventList), + "hashDifferential" -> diff )) - BuiltCallCacheDiffResponse(metadataComponentJsonWriter.write(diffObject).asJsObject) - case Failure(f) => FailedCallCacheDiffResponse(f) + + BuiltCallCacheDiffResponse(metadataComponentJsonWriter.write(diffObject).asJsObject) + case Failure(f) => FailedCallCacheDiffResponse(f) + } + } + + val response = checkCallsExistence(queryA, queryB, responseA, responseB) match { + case Some(msg) => FailedCallCacheDiffResponse(CachedCallNotFoundException(msg)) + case None => buildResponse } replyTo ! 
response
@@ -127,6 +136,32 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call
   }
 
   /**
+    * Returns an error message if one or both of the calls are not found, or None if both calls exist
+    */
+  private def checkCallsExistence(queryA: MetadataQuery,
+                                  queryB: MetadataQuery,
+                                  responseA: MetadataLookupResponse,
+                                  responseB: MetadataLookupResponse): Option[String] = {
+    import cromwell.core.ExecutionIndex._
+
+    def makeTag(query: MetadataQuery) = {
+      s"${query.workflowId}:${query.jobKey.get.callFqn}:${query.jobKey.get.index.fromIndex}"
+    }
+
+    def makeNotFoundMessage(queries: NonEmptyList[MetadataQuery]) = {
+      val plural = if (queries.tail.nonEmpty) "s" else ""
+      s"Cannot find call$plural ${queries.map(makeTag).toList.mkString(", ")}"
+    }
+
+    (responseA.eventList, responseB.eventList) match {
+      case (a, b) if a.isEmpty && b.isEmpty => Option(makeNotFoundMessage(NonEmptyList.of(queryA, queryB)))
+      case (a, _) if a.isEmpty => Option(makeNotFoundMessage(NonEmptyList.of(queryA)))
+      case (_, b) if b.isEmpty => Option(makeNotFoundMessage(NonEmptyList.of(queryB)))
+      case _ => None
+    }
+  }
+
+  /**
    * Generates the "info" section of callA or callB
    */
   private def makeCallInfo(query: MetadataQuery, eventList: Seq[MetadataEvent]): MetadataComponent = {
@@ -208,9 +243,9 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call
     val hashesB: Map[String, Option[MetadataValue]] = collectHashes(eventsB)
 
     (hashesA.isEmpty, hashesB.isEmpty) match {
-      case (true, true) => Failure(CallAAndBNotFoundException)
-      case (true, false) => Failure(CallANotFoundException)
-      case (false, true) => Failure(CallBNotFoundException)
+      case (true, true) => Failure(HashesForCallAAndBNotFoundException)
+      case (true, false) => Failure(HashesForCallANotFoundException)
+      case (false, true) => Failure(HashesForCallBNotFoundException)
       case (false, false) => Success(diffHashEvents(hashesA, hashesB))
     }
 
diff --git 
a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala index a5e9c73fe..c726bd2d4 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala @@ -208,7 +208,7 @@ class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matcher expectTerminated(actor) } - it should "Respond with a CachedCallNotFoundException if hashes are missing" in { + it should "Respond with an appropriate message if hashes are missing" in { import scala.concurrent.duration._ import scala.language.postfixOps @@ -222,9 +222,49 @@ class CallCacheDiffActorSpec extends TestKitSuite with FlatSpecLike with Matcher actor ! MetadataLookupResponse(queryA, eventsA.filterNot(_.key.key.contains("hashes"))) expectMsgPF(1 second) { - case FailedCallCacheDiffResponse(e: CachedCallNotFoundException) => + case FailedCallCacheDiffResponse(e) => e.getMessage shouldBe "callA and callB were run on a previous version of Cromwell on which this endpoint was not supported." } expectTerminated(actor) } + + it should "Respond with CachedCallNotFoundException if a call is missing" in { + import scala.concurrent.duration._ + import scala.language.postfixOps + + val mockServiceRegistryActor = TestProbe() + val actor = TestFSMRef(new CallCacheDiffActor(mockServiceRegistryActor.ref)) + watch(actor) + val responseB = MetadataLookupResponse(queryB, eventsB.filterNot(_.key.key.contains("hashes"))) + + actor.setState(WaitingForMetadata, CallCacheDiffWithRequest(queryA, queryB, None, Option(responseB), self)) + + actor ! 
MetadataLookupResponse(queryA, List.empty) + + expectMsgPF(1 second) { + case FailedCallCacheDiffResponse(e) => + e.getMessage shouldBe "Cannot find call 971652a6-139c-4ef3-96b5-aeb611a40dbf:callFqnA:1" + } + expectTerminated(actor) + } + + it should "Respond with CachedCallNotFoundException if both calls are missing" in { + import scala.concurrent.duration._ + import scala.language.postfixOps + + val mockServiceRegistryActor = TestProbe() + val actor = TestFSMRef(new CallCacheDiffActor(mockServiceRegistryActor.ref)) + watch(actor) + val responseB = MetadataLookupResponse(queryB, List.empty) + + actor.setState(WaitingForMetadata, CallCacheDiffWithRequest(queryA, queryB, None, Option(responseB), self)) + + actor ! MetadataLookupResponse(queryA, List.empty) + + expectMsgPF(1 second) { + case FailedCallCacheDiffResponse(e) => + e.getMessage shouldBe "Cannot find calls 971652a6-139c-4ef3-96b5-aeb611a40dbf:callFqnA:1, bb85b3ec-e179-4f12-b90f-5191216da598:callFqnB:-1" + } + expectTerminated(actor) + } } From 13691aa5599952806bb5712c35a045d3954c1e86 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Fri, 30 Jun 2017 16:52:32 -0400 Subject: [PATCH 06/41] Remove pre-PBE tables. --- database/migration/src/main/resources/changelog.xml | 1 + .../resources/changesets/remove_pre_pbe_tables.xml | 18 ++++++++++++++++++ .../scala/cromwell/services/ServicesStoreSpec.scala | 14 -------------- 3 files changed, 19 insertions(+), 14 deletions(-) create mode 100644 database/migration/src/main/resources/changesets/remove_pre_pbe_tables.xml diff --git a/database/migration/src/main/resources/changelog.xml b/database/migration/src/main/resources/changelog.xml index 03883abb7..9bc6b932b 100644 --- a/database/migration/src/main/resources/changelog.xml +++ b/database/migration/src/main/resources/changelog.xml @@ -66,6 +66,7 @@ +