diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
index b15d7f2ad..de31e99ed 100644
--- a/.idea/inspectionProfiles/Project_Default.xml
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -3,4 +3,4 @@
-
\ No newline at end of file
+
diff --git a/.pullapprove.yml b/.pullapprove.yml
index 8c6155895..c26c94ba2 100644
--- a/.pullapprove.yml
+++ b/.pullapprove.yml
@@ -1,19 +1,28 @@
-approve_by_comment: true
-approve_regex: ':\+1:'
-reset_on_push: false
-author_approval: ignored
-reviewers:
+# enabling version 2 turns github reviews on by default
+version: 2
+group_defaults:
+ approve_by_comment:
+ enabled: true
+ approve_regex: ':\+1:'
+ reset_on_push:
+ enabled: false
+groups:
+ reviewers:
required: 2
- members:
+ github_reviews:
+ enabled: true
+ author_approval:
+ ignored: true
+ users:
- Horneth
- cjllanwarne
- francares
- gauravs90
- geoffjentry
- - jainh
- jsotobroad
- katevoss
- kcibul
- kshakir
- mcovarr
- ruchim
+ - danbills
diff --git a/.travis.yml b/.travis.yml
index 1be54097a..7b8dfaa2f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,10 @@
sudo: required
dist: trusty
+services:
+ - docker
language: scala
scala:
- - 2.11.8
+ - 2.12.2
jdk:
- oraclejdk8
cache:
@@ -16,10 +18,12 @@ before_cache:
- find $HOME/.ivy2 -name "ivydata-*.properties" -delete
- find $HOME/.sbt -name "*.lock" -delete
before_install:
- - openssl aes-256-cbc -K "$encrypted_5ebd3ff04788_key" -iv "$encrypted_5ebd3ff04788_iv" -in src/bin/travis/resources/jesConf.tar.enc -out jesConf.tar -d || true
+ # https://github.com/travis-ci/travis-ci/issues/7940#issuecomment-310759657
+ - sudo rm -f /etc/boto.cfg
env:
global:
- CENTAUR_BRANCH=develop
+ - INTEGRATION_TESTS_DIR=src/main/resources/integrationTestCases
matrix:
# Setting this variable twice will cause the 'script' section to run twice with the respective env var invoked
- BUILD_TYPE=sbt
@@ -36,3 +40,12 @@ deploy:
script: src/bin/travis/publishRelease.sh
on:
tags: true
+notifications:
+ slack:
+ rooms:
+ - secure: B5KYcnhk/ujAUWlHsjzP7ROLm6MtYhaGikdYf6JYINovhMbVKnZCTlZEy7rqT3L2T5uJ25iefD500VQGk1Gn7puQ1sNq50wqjzQaj20PWEiBwoWalcV/nKBcQx1TyFT13LJv8fbFnVPxFCkC3YXoHedx8qAhDs8GH/tT5J8XOC8=
+ template:
+ - "Build <%{build_url}|#%{build_number}> (<%{compare_url}|%{commit}>) of %{repository}@%{branch} by %{author} %{result} in %{duration}"
+ on_success: change
+ on_failure: change
+ on_pull_requests: false
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2174c82f1..794a378b3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,246 @@
# Cromwell Change Log
+## 29
+
+### Breaking Changes
+
+* Request timeouts for HTTP requests on the REST API now return a 503 status code instead of 500. The response for a request timeout is no longer in JSON format.
+* The metadata endpoint no longer returns gzipped responses by default. This now needs to be explicitly requested with an `Accept-Encoding: gzip` header
+
+* Command line usage has been extensively revised for Cromwell 29. Please see the
+[README](https://github.com/broadinstitute/cromwell#command-line-usage) for details.
+
+* The engine endpoints are now served under `/engine`. Previously engine endpoints were available under
+`/api/engine`. Workflow endpoints are still served under `/api/workflows`. The setting `api.routeUnwrapped` has been
+retired at the same time.
+
+* The response format of the [callcaching/diff](https://github.com/broadinstitute/cromwell#get-apiworkflowsversioncallcachingdiff) endpoint has been updated.
+
+### Cromwell Server
+
+* Cromwell now attempts to gracefully shutdown when running in server mode and receiving a `SIGINT` (`Ctrl-C`) or `SIGTERM` (`kill`) signal. This includes waiting for all pending Database writes before exiting.
+A detailed explanation and information about how to configure this feature can be found in the [Cromwell Wiki](https://github.com/broadinstitute/cromwell/wiki/DevZone#graceful-server-shutdown).
+
+## 28
+
+### Bug Fixes
+
+#### WDL write_* functions add a final newline
+
+The following WDL functions now add a newline after the final line of output (the previous behavior of not adding this
+newline was inadvertent):
+- `write_lines`
+- `write_map`
+- `write_object`
+- `write_objects`
+- `write_tsv`
+
+For example:
+
+```
+task writer {
+ Array[String] a = ["foo", "bar"]
+ command {
+ # used to output: "foo\nbar"
+ # now outputs: "foo\nbar\n"
+ cat write_lines(a)
+ }
+}
+```
+
+#### `ContinueWhilePossible`
+
+A workflow utilizing the WorkflowFailureMode Workflow Option `ContinueWhilePossible` will now successfully reach a terminal state once all runnable jobs have completed.
+#### `FailOnStderr`
+When `FailOnStderr` is set to false, Cromwell no longer checks for the existence of a stderr file for that task.
+
+### WDL Functions
+
+#### New functions: floor, ceil and round:
+
+Enables the `floor`, `ceil` and `round` functions in WDL to convert floating point numbers to integers.
+
+For example we can now use the size of an input file to influence the amount of memory the task is given. In the example below a 500MB input file will result in a request for a VM with 2GB of memory:
+
+```
+task foo {
+ File in_file
+ command { ... }
+ runtime {
+ docker: "..."
+ memory: ceil(size(in_file)) * 4
+ }
+}
+```
+
+### Call Caching
+
+* Hash values calculated by Cromwell for a call when call caching is enabled are now published to the metadata.
+It is published even if the call failed. However if the call is attempted multiple times (because it has been preempted for example),
+since hash values are strictly identical for all attempts, they will only be published in the last attempt section of the metadata for this call.
+If the hashes fail to be calculated, the reason is indicated in a `hashFailures` field in the `callCaching` section of the call metadata.
+*Important*: Hashes are not retroactively published to the metadata, which means only workflows run on Cromwell 28+ will have hashes in their metadata.
+
+See the [README](https://github.com/broadinstitute/cromwell#get-apiworkflowsversionidmetadata) for an example metadata response.
+
+* New endpoint returning the hash differential for 2 calls.
+
+`GET /api/workflows/:version/callcaching/diff`
+
+See the [README](https://github.com/broadinstitute/cromwell#get-apiworkflowsversioncallcachingdiff) for more details.
+
+### Workflow Submission
+
+* The workflow submission parameters `wdlSource` and `wdlDependencies` have been deprecated in favor of `workflowSource` and
+`workflowDependencies` respectively. The older names are still supported in Cromwell 28 with deprecation warnings but will
+be removed in a future version of Cromwell.
+
+### Labels
+* A new `/labels` endpoint has been added to update labels for an existing workflow. See the [README](README.md#patch-apiworkflowsversionidlabels) for more information.
+* Label formatting requirements have been updated, please check the [README](README.md#label-format) for more detailed documentation.
+
+
+### JES Backend
+
+The JES backend now supports a `filesystems.gcs.caching.duplication-strategy` configuration entry.
+It can be set to specify the desired behavior of Cromwell regarding call outputs when a call finds a hit in the cache.
+The default value is `copy` which will copy all output files to the new call directory.
+A second value is allowed, `reference`, that will instead point to the original output files, without copying them.
+
+
+```hocon
+filesystems {
+ gcs {
+ auth = "application-default"
+
+ caching {
+ duplication-strategy = "reference"
+ }
+ }
+}
+```
+
+A placeholder file will be placed in the execution folder of the cached call to explain the absence of output files and point to the location of the original ones.
+
+
+### Metadata Write Batching
+
+Metadata write batching works the same as in previous versions of Cromwell, but the default batch size has been changed from 1 to 200. It's possible that 200 is too high in some environments, but 200 is more likely to be an appropriate value
+than the previous default.
+
+
+## 27
+
+### Migration
+
+* Call Caching has been improved in this version of Cromwell, specifically the time needed to determine whether or not a job can be cached
+ has drastically decreased. To achieve that the database schema has been modified and a migration is required in order to preserve the pre-existing cached jobs.
+ This migration is relatively fast compared to previous migrations. To get an idea of the time needed, look at the size of your `CALL_CACHING_HASH_ENTRY` table.
+ As a benchmark, it takes 1 minute for a table with 6 million rows.
+ The migration will only be executed on MySQL. Other databases will lose their previous cached jobs.
+ In order to run properly on MySQL, **the following flag needs to be adjusted**: https://dev.mysql.com/doc/refman/5.5/en/server-system-variables.html#sysvar_group_concat_max_len
+ The following query will give you a minimum to set the group_concat_max_len value to:
+
+ ```sql
+SELECT MAX(aggregated) as group_concat_max_len FROM
+ (
+ SELECT cche.CALL_CACHING_ENTRY_ID, SUM(LENGTH(CONCAT(cche.HASH_KEY, cche.HASH_VALUE))) AS aggregated
+ FROM CALL_CACHING_HASH_ENTRY cche
+ GROUP BY cche.CALL_CACHING_ENTRY_ID
+ ) aggregation
+ ```
+
+ Here is the SQL command to run to set the group_concat_max_len flag to the proper value:
+
+ ```sql
+SET GLOBAL group_concat_max_len = value
+ ```
+
+ Where `value` is replaced with the value you want to set it to.
+
+ Note that the migration will fail if the flag is not set properly.
+
+### Breaking Changes
+
+* The update to Slick 3.2 requires a database stanza to
+[switch](http://slick.lightbend.com/doc/3.2.0/upgrade.html#profiles-vs-drivers) from using `driver` to `profile`.
+
+```hocon
+database {
+ #driver = "slick.driver.MySQLDriver$" #old
+ profile = "slick.jdbc.MySQLProfile$" #new
+ db {
+ driver = "com.mysql.jdbc.Driver"
+ url = "jdbc:mysql://host/cromwell?rewriteBatchedStatements=true"
+ user = "user"
+ password = "pass"
+ connectionTimeout = 5000
+ }
+}
+```
+
+### Call Caching
+
+Cromwell now supports call caching with floating Docker tags (e.g. `docker: "ubuntu:latest"`). Note it is still considered
+a best practice to specify Docker images as hashes where possible, especially for production usages.
+
+Within a single workflow Cromwell will attempt to resolve all floating tags to the same Docker hash, even if Cromwell is restarted
+during the execution of a workflow. In call metadata the `docker` runtime attribute is now the same as the
+value that actually appeared in the WDL:
+
+```
+ "runtimeAttributes": {
+ "docker": "ubuntu:latest",
+ "failOnStderr": "false",
+ "continueOnReturnCode": "0"
+ }
+```
+
+Previous versions of Cromwell rewrote the `docker` value to the hash of the Docker image.
+
+There is a new call-level metadata value `dockerImageUsed` which captures the hash of the Docker image actually used to
+run the call:
+
+```
+ "dockerImageUsed": "library/ubuntu@sha256:382452f82a8bbd34443b2c727650af46aced0f94a44463c62a9848133ecb1aa8"
+```
+
+### Docker
+
+* The Docker section of the configuration has been slightly reworked
+An option to specify how a Docker hash should be looked up has been added. Two methods are available.
+ "local" will try to look for the image on the machine where cromwell is running. If it can't be found, Cromwell will try to `pull` the image and use the hash from the retrieved image.
+ "remote" will try to look up the image hash directly on the remote repository where the image is located (Docker Hub and GCR are supported)
+Note that the "local" option will require docker to be installed on the machine running cromwell, in order for it to call the docker CLI.
+* Adds hash lookup support for public [quay.io](https://quay.io/) images.
+
+### WDL Feature Support
+* Added support for the new WDL `basename` function. Allows WDL authors to get just the file name from a File (i.e. removing the directory path)
+* Allows coercion of `Map` objects into `Array`s of `Pair`s. This also allows WDL authors to directly scatter over WDL `Map`s.
+
+### Miscellaneous
+* Adds support for JSON file format for google service account credentials. As of Cromwell 27, PEM credentials for PAPI are deprecated and support might be removed in a future version.
+
+```
+google {
+
+ application-name = "cromwell"
+
+ auths = [
+ {
+ name = "service-account"
+ scheme = "service_account"
+ json-file = "/path/to/file.json"
+ }
+ ]
+}
+```
+
+### General Changes
+
+* The `/query` endpoint now supports querying by `label`. See the [README](README.md#get-apiworkflowsversionquery) for more information.
+* The `read_X` standard library functions limit accepted filesizes. These differ by type, e.g. read_bool has a smaller limit than read_string. See reference.conf for default settings.
+
## 26
### Breaking Changes
@@ -43,17 +284,6 @@ system.io {
}
```
-* Added a `script-epilogue` configuration option to adjust the logic that runs at the end of the scripts which wrap call executions.
- This option is adjustable on a per-backend basis. If unspecified, the default value is `sync`.
-
-### WDL Features
-
-With Cromwell 26, Cromwell will support `if x then y else z` expressions (see: https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#if-then-else). For example:
-```
-Boolean b = true
-String s = if b then "value if True" else "value if False"
-```
-
## 25
### External Contributors
diff --git a/NOTICE b/NOTICE
deleted file mode 100644
index 15d3c2b54..000000000
--- a/NOTICE
+++ /dev/null
@@ -1,4 +0,0 @@
-cromwell.webservice/PerRequest.scala (https://github.com/NET-A-PORTER/spray-actor-per-request)
-is distributed with this software under the Apache License, Version 2.0 (see the LICENSE-ASL file). In accordance
-with that license, that software comes with the following notices:
- Copyright (C) 2011-2012 Ian Forsey
diff --git a/README.md b/README.md
index 989a6c609..97040254e 100644
--- a/README.md
+++ b/README.md
@@ -18,9 +18,6 @@ A [Workflow Management System](https://en.wikipedia.org/wiki/Workflow_management
* [Installing](#installing)
* [Upgrading from 0.19 to 0.21](#upgrading-from-019-to-021)
* [Command Line Usage](#command-line-usage)
- * [run](#run)
- * [server](#server)
- * [version](#version)
* [Getting Started with WDL](#getting-started-with-wdl)
* [WDL Support](#wdl-support)
* [Configuring Cromwell](#configuring-cromwell)
@@ -88,6 +85,7 @@ A [Workflow Management System](https://en.wikipedia.org/wiki/Workflow_management
* [POST /api/workflows/:version/batch](#post-apiworkflowsversionbatch)
* [GET /api/workflows/:version/query](#get-apiworkflowsversionquery)
* [POST /api/workflows/:version/query](#post-apiworkflowsversionquery)
+ * [PATCH /api/workflows/:version/:id/labels](#patch-apiworkflowsversionidlabels)
* [GET /api/workflows/:version/:id/status](#get-apiworkflowsversionidstatus)
* [GET /api/workflows/:version/:id/outputs](#get-apiworkflowsversionidoutputs)
* [GET /api/workflows/:version/:id/timing](#get-apiworkflowsversionidtiming)
@@ -95,8 +93,9 @@ A [Workflow Management System](https://en.wikipedia.org/wiki/Workflow_management
* [GET /api/workflows/:version/:id/metadata](#get-apiworkflowsversionidmetadata)
* [POST /api/workflows/:version/:id/abort](#post-apiworkflowsversionidabort)
* [GET /api/workflows/:version/backends](#get-apiworkflowsversionbackends)
- * [GET /api/engine/:version/stats](#get-apiengineversionstats)
- * [GET /api/engine/:version/version](#get-apiengineversionversion)
+ * [GET /api/workflows/:version/callcaching/diff](#get-apiworkflowsversioncallcachingdiff)
+ * [GET /engine/:version/stats](#get-engineversionstats)
+ * [GET /engine/:version/version](#get-engineversionversion)
* [Error handling](#error-handling)
* [Developer](#developer)
* [Generating table of contents on Markdown files](#generating-table-of-contents-on-markdown-files)
@@ -121,13 +120,13 @@ There is a [Cromwell gitter channel](https://gitter.im/broadinstitute/cromwell)
The following is the toolchain used for development of Cromwell. Other versions may work, but these are recommended.
-* [Scala 2.11.8](http://www.scala-lang.org/news/2.11.8/)
+* [Scala 2.12.2](http://www.scala-lang.org/news/2.12.2)
* [SBT 0.13.12](https://github.com/sbt/sbt/releases/tag/v0.13.12)
* [Java 8](http://www.oracle.com/technetwork/java/javase/overview/java8-2100321.html)
# Building
-`sbt assembly` will build a runnable JAR in `target/scala-2.11/`
+`sbt assembly` will build a runnable JAR in `target/scala-2.12/`
Tests are run via `sbt test`. Note that the tests do require Docker to be running. To test this out while downloading the Ubuntu image that is required for tests, run `docker pull ubuntu:latest` prior to running `sbt test`
@@ -141,170 +140,203 @@ See the [migration document](MIGRATION.md) for more details.
# Command Line Usage
-Run the JAR file with no arguments to get the usage message:
+For built-in documentation of Cromwell command line usage, run the Cromwell JAR file with no arguments:
```
+$ java -jar cromwell-<version>.jar
+```
+For example, `$ java -jar cromwell-29.jar`. You will get a usage message like the following:
-$ java -jar cromwell.jar
-java -jar cromwell.jar
-
-Actions:
-run [] []
- [] [] []
+```
+cromwell 29
+Usage: java -jar /path/to/cromwell.jar [server|run] [options] ...
- Given a WDL file and JSON file containing the value of the
- workflow inputs, this will run the workflow locally and
- print out the outputs in JSON format. The workflow
- options file specifies some runtime configuration for the
- workflow (see README for details). The workflow metadata
- output is an optional file path to output the metadata. The
- directory of WDL files is optional. However, it is required
- if the primary workflow imports workflows that are outside
- of the root directory of the Cromwell project.
+ --help Cromwell - Workflow Execution Engine
+ --version
+Command: server
+Starts a web server on port 8000. See the web server documentation for more details about the API endpoints.
+Command: run [options] workflow-source
+Run the workflow and print out the outputs in JSON format.
+ workflow-source Workflow source file.
+ -i, --inputs Workflow inputs file.
+ -o, --options Workflow options file.
+ -t, --type Workflow type.
+ -v, --type-version
+ Workflow type version.
+ -l, --labels Workflow labels file.
+ -p, --imports A directory or zipfile to search for workflow imports.
+ -m, --metadata-output
+ An optional directory path to output metadata.
+```
- Use a single dash ("-") to skip optional files. Ex:
- run noinputs.wdl - - metadata.json -
+## --version
- server
+The `--version` option prints the version of Cromwell and exits.
- Starts a web server on port 8000. See the web server
- documentation for more details about the API endpoints.
+## --help
- -version
+The `--help` option prints the full help text above and exits.
- Returns the version of the Cromwell engine.
+## server
-```
+The `server` command runs Cromwell as a web server. No arguments are accepted.
+See the documentation for Cromwell's REST endpoints [here](#rest-api).
## run
-Given a WDL file and a JSON inputs file (see `inputs` subcommand), Run the workflow and print the outputs:
-
-```
-$ java -jar cromwell.jar run 3step.wdl inputs.json
-... play-by-play output ...
-{
- "three_step.ps.procs": "/var/folders/kg/c7vgxnn902lc3qvc2z2g81s89xhzdz/T/stdout1272284837004786003.tmp",
- "three_step.cgrep.count": 0,
- "three_step.wc.count": 13
-}
-```
-
-The JSON inputs can be left off if there's a file with the same name as the WDL file but with a `.inputs` extension. For example, this will assume that `3step.inputs` exists:
+The `run` command executes a single workflow in Cromwell.
-```
-$ java -jar cromwell.jar run 3step.wdl
-```
+### workflow-source
+The `run` command requires a single argument for the workflow source file.
+
+### --inputs
+An optional file of workflow inputs. Although optional, it is a best practice to use an inputs file to satisfy workflow
+requirements rather than hardcoding inputs directly into a workflow source file.
-If your workflow has no inputs, you can specify `-` as the value for the inputs parameter:
+### --options
+An optional file of workflow options. Some options are global (supported by all backends), while others are backend-specific.
+See the [workflow options](#workflow-options) documentation for more details.
-```
-$ java -jar cromwell.jar run my_workflow.wdl -
-```
+### --type
+An optional parameter to specify the language for the workflow source. Any value specified for this parameter is currently
+ignored and internally the value `WDL` is used.
-The third, optional parameter to the 'run' subcommand is a JSON file of workflow options. By default, the command line will look for a file with the same name as the WDL file but with the extension `.options`. But one can also specify a value of `-` manually to specify that there are no workflow options.
+### --type-version
+An optional parameter to specify the version of the language for the workflow source. Currently any specified value is ignored.
-See the section [workflow options](#workflow-options) for more details.
+### --labels
+An optional parameter to specify a file of JSON key-value label pairs to associate with the workflow.
-```
-$ java -jar cromwell.jar run my_jes_wf.wdl my_jes_wf.json wf_options.json
-```
+### --imports
+You have the option of importing WDL workflows or tasks to use within your workflow, known as sub-workflows.
+If you use sub-workflows within your primary workflow then you must include a zip file with the WDL import files.
-The fourth, optional parameter to the 'run' subcommand is a path where the workflow metadata will be written. By default, no workflow metadata will be written.
+For example, say you have a directory of WDL files:
```
-$ java -jar cromwell.jar run my_wf.wdl - - my_wf.metadata.json
-... play-by-play output ...
-$ cat my_wf.metadata.json
-{
- "workflowName": "w",
- "calls": {
- "w.x": [{
- "executionStatus": "Done",
- "stdout": "/Users/jdoe/projects/cromwell/cromwell-executions/w/a349534f-137b-4809-9425-1893ac272084/call-x/stdout",
- "shardIndex": -1,
- "outputs": {
- "o": "local\nremote"
- },
- "runtimeAttributes": {
- "failOnStderr": "false",
- "continueOnReturnCode": "0"
- },
- "cache": {
- "allowResultReuse": true
- },
- "inputs": {
- "remote": "/Users/jdoe/remote.txt",
- "local": "local.txt"
- },
- "returnCode": 0,
- "backend": "Local",
- "end": "2016-07-11T10:27:56.074-04:00",
- "stderr": "/Users/jdoe/projects/cromwell/cromwell-executions/w/a349534f-137b-4809-9425-1893ac272084/call-x/stderr",
- "callRoot": "cromwell-executions/w/a349534f-137b-4809-9425-1893ac272084/call-x",
- "attempt": 1,
- "start": "2016-07-11T10:27:55.992-04:00"
- }]
- },
- "outputs": {
- "w.x.o": "local\nremote"
- },
- "workflowRoot": "cromwell-executions/w/a349534f-137b-4809-9425-1893ac272084",
- "id": "a349534f-137b-4809-9425-1893ac272084",
- "inputs": {
- "w.x.remote": "/Users/jdoe/remote.txt",
- "w.x.local": "local.txt"
- },
- "submission": "2016-07-11T10:27:54.907-04:00",
- "status": "Succeeded",
- "end": "2016-07-11T10:27:56.108-04:00",
- "start": "2016-07-11T10:27:54.919-04:00"
-}
-```
-
-The fifth, optional parameter to the 'run' subcommand is a zip file which contains WDL source files. This zip file can be passed
-and your primary workflow can import any WDL's from that collection and re-use those tasks.
-
-For example, consider you have a directory of WDL files:
-```
-my_WDLs
+wdl_library
└──cgrep.wdl
└──ps.wdl
└──wc.wdl
```
-If you zip that directory to my_WDLs.zip, you have the option to pass it in as the last parameter in your run command
-and be able to reference these WDLs as imports in your primary WDL. For example, your primary WDL can look like this:
+If you zip that directory into `wdl_library.zip`, then you can reference and use these WDLs within your primary WDL.
+
+This could be your primary WDL:
+
```
import "ps.wdl" as ps
import "cgrep.wdl"
import "wc.wdl" as wordCount
-workflow threestep {
+workflow my_wf {
call ps.ps as getStatus
call cgrep.cgrep { input: str = getStatus.x }
call wordCount { input: str = ... }
}
-
```
-The command to run this WDL, without needing any inputs, workflow options or metadata files would look like:
-```
-$ java -jar cromwell.jar run threestep.wdl - - - /path/to/my_WDLs.zip
-```
+Then to run this WDL without any inputs, workflow options, or metadata files, you would enter:
-The sixth optional parameter is a path to a labels file. See [Labels](#labels) for information and the expected format.
+`$ java -jar cromwell-<version>.jar run my_wf.wdl --imports /path/to/wdl_library.zip`
-## server
+### --metadata-output
-Start a server on port 8000, the API for the server is described in the [REST API](#rest-api) section.
+You can include a path where Cromwell will write the workflow metadata JSON, such as start/end timestamps, status, inputs, and outputs. By default, Cromwell does not write workflow metadata.
-## version
+This example includes a metadata path called `/path/to/my_wf.metadata`:
-Returns the version of Cromwell engine.
+```
+$ java -jar cromwell-<version>.jar run my_wf.wdl --metadata-output /path/to/my_wf.metadata
+```
+
+Again, Cromwell is very verbose. Here is the metadata output in my_wf.metadata:
+
+```
+{
+ "workflowName": "my_wf",
+ "submittedFiles": {
+ "inputs": "{\"my_wf.hello.addressee\":\"m'Lord\"}",
+ "workflow": "\ntask hello {\n String addressee\n command {\n echo \"Hello ${addressee}!\"\n }\n output {\n String salutation = read_string(stdout())\n }\n runtime {\n
+\n }\n}\n\nworkflow my_wf {\n call hello\n output {\n hello.salutation\n }\n}\n",
+ "options": "{\n\n}"
+ },
+ "calls": {
+ "my_wf.hello": [
+ {
+ "executionStatus": "Done",
+ "stdout": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068/call-hello/execution/stdout",
+ "backendStatus": "Done",
+ "shardIndex": -1,
+ "outputs": {
+ "salutation": "Hello m'Lord!"
+ },
+ "runtimeAttributes": {
+ "continueOnReturnCode": "0",
+ "failOnStderr": "false"
+ },
+ "callCaching": {
+ "allowResultReuse": false,
+ "effectiveCallCachingMode": "CallCachingOff"
+ },
+ "inputs": {
+ "addressee": "m'Lord"
+ },
+ "returnCode": 0,
+ "jobId": "28955",
+ "backend": "Local",
+ "end": "2017-04-19T10:53:25.045-04:00",
+ "stderr": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068/call-hello/execution/stderr",
+ "callRoot": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068/call-hello",
+ "attempt": 1,
+ "executionEvents": [
+ {
+ "startTime": "2017-04-19T10:53:23.570-04:00",
+ "description": "PreparingJob",
+ "endTime": "2017-04-19T10:53:23.573-04:00"
+ },
+ {
+ "startTime": "2017-04-19T10:53:23.569-04:00",
+ "description": "Pending",
+ "endTime": "2017-04-19T10:53:23.570-04:00"
+ },
+ {
+ "startTime": "2017-04-19T10:53:25.040-04:00",
+ "description": "UpdatingJobStore",
+ "endTime": "2017-04-19T10:53:25.045-04:00"
+ },
+ {
+ "startTime": "2017-04-19T10:53:23.570-04:00",
+ "description": "RequestingExecutionToken",
+ "endTime": "2017-04-19T10:53:23.570-04:00"
+ },
+ {
+ "startTime": "2017-04-19T10:53:23.573-04:00",
+ "description": "RunningJob",
+ "endTime": "2017-04-19T10:53:25.040-04:00"
+ }
+ ],
+ "start": "2017-04-19T10:53:23.569-04:00"
+ }
+ ]
+ },
+ "outputs": {
+ "my_wf.hello.salutation": "Hello m'Lord!"
+ },
+ "workflowRoot": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068",
+ "id": "cd0fe94a-984e-4a19-ab4c-8f7f07038068",
+ "inputs": {
+ "my_wf.hello.addressee": "m'Lord"
+ },
+ "submission": "2017-04-19T10:53:19.565-04:00",
+ "status": "Succeeded",
+ "end": "2017-04-19T10:53:25.063-04:00",
+ "start": "2017-04-19T10:53:23.535-04:00"
+}
+```
# Getting Started with WDL
@@ -368,14 +400,23 @@ For many examples on how to use WDL see [the WDL site](https://github.com/broadi
* [File stdout()](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#file-stdout)
* [File stderr()](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#file-stderr)
* [Array\[String\] read_lines(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arraystring-read_linesstringfile)
+ * File reads are limited to 128 KB. Configurable via conf file.
* [Array\[Array\[String\]\] read_tsv(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arrayarraystring-read_tsvstringfile)
+ * File reads are limited to 128 KB. Configurable via conf file.
* [Map\[String, String\] read_map(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#mapstring-string-read_mapstringfile)
+ * File reads are limited to 128 KB. Configurable via conf file.
* [Object read_object(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#object-read_objectstringfile)
+ * File reads are limited to 128 KB. Configurable via conf file.
* [Array\[Object\] read_objects(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arrayobject-read_objectsstringfile)
+ * File reads are limited to 128 KB. Configurable via conf file.
* [Int read_int(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#int-read_intstringfile)
+ * File reads are limited to 19 B. Configurable via conf file.
* [String read_string(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#string-read_stringstringfile)
+ * File reads are limited to 128 KB. Configurable via conf file.
* [Float read_float(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#float-read_floatstringfile)
+ * File reads are limited to 50 B. Configurable via conf file.
* [Boolean read_boolean(String|File)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#boolean-read_booleanstringfile)
+ * File reads are limited to 7 B. Configurable via conf file.
* [File write_lines(Array\[String\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#file-write_linesarraystring)
* [File write_tsv(Array\[Array\[String\]\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#file-write_tsvarrayarraystring)
* [File write_map(Map\[String, String\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#file-write_mapmapstring-string)
@@ -385,8 +426,8 @@ For many examples on how to use WDL see [the WDL site](https://github.com/broadi
* [String sub(String, String, String)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#string-substring-string-string)
* [Array\[Int\] range(Int)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arrayint-rangeint)
* [Array\[Array\[X\]\] transpose(Array\[Array\[X\]\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arrayarrayx-transposearrayarrayx)
- * [Pair(X,Y) zip(X,Y)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#pairxy-zipxy)
- * [Pair(X,Y) cross(X,Y)](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#pairxy-crossxy)
+ * [Array\[Pair\[X,Y\]\] zip(Array\[X\], Array\[Y\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arraypairxy-ziparrayx-arrayy)
+ * [Array\[Pair\[X,Y\]\] cross(Array\[X\], Array\[Y\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arraypairxy-crossarrayx-arrayy)
* [Integer length(Array\[X\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#integer-lengtharrayx)
* [Array\[String\] prefix(String, Array\[X\])](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#arraystring-prefixstring-arrayx)
* [Data Types & Serialization](https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#data-types--serialization)
@@ -497,8 +538,7 @@ Then, edit the configuration file `database` stanza, as follows:
```
database {
-
- driver = "slick.driver.MySQLDriver$"
+ profile = "slick.jdbc.MySQLProfile$"
db {
driver = "com.mysql.jdbc.Driver"
url = "jdbc:mysql://host/cromwell?rewriteBatchedStatements=true"
@@ -506,10 +546,6 @@ database {
password = "pass"
connectionTimeout = 5000
}
-
- test {
- ...
- }
}
```
@@ -527,7 +563,7 @@ system {
Or, via `-Dsystem.abort-jobs-on-terminate=true` command line option.
-By default, this value is false when running `java -jar cromwell.jar server`, and true when running `java -jar cromwell.jar run `.
+By default, this value is false when running `java -jar cromwell.jar server`, and true when running `java -jar cromwell.jar run `.
# Security
@@ -643,7 +679,7 @@ When Cromwell runs a workflow, it first creates a directory `//call-`. This is the ``. For example, having a `stdout` and `stderr` file is common among both backends and they both write a shell script file to the `` as well. See the descriptions below for details about backend-specific files that are written to these directories.
-An example of a workflow output directory for a three-step WDL file might look like this:
+An example of a workflow output directory for a three-step workflow might look like this:
```
cromwell-executions/
@@ -1007,7 +1043,7 @@ backend {
TES {
actor-factory = "cromwell.backend.impl.tes.TesBackendLifecycleActorFactory"
config {
- endpoint = "https:///v1/jobs"
+ endpoint = "https:///v1/tasks"
root = "cromwell-executions"
dockerRoot = "/cromwell-executions"
concurrent-job-limit = 1000
@@ -1025,16 +1061,13 @@ This backend supports the following optional runtime attributes / workflow optio
* docker: Docker image to use such as "Ubuntu".
* dockerWorkingDir: defines the working directory in the container.
-Outputs:
-It will use `dockerOutputDir` runtime attribute / workflow option to resolve the folder in which the execution results will placed. If there is no `dockerWorkingDir` defined it will use `/cromwell-executions//call-/execution`.
-
### CPU, Memory and Disk
This backend supports CPU, memory and disk size configuration through the use of the following runtime attributes / workflow options:
-* cpu: defines the amount of CPU to use. Default value: 1. Type: Integer. Ex: 4.
-* memory: defines the amount of memory to use. Default value: "2 GB". Type: String. Ex: "4 GB" or "4096 MB"
-* disk: defines the amount of disk to use. Default value: "2 GB". Type: String. Ex: "1 GB" or "1024 MB"
+* cpu: defines the amount of CPU to use. Type: Integer. Ex: 4.
+* memory: defines the amount of memory to use. Type: String. Ex: "4 GB" or "4096 MB"
+* disk: defines the amount of disk to use. Type: String. Ex: "1 GB" or "1024 MB"
-It they are not set, the TES backend will use default values.
+If they are not set, the TES backend may use default values.
## Sun GridEngine Backend
@@ -1267,78 +1300,117 @@ The `job-id-regex` should contain one capture group while matching against the w
Allows to execute jobs using HTCondor which is a specialized workload management system for compute-intensive jobs created by the Center for High Throughput Computing in the Department of Computer Sciences at the University of Wisconsin-Madison (UW-Madison).
-This backend creates six files in the `` (see previous section):
+The backend is specified via the actor factory `ConfigBackendLifecycleActorFactory`:
-* `script` - A shell script of the job to be run. This contains the user's command from the `command` section of the WDL code.
-* `stdout` - The standard output of the process
-* `stderr` - The standard error of the process
-* `submitfile` - A submit file that HtCondor understands in order to submit a job
-* `submitfile.stdout` - The standard output of the submit file
-* `submitfile.stderr` - The standard error of the submit file
+```
+backend {
+ providers {
+ HtCondor {
+ config {
+ actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
+ # ... other configuration
+ }
+ }
+ }
+}
+```
-The `script` file contains:
+This backend makes the same assumption about the filesystem that the local backend does: the Cromwell process and the jobs both have read/write access to the CWD of the job.
+
+The CWD will contain a `script.sh` file which will contain the same contents as the Local backend:
```
+#!/bin/sh
cd
echo $? > rc
```
-The `submitfile` file contains:
+The job is launched with a configurable script command such as:
```
-executable=cromwell-executions/test/e950e07d-4132-4fe0-8d86-ab6925dd94ad/call-merge_files/script
-output=cromwell-executions/test/e950e07d-4132-4fe0-8d86-ab6925dd94ad/call-merge_files/stdout
-error=cromwell-executions/test/e950e07d-4132-4fe0-8d86-ab6925dd94ad/call-merge_files/stderr
-log=cromwell-executions/test/e950e07d-4132-4fe0-8d86-ab6925dd94ad/call-merge_files/merge_files.log
+chmod 755 ${script}
+cat > ${cwd}/execution/submitFile < rc`, the backend will wait for the existence of this file, parse out the return code and determine success or failure and then subsequently post-process.
-```
+The command used to submit the job is specified under the configuration key `backend.providers.HtCondor.config.submit`. It uses the same syntax as a command in WDL, and will be provided the variables:
-* provider: it defines the provider to use based on CacheActorFactory and CacheActor interfaces.
-* enabled: enables or disables cache.
-* forceRewrite: it allows to invalidate the cache entry and store result again.
-* db section: configuration related to MongoDB provider. It may not exist for other implementations.
+* `script` - A shell script of the job to be run. This contains the user's command from the `command` section of the WDL code.
+* `cwd` - The path where the script should be run.
+* `out` - The path to the stdout.
+* `err` - The path to the stderr.
+* `job_name` - A unique name for the job.
-### Docker
-This backend supports the following optional runtime attributes / workflow options for working with Docker:
-* docker: Docker image to use such as "Ubuntu".
-* dockerWorkingDir: defines the working directory in the container.
-* dockerOutputDir: defiles the output directory in the container when there is the need to define a volume for outputs within the container. By default if this attribute is not set, dockerOutputDir will be the job working directory.
+This backend also supports docker as an optional feature. Configuration key `backend.providers.HtCondor.config.submit-docker` is specified for this end. When the WDL contains a docker runtime attribute, this command will be provided with two additional variables:
-Inputs:
-HtCondor backend analyzes all inputs and do a distinct of the folders in order to mount input folders into the container.
+* `docker` - The docker image name.
+* `docker_cwd` - The path where `cwd` should be mounted within the docker container.
+
+```
+chmod 755 ${script}
+cat > ${cwd}/execution/dockerScript < ${cwd}/execution/submitFile <= 64"]
+ cpu = 2
+ memory = "1GB"
+ disk = "1GB"
+ nativeSpecs: "TARGET.Arch == \"INTEL\" && TARGET.Memory >= 64"
}
```
-nativeSpecs attribute needs to be specified as an array of strings to work.
+nativeSpecs attribute needs to be specified as String.
## Spark Backend
-This backend adds support for execution of spark jobs in a workflow using the existing wdl format.
+This backend adds support for execution of spark jobs in a workflow.
It supports the following Spark deploy modes:
@@ -1428,7 +1503,7 @@ Supported runtime attributes for a Spark Job is as follows:
* appMainClass ( Spark app/job entry point)
* numberOfExecutors ( Specific to cluster deploy mode)
-Sample usage :
+Sample usage:
```wdl
task sparkjob_with_yarn_cluster {
@@ -1454,8 +1529,8 @@ Supported File Systems as follows:
* Network File System
* Distributed file system
-### Sample Wdl
-Next, create a Wdl, and it's json input like so:
+### Sample WDL
+Next, create a WDL, and its json input like so:
```wdl
task sparkjob_with_yarn_cluster {
@@ -1809,16 +1884,15 @@ Valid keys and their meanings:
* **google_project** - (JES backend only) Specifies which google project to execute this workflow.
* **refresh_token** - (JES backend only) Only used if `localizeWithRefreshToken` is specified in the [configuration file](#configuring-cromwell).
* **auth_bucket** - (JES backend only) defaults to the the value in **jes_gcs_root**. This should represent a GCS URL that only Cromwell can write to. The Cromwell account is determined by the `google.authScheme` (and the corresponding `google.userAuth` and `google.serviceAuth`)
- * **monitoring_script** - (JES backend only) Specifies a GCS URL to a script that will be invoked prior to the WDL command being run. For example, if the value for monitoring_script is "gs://bucket/script.sh", it will be invoked as `./script.sh > monitoring.log &`. The value `monitoring.log` file will be automatically de-localized.
+ * **monitoring_script** - (JES backend only) Specifies a GCS URL to a script that will be invoked prior to the user command being run. For example, if the value for monitoring_script is "gs://bucket/script.sh", it will be invoked as `./script.sh > monitoring.log &`. The value `monitoring.log` file will be automatically de-localized.
# Labels
-Every call in Cromwell is labelled by Cromwell so that it can be queried about later. The current label set automatically applied is:
+Every call run on the JES backend is given certain labels by default, so that Google resources can be queried by these labels later. The current default label set automatically applied is:
| Key | Value | Example | Notes |
|-----|-------|---------|-------|
| cromwell-workflow-id | The Cromwell ID given to the root workflow (i.e. the ID returned by Cromwell on submission) | cromwell-d4b412c5-bf3d-4169-91b0-1b635ce47a26 | To fit the required [format](#label-format), we prefix with 'cromwell-' |
-| cromwell-workflow-name | The name of the root workflow | my-root-workflow | |
| cromwell-sub-workflow-name | The name of this job's sub-workflow | my-sub-workflow | Only present if the task is called in a subworkflow. |
| wdl-task-name | The name of the WDL task | my-task | |
| wdl-call-alias | The alias of the WDL call that created this job | my-task-1 | Only present if the task was called with an alias. |
@@ -1836,10 +1910,15 @@ Custom labels can also be applied to every call in a workflow by specifying a cu
## Label Format
-To fit in with the Google schema for labels, label key and value strings must match the regex `[a-z]([-a-z0-9]*[a-z0-9])?` and be between 1 and 63 characters in length.
-
-For custom labels, Cromwell will reject any request which is made containing invalid label strings. For automatically applied labels, Cromwell will modify workflow/task/call names to fit the schema, according to the following rules:
+When labels are supplied to Cromwell, it will fail any request containing invalid label strings. Below are the requirements for a valid label key/value pair in Cromwell:
+- Label keys and values can't contain characters other than `[a-z]`, `[0-9]` or `-`.
+- Label keys must start with `[a-z]` and end with `[a-z]` or `[0-9]`.
+- Label values must start and end with `[a-z]` or `[0-9]`.
+- Label keys may not be empty but label values may be empty.
+- Label key and values have a max char limit of 63.
+Google has a different schema for labels, where label key and value strings must match the regex `[a-z]([-a-z0-9]*[a-z0-9])?` and be no more than 63 characters in length.
+For automatically applied labels, Cromwell will modify workflow/task/call names to fit the schema, according to the following rules:
- Any capital letters are lowercased.
- Any character which is not one of `[a-z]`, `[0-9]` or `-` will be replaced with `-`.
- If the start character does not match `[a-z]` then prefix with `x--`
@@ -1878,18 +1957,18 @@ Cromwell also accepts two [workflow option](#workflow-options) related to call c
* If call caching is enabled, but one wishes to run a workflow but not add any of the calls into the call cache when they finish, the `write_to_cache` option can be set to `false`. This value defaults to `true`.
* If call caching is enabled, but you don't want to check the cache for any `call` invocations, set the option `read_from_cache` to `false`. This value also defaults to `true`
-> **Note:** If call caching is disabled, the to workflow options `read_from_cache` and `write_to_cache` will be ignored and the options will be treated as though they were 'false'.
+> **Note:** If call caching is disabled, the workflow options `read_from_cache` and `write_to_cache` will be ignored and the options will be treated as though they were 'false'.
## Docker Tags
Docker tags are a convenient way to point to a version of an image (ubuntu:14.04), or even the latest version (ubuntu:latest).
For that purpose, tags are mutable, meaning that the image they point to can change, while the tag name stays the same.
-While this is very convenient in some cases, using mutable, or "floating" tags in WDL affects the reproducibility of the WDL file: the same WDL using "ubuntu:latest" run now, and a year, or even a month from now will actually run with different docker images.
+While this is very convenient in some cases, using mutable, or "floating" tags in tasks affects the reproducibility of a workflow: the same workflow using "ubuntu:latest" run now, and a year, or even a month from now will actually run with different docker images.
This has an even bigger impact when Call Caching is turned on in Cromwell, and could lead to unpredictable behaviors if a tag is updated in the middle of a workflow or even a scatter for example.
Docker provides another way of identifying an image version, using the specific digest of the image. The digest is guaranteed to be different if 2 images have different byte content. For more information see https://docs.docker.com/registry/spec/api/#/content-digests
A docker image with digest can be referenced as follows : **ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950**
The above image refers to a specific image of ubuntu, that does not depend on a floating tag.
-A WDL containing this Docker image run now and a year from now will run in the exact same container.
+A workflow containing this Docker image run now and a year from now will run in the exact same container.
In order to remove unpredictable behaviors, Cromwell takes the following approach regarding floating docker tags.
@@ -1899,22 +1978,33 @@ When Cromwell finds a job ready to be run, it will first look at its docker runt
* The job does specify a docker runtime attribute:
* The docker image uses a hash: All call caching settings apply normally
* The docker image uses a floating tag:
- Call caching `reading` will be disabled for this job. Specifically, Cromwell will *not* attempt to find an entry in the cache for this job.
- Additionally, cromwell will attempt to look up the hash of the image. Upon success, it will replace the user's docker value with the hash.
- This mechanism ensures that as long as Cromwell is able to lookup the hash, the job is guaranteed to have run on the container with that hash.
- The docker value with the hash used for the job will be reported in the runtime attributes section of the metadata.
- If Cromwell fails to lookup the hash (unsupported registry, wrong credentials, ...) it will run the job with the user provided floating tag.
- If call caching writing is turned on, Cromwell will still write the job in the cache database, using:
- * the hash if the lookup succeeded
- * the floating tag otherwise.
-
-Docker registry and access levels supported by Cromwell for docker hash lookup:
-
-| | DockerHub || GCR ||
-|:-----:|:---------:|:-------:|:------:|:-------:|
-| | Public | Private | Public | Private |
-| JES | X | X | X | X |
-| Other | X | | X | |
+ * Cromwell will attempt to look up the hash of the image. Upon success it will pass both the floating tag and this hash value to the backend.
+ * All backends currently included with Cromwell will utilize this hash value to run the job.
+ * Within a single workflow all floating tags will resolve to the same hash value even if Cromwell is restarted when the workflow is running.
+ * If Cromwell fails to lookup the hash (unsupported registry, wrong credentials, ...) it will run the job with the user provided floating tag.
+ * The actual Docker image (floating tag or hash) used for the job will be reported in the `dockerImageUsed` attribute of the call metadata.
+
+### Docker Lookup
+
+Cromwell provides 2 methods to lookup a docker hash from a docker tag:
+
+* Local
+ In this mode, cromwell will first attempt to find the image on the local machine where it's running using the docker CLI. If the image is present, then its hash will be used.
+ If it's not present, cromwell will execute a `docker pull` to try and retrieve it. If this succeeds, the newly retrieved hash will be used. Otherwise the lookup will be considered failed.
+ Note that cromwell runs the `docker` CLI the same way a human would. This means two things:
+ * The machine Cromwell is running on needs to have docker installed and a docker daemon running.
+ * Whichever credentials (and only those) are available on that machine will be available to pull the image.
+
+* Remote
+ In this mode, cromwell will attempt to retrieve the hash by contacting the remote docker registry where the image is stored. This currently supports Docker Hub and GCR.
+
+ Docker registry and access levels supported by Cromwell for docker hash lookup in "remote" mode:
+
+ | | DockerHub || GCR ||
+ |:-----:|:---------:|:-------:|:------:|:-------:|
+ | | Public | Private | Public | Private |
+ | JES | X | X | X | X |
+ | Other | X | | X | |
## Local Filesystem Options
When running a job on the Config (Shared Filesystem) backend, Cromwell provides some additional options in the backend's config section:
@@ -1950,7 +2040,7 @@ When running a job on the Config (Shared Filesystem) backend, Cromwell provides
```
# Imports
-Import statements inside of a WDL file are supported by Cromwell when running in Server mode as well as Single Workflow Runner Mode.
+Import statements inside of a workflow file are supported by Cromwell when running in Server mode as well as Single Workflow Runner Mode.
In Single Workflow Runner Mode, you pass in a zip file which includes the WDL files referenced by the import statements. Cromwell requires the zip file to be passed in as a command line argument, as explained by the section [run](#run).
@@ -1959,7 +2049,7 @@ For example, given a workflow `wf.wdl` and an imports directory `WdlImports.zip`
java -jar cromwell.jar wf.wdl wf.inputs - - WdlImports.zip
```
-In Server Mode, you pass in a zip file using the parameter `wdlDependencies` via the [POST /api/workflows/:version](#post-apiworkflowsversion) endpoint.
+In Server Mode, you pass in a zip file using the parameter `workflowDependencies` via the [POST /api/workflows/:version](#post-apiworkflowsversion) endpoint.
# Sub Workflows
@@ -2420,7 +2510,7 @@ It's also possible to set the URL query parameter `expandSubWorkflows` to `true`
# REST API
-The `server` subcommand on the executable JAR will start an HTTP server which can accept WDL files to run as well as check status and output of existing workflows.
+The `server` subcommand on the executable JAR will start an HTTP server which can accept workflow files to run as well as check status and output of existing workflows.
The following sub-sections define which HTTP Requests the web server can accept and what they will return. Example HTTP requests are given in [HTTPie](https://github.com/jkbrzt/httpie) and [cURL](https://curl.haxx.se/)
@@ -2432,12 +2522,12 @@ All web server requests include an API version in the url. The current version i
This endpoint accepts a POST request with a `multipart/form-data` encoded body. The form fields that may be included are:
-* `wdlSource` - *Required* Contains the WDL file to submit for execution.
-* `workflowInputs` - *Optional* JSON file containing the inputs. A skeleton file can be generated from [wdltool](https://github.com/broadinstitute/wdltool) using the "inputs" subcommand.
+* `workflowSource` - *Required* Contains the workflow source file to submit for execution.
+* `workflowInputs` - *Optional* JSON file containing the inputs. For WDL workflows a skeleton file can be generated from [wdltool](https://github.com/broadinstitute/wdltool) using the "inputs" subcommand.
* `workflowInputs_n` - *Optional* Where `n` is an integer. JSON file containing the 'n'th set of auxiliary inputs.
* `workflowOptions` - *Optional* JSON file containing options for this workflow execution. See the [run](#run) CLI sub-command for some more information about this.
* `customLabels` - *Optional* JSON file containing a set of custom labels to apply to this workflow. See [Labels](#labels) for the expected format.
-* `wdlDependencies` - *Optional* ZIP file containing WDL files that are used to resolve import statements.
+* `workflowDependencies` - *Optional* ZIP file containing workflow source files that are used to resolve import statements.
Regarding the workflowInputs parameter, in case of key conflicts between multiple input JSON files, higher values of x in workflowInputs_x override lower values. For example, an input specified in workflowInputs_3 will override an input with the same name in workflowInputs or workflowInputs_2.
Similarly, an input key specified in workflowInputs_5 will override an identical input key in any other input file.
@@ -2448,13 +2538,13 @@ Additionally, although Swagger has a limit of 5 JSON input files, the REST endpo
cURL:
```
-$ curl -v "localhost:8000/api/workflows/v1" -F wdlSource=@src/main/resources/3step.wdl -F workflowInputs=@test.json
+$ curl -v "localhost:8000/api/workflows/v1" -F workflowSource=@src/main/resources/3step.wdl -F workflowInputs=@test.json
```
HTTPie:
```
-$ http --print=hbHB --form POST localhost:8000/api/workflows/v1 wdlSource=@src/main/resources/3step.wdl workflowInputs@inputs.json
+$ http --print=hbHB --form POST localhost:8000/api/workflows/v1 workflowSource=@src/main/resources/3step.wdl workflowInputs@inputs.json
```
Request:
@@ -2470,7 +2560,7 @@ Host: localhost:8000
User-Agent: HTTPie/0.9.2
--64128d499e9e4616adea7d281f695dca
-Content-Disposition: form-data; name="wdlSource"
+Content-Disposition: form-data; name="workflowSource"
task ps {
command {
@@ -2540,13 +2630,13 @@ To specify workflow options as well:
cURL:
```
-$ curl -v "localhost:8000/api/workflows/v1" -F wdlSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0.json -F workflowOptions=@options.json
+$ curl -v "localhost:8000/api/workflows/v1" -F workflowSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0.json -F workflowOptions=@options.json
```
HTTPie:
```
-http --print=HBhb --form POST http://localhost:8000/api/workflows/v1 wdlSource=@wdl/jes0.wdl workflowInputs@wdl/jes0.json workflowOptions@options.json
+http --print=HBhb --form POST http://localhost:8000/api/workflows/v1 workflowSource=@wdl/jes0.wdl workflowInputs@wdl/jes0.json workflowOptions@options.json
```
Request (some parts truncated for brevity):
@@ -2562,7 +2652,7 @@ Host: localhost:8000
User-Agent: HTTPie/0.9.2
--f3fd038395644de596c460257626edd7
-Content-Disposition: form-data; name="wdlSource"
+Content-Disposition: form-data; name="workflowSource"
task x { ... }
task y { ... }
@@ -2598,28 +2688,28 @@ Content-Disposition: form-data; name="workflowOptions"; filename="options.json"
This endpoint accepts a POST request with a `multipart/form-data`
encoded body. The form fields that may be included are:
-* `wdlSource` - *Required* Contains the WDL file to submit for
+* `workflowSource` - *Required* Contains the workflow source file to submit for
execution.
* `workflowInputs` - *Required* JSON file containing the inputs in a
-JSON array. A skeleton file for a single inputs json element can be
+JSON array. For WDL workflows a skeleton file for a single inputs json element can be
generated from [wdltool](https://github.com/broadinstitute/wdltool)
using the "inputs" subcommand. The orderded endpoint responses will
contain one workflow submission response for each input, respectively.
* `workflowOptions` - *Optional* JSON file containing options for this
workflow execution. See the [run](#run) CLI sub-command for some more
information about this.
-* `wdlDependencies` - *Optional* ZIP file containing WDL files that are used to resolve import statements. Applied equally to all workflowInput sets.
+* `workflowDependencies` - *Optional* ZIP file containing workflow source files that are used to resolve import statements. Applied equally to all workflowInput sets.
cURL:
```
-$ curl -v "localhost:8000/api/workflows/v1/batch" -F wdlSource=@src/main/resources/3step.wdl -F workflowInputs=@test_array.json
+$ curl -v "localhost:8000/api/workflows/v1/batch" -F workflowSource=@src/main/resources/3step.wdl -F workflowInputs=@test_array.json
```
HTTPie:
```
-$ http --print=hbHB --form POST localhost:8000/api/workflows/v1/batch wdlSource=@src/main/resources/3step.wdl workflowInputs@inputs_array.json
+$ http --print=hbHB --form POST localhost:8000/api/workflows/v1/batch workflowSource=@src/main/resources/3step.wdl workflowInputs@inputs_array.json
```
Request:
@@ -2635,7 +2725,7 @@ Host: localhost:8000
User-Agent: HTTPie/0.9.2
--64128d499e9e4616adea7d281f695dcb
-Content-Disposition: form-data; name="wdlSource"
+Content-Disposition: form-data; name="workflowSource"
task ps {
command {
@@ -2716,13 +2806,13 @@ To specify workflow options as well:
cURL:
```
-$ curl -v "localhost:8000/api/workflows/v1/batch" -F wdlSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0_array.json -F workflowOptions=@options.json
+$ curl -v "localhost:8000/api/workflows/v1/batch" -F workflowSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0_array.json -F workflowOptions=@options.json
```
HTTPie:
```
-http --print=HBhb --form POST http://localhost:8000/api/workflows/v1/batch wdlSource=@wdl/jes0.wdl workflowInputs@wdl/jes0_array.json workflowOptions@options.json
+http --print=HBhb --form POST http://localhost:8000/api/workflows/v1/batch workflowSource=@wdl/jes0.wdl workflowInputs@wdl/jes0_array.json workflowOptions@options.json
```
Request (some parts truncated for brevity):
@@ -2738,7 +2828,7 @@ Host: localhost:8000
User-Agent: HTTPie/0.9.2
--f3fd038395644de596c460257626edd8
-Content-Disposition: form-data; name="wdlSource"
+Content-Disposition: form-data; name="workflowSource"
task x { ... }
task y { ... }
@@ -2781,6 +2871,7 @@ This endpoint allows for querying workflows based on the following criteria:
* `name`
* `id`
* `status`
+* `label`
* `start` (start datetime with mandatory offset)
* `end` (end datetime with mandatory offset)
* `page` (page of results)
@@ -2790,9 +2881,13 @@ Names, ids, and statuses can be given multiple times to include
workflows with any of the specified names, ids, or statuses. When
multiple names are specified, any workflow matching one of the names
will be returned. The same is true for multiple ids or statuses. When
-different types of criteria are specified, for example names and
-statuses, the results must match both the one of the specified names and
-one of the statuses. Using page and pagesize will enable server side pagination.
+more than one label is specified, only workflows associated with all of
+the given labels will be returned.
+
+When a combination of criteria are specified, for example querying by
+names and statuses, the results must return workflows that match one of
+the specified names and one of the statuses. Using page and pagesize will
+enable server side pagination.
Valid statuses are `Submitted`, `Running`, `Aborting`, `Aborted`, `Failed`, and `Succeeded`. `start` and `end` should
be in [ISO8601 datetime](https://en.wikipedia.org/wiki/ISO_8601) format with *mandatory offset* and `start` cannot be after `end`.
@@ -2844,6 +2939,54 @@ Server: spray-can/1.3.3
}
```
+Labels have to be queried in key and value pairs separated by a colon, i.e. `label-key:label-value`. For example, if a batch of workflows was submitted with the following labels JSON:
+```
+{
+ "label-key-1" : "label-value-1",
+ "label-key-2" : "label-value-2"
+}
+```
+
+A request to query for succeeded workflows with both labels would be:
+
+cURL:
+```
+$ curl "http://localhost:8000/api/workflows/v1/query?status=Succeeded&label=label-key-1:label-value-1&label=label-key-2:label-value-2"
+```
+
+HTTPie:
+```
+$ http "http://localhost:8000/api/workflows/v1/query?status=Succeeded&label=label-key-1:label-value-1&label=label-key-2:label-value-2"
+```
+
+Response:
+```
+HTTP/1.1 200 OK
+Content-Length: 608
+Content-Type: application/json; charset=UTF-8
+Date: Tue, 9 May 2017 20:24:33 GMT
+Server: spray-can/1.3.3
+
+{
+ "results": [
+ {
+ "end": "2017-05-09T16:07:30.515-04:00",
+ "id": "83fc23d5-48d1-456e-997a-087e55cd2e06",
+ "name": "wf_hello",
+ "start": "2017-05-09T16:01:51.940-04:00",
+ "status": "Succeeded"
+ },
+ {
+ "end": "2017-05-09T16:07:13.174-04:00",
+ "id": "7620a5c6-a5c6-466c-994b-dd8dca917b9b",
+ "name": "wf_goodbye",
+ "start": "2017-05-09T16:01:51.939-04:00",
+ "status": "Succeeded"
+ }
+ ]
+}
+```
+
Query data is refreshed from raw data periodically according to the configuration value `services.MetadataService.metadata-summary-refresh-interval`.
This interval represents the duration between the end of one summary refresh sweep and the beginning of the next sweep. If not specified the
refresh interval will default to 2 seconds. To turn off metadata summary refresh, specify an infinite refresh interval value with "Inf".
@@ -2915,6 +3058,37 @@ Server: spray-can/1.3.3
}
```
+## PATCH /api/workflows/:version/:id/labels
+
+This endpoint is used to update multiple labels for an existing workflow. When supplying a label with a key unique to the workflow submission, a new label key/value entry is appended to that workflow's metadata. When supplying a label with a key that is already associated to the workflow submission, the original label value is updated with the new value for that workflow's metadata.
+
+The [labels](#labels) must be a mapping of key/value pairs in JSON format that are sent via the PATCH body. The request content type must be
+`application/json`.
+
+cURL:
+
+```
+$ curl -X PATCH --header "Content-Type: application/json" -d "{\"label-key-1\":\"label-value-1\", \"label-key-2\": \"label-value-2\"}" "http://localhost:8000/api/workflows/v1/c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5/labels"
+```
+
+HTTPie:
+
+```
+$ echo '{"label-key-1":"label-value-1", "label-key-2": "label-value-2"}' | http PATCH "http://localhost:8000/api/workflows/v1/c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5/labels"
+```
+
+Response:
+```
+{ "id": "c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5",
+ "labels":
+ {
+ "label-key-1": "label-value-1",
+ "label-key-2": "label-value-2"
+ }
+}
+```
+
+
## GET /api/workflows/:version/:id/status
cURL:
@@ -3077,11 +3251,18 @@ Content-Type: application/json; charset=UTF-8
Content-Length: 7286
{
"workflowName": "sc_test",
+ "submittedFiles": {
+ "inputs": "{}",
+ "workflow": "task do_prepare {\n File input_file\n command {\n split -l 1 ${input_file} temp_ && ls -1 temp_?? > files.list\n }\n output {\n Array[File] split_files = read_lines(\"files.list\")\n }\n}\n# count the number of words in the input file, writing the count to an output file overkill in this case, but simulates a real scatter-gather that would just return an Int (map)\ntask do_scatter {\n File input_file\n command {\n wc -w ${input_file} > output.txt\n }\n output {\n File count_file = \"output.txt\"\n }\n}\n# aggregate the results back together (reduce)\ntask do_gather {\n Array[File] input_files\n command <<<\n cat ${sep = ' ' input_files} | awk '{s+=$$1} END {print s}'\n >>>\n output {\n Int sum = read_int(stdout())\n }\n}\nworkflow sc_test {\n call do_prepare\n scatter(f in do_prepare.split_files) {\n call do_scatter {\n input: input_file = f\n }\n }\n call do_gather {\n input: input_files = do_scatter.count_file\n }\n}",
+ "options": "{\n\n}",
+ "workflowType": "WDL"
+ },
"calls": {
"sc_test.do_prepare": [
{
"executionStatus": "Done",
"stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_prepare/stdout",
+ "backendStatus": "Done",
"shardIndex": -1,
"outputs": {
"split_files": [
@@ -3096,6 +3277,30 @@ Content-Length: 7286
"failOnStderr": "true",
"continueOnReturnCode": "0"
},
+ "callCaching": {
+ "allowResultReuse": true,
+ "hit": false,
+ "result": "Cache Miss",
+ "hashes": {
+ "output count": "C4CA4238A0B923820DCC509A6F75849B",
+ "runtime attribute": {
+ "docker": "N/A",
+ "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA",
+ "failOnStderr": "68934A3E9455FA72420237EB05902327"
+ },
+ "output expression": {
+ "Array": "D856082E6599CF6EC9F7F42013A2EC4C"
+ },
+ "input count": "C4CA4238A0B923820DCC509A6F75849B",
+ "backend name": "509820290D57F333403F490DDE7316F4",
+ "command template": "9F5F1F24810FACDF917906BA4EBA807D",
+ "input": {
+ "File input_file": "11fa6d7ed15b42f2f73a455bf5864b49"
+ }
+ },
+ "effectiveCallCachingMode": "ReadAndWriteCache"
+ },
+ "jobId": "34479",
"returnCode": 0,
"backend": "Local",
"end": "2016-02-04T13:47:56.000-05:00",
@@ -3109,15 +3314,40 @@ Content-Length: 7286
{
"executionStatus": "Preempted",
"stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/stdout",
+ "backendStatus": "Preempted",
"shardIndex": 0,
"outputs": {},
"runtimeAttributes": {
"failOnStderr": "true",
"continueOnReturnCode": "0"
},
+ "callCaching": {
+ "allowResultReuse": true,
+ "hit": false,
+ "result": "Cache Miss",
+ "hashes": {
+ "output count": "C4CA4238A0B923820DCC509A6F75849B",
+ "runtime attribute": {
+ "docker": "N/A",
+ "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA",
+ "failOnStderr": "68934A3E9455FA72420237EB05902327"
+ },
+ "output expression": {
+ "File count_file": "EF1B47FFA9990E8D058D177073939DF7"
+ },
+ "input count": "C4CA4238A0B923820DCC509A6F75849B",
+ "backend name": "509820290D57F333403F490DDE7316F4",
+ "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7",
+ "input": {
+ "File input_file": "a53794d214dc5dedbcecdf827bf683a2"
+ }
+ },
+ "effectiveCallCachingMode": "ReadAndWriteCache"
+ },
"inputs": {
"input_file": "f"
},
+ "jobId": "34496",
"backend": "Local",
"end": "2016-02-04T13:47:56.000-05:00",
"stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/stderr",
@@ -3128,6 +3358,7 @@ Content-Length: 7286
{
"executionStatus": "Done",
"stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/stdout",
+ "backendStatus": "Done",
"shardIndex": 0,
"outputs": {
"count_file": "/home/jdoe/cromwell/cromwell-test-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/output.txt"
@@ -3136,10 +3367,34 @@ Content-Length: 7286
"failOnStderr": "true",
"continueOnReturnCode": "0"
},
+ "callCaching": {
+ "allowResultReuse": true,
+ "hit": false,
+ "result": "Cache Miss",
+ "hashes": {
+ "output count": "C4CA4238A0B923820DCC509A6F75849B",
+ "runtime attribute": {
+ "docker": "N/A",
+ "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA",
+ "failOnStderr": "68934A3E9455FA72420237EB05902327"
+ },
+ "output expression": {
+ "File count_file": "EF1B47FFA9990E8D058D177073939DF7"
+ },
+ "input count": "C4CA4238A0B923820DCC509A6F75849B",
+ "backend name": "509820290D57F333403F490DDE7316F4",
+ "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7",
+ "input": {
+ "File input_file": "a53794d214dc5dedbcecdf827bf683a2"
+ }
+ },
+ "effectiveCallCachingMode": "ReadAndWriteCache"
+ },
"inputs": {
"input_file": "f"
},
"returnCode": 0,
+ "jobId": "34965",
"end": "2016-02-04T13:47:56.000-05:00",
"stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/stderr",
"attempt": 2,
@@ -3149,6 +3404,7 @@ Content-Length: 7286
{
"executionStatus": "Done",
"stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-1/stdout",
+ "backendStatus": "Done",
"shardIndex": 1,
"outputs": {
"count_file": "/home/jdoe/cromwell/cromwell-test-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-1/output.txt"
@@ -3157,10 +3413,34 @@ Content-Length: 7286
"failOnStderr": "true",
"continueOnReturnCode": "0"
},
+ "callCaching": {
+ "allowResultReuse": true,
+ "hit": false,
+ "result": "Cache Miss",
+ "hashes": {
+ "output count": "C4CA4238A0B923820DCC509A6F75849B",
+ "runtime attribute": {
+ "docker": "N/A",
+ "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA",
+ "failOnStderr": "68934A3E9455FA72420237EB05902327"
+ },
+ "output expression": {
+ "File count_file": "EF1B47FFA9990E8D058D177073939DF7"
+ },
+ "input count": "C4CA4238A0B923820DCC509A6F75849B",
+ "backend name": "509820290D57F333403F490DDE7316F4",
+ "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7",
+ "input": {
+ "File input_file": "d3410ade53df34c78488544285cf743c"
+ }
+ },
+ "effectiveCallCachingMode": "ReadAndWriteCache"
+ },
"inputs": {
"input_file": "f"
},
"returnCode": 0,
+ "jobId": "34495",
"backend": "Local",
"end": "2016-02-04T13:47:56.000-05:00",
"stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-1/stderr",
@@ -3173,6 +3453,7 @@ Content-Length: 7286
{
"executionStatus": "Done",
"stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_gather/stdout",
+ "backendStatus": "Done",
"shardIndex": -1,
"outputs": {
"sum": 12
@@ -3181,6 +3462,29 @@ Content-Length: 7286
"failOnStderr": "true",
"continueOnReturnCode": "0"
},
+ "callCaching": {
+ "allowResultReuse": true,
+ "hit": false,
+ "result": "Cache Miss",
+ "hashes": {
+ "output count": "C4CA4238A0B923820DCC509A6F75849B",
+ "runtime attribute": {
+ "docker": "N/A",
+ "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA",
+ "failOnStderr": "68934A3E9455FA72420237EB05902327"
+ },
+ "output expression": {
+ "File count_file": "EF1B47FFA9990E8D058D177073939DF7"
+ },
+ "input count": "C4CA4238A0B923820DCC509A6F75849B",
+ "backend name": "509820290D57F333403F490DDE7316F4",
+ "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7",
+ "input": {
+ "File input_file": "e0ef752ab4824939d7947f6012b7c141"
+ }
+ },
+ "effectiveCallCachingMode": "ReadAndWriteCache"
+ },
"inputs": {
"input_files": [
"/home/jdoe/cromwell/cromwell-test-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/output.txt",
@@ -3188,6 +3492,7 @@ Content-Length: 7286
]
},
"returnCode": 0,
+ "jobId": "34494",
"backend": "Local",
"end": "2016-02-04T13:47:57.000-05:00",
"stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_gather/stderr",
@@ -3212,6 +3517,10 @@ Content-Length: 7286
"inputs": {
"sc_test.do_prepare.input_file": "/home/jdoe/cromwell/11.txt"
},
+ "labels": {
+ "cromwell-workflow-name": "sc_test",
+ "cromwell-workflow-id": "cromwell-17633f21-11a9-414f-a95b-2e21431bd67d"
+ },
"submission": "2016-02-04T13:47:55.000-05:00",
"status": "Succeeded",
"end": "2016-02-04T13:47:57.000-05:00",
@@ -3330,6 +3639,16 @@ The `call` and `workflow` may optionally contain failures shaped like this:
]
```
+### Compressing the metadata response
+
+The response from the metadata endpoint can be quite large depending on the workflow. To help with this, Cromwell supports gzip encoding the metadata prior to sending it back to the client. In order to enable this, make sure your client is sending the `Accept-Encoding: gzip` header.
+
+For instance, with cURL:
+
+```
+$ curl -H "Accept-Encoding: gzip" http://localhost:8000/api/workflows/v1/b3e45584-9450-4e73-9523-fc3ccf749848/metadata
+```
+
## POST /api/workflows/:version/:id/abort
cURL:
@@ -3386,18 +3705,186 @@ Server: spray-can/1.3.3
}
```
-## GET /api/engine/:version/stats
+## GET /api/workflows/:version/callcaching/diff
+
+**Disclaimer**: This endpoint depends on hash values being published to the metadata, which only happens as of Cromwell 28.
+Workflows run with prior versions of Cromwell cannot be used with this endpoint.
+A `404 NotFound` will be returned when trying to use this endpoint if either workflow has been run on a prior version.
+
+This endpoint returns the hash differences between 2 *completed* (successfully or not) calls.
+The following query parameters are supported:
+
+| Parameter | Description | Required |
+|:---------:|:-----------------------------------------------------------------------------------------:|:--------:|
+| workflowA | Workflow ID of the first call | yes |
+| callA | Fully qualified name of the first call. **Including workflow name**. (see example below) | yes |
+| indexA | Shard index of the first call | depends |
+| workflowB | Workflow ID of the second call | yes |
+| callB | Fully qualified name of the second call. **Including workflow name**. (see example below) | yes |
+| indexB | Shard index of the second call | depends |
+
+About the `indexX` parameters: It is required if the call was in a scatter. Otherwise it should *not* be specified.
+If an index parameter is wrongly specified, the call will not be found and the request will result in a 404 response.
+
+cURL:
+
+```
+$ curl "http://localhost:8000/api/workflows/v1/callcaching/diff?workflowA=85174842-4a44-4355-a3a9-3a711ce556f1&callA=wf_hello.hello&workflowB=7479f8a8-efa4-46e4-af0d-802addc66e5d&callB=wf_hello.hello"
+```
+
+HTTPie:
+
+```
+$ http "http://localhost:8000/api/workflows/v1/callcaching/diff?workflowA=85174842-4a44-4355-a3a9-3a711ce556f1&callA=wf_hello.hello&workflowB=7479f8a8-efa4-46e4-af0d-802addc66e5d&callB=wf_hello.hello"
+```
+
+Response:
+```
+HTTP/1.1 200 OK
+Content-Length: 1274
+Content-Type: application/json; charset=UTF-8
+Date: Tue, 06 Jun 2017 16:44:33 GMT
+Server: spray-can/1.3.3
+
+{
+ "callA": {
+ "executionStatus": "Done",
+ "workflowId": "85174842-4a44-4355-a3a9-3a711ce556f1",
+ "callFqn": "wf_hello.hello",
+ "jobIndex": -1,
+ "allowResultReuse": true
+ },
+ "callB": {
+ "executionStatus": "Done",
+ "workflowId": "7479f8a8-efa4-46e4-af0d-802addc66e5d",
+ "callFqn": "wf_hello.hello",
+ "jobIndex": -1,
+ "allowResultReuse": true
+ },
+ "hashDifferential": [
+ {
+ "hashKey": "command template",
+ "callA": "4EAADE3CD5D558C5A6CFA4FD101A1486",
+ "callB": "3C7A0CA3D7A863A486DBF3F7005D4C95"
+ },
+ {
+ "hashKey": "input count",
+ "callA": "C4CA4238A0B923820DCC509A6F75849B",
+ "callB": "C81E728D9D4C2F636F067F89CC14862C"
+ },
+ {
+ "hashKey": "input: String addressee",
+ "callA": "D4CC65CB9B5F22D8A762532CED87FE8D",
+ "callB": "7235E005510D99CB4D5988B21AC97B6D"
+ },
+ {
+ "hashKey": "input: String addressee2",
+ "callA": "116C7E36B4AE3EAFD07FA4C536CE092F",
+ "callB": null
+ }
+ ]
+}
+```
+
+The response is a JSON object with 3 fields:
+
+- `callA` reports information about the first call, including its `allowResultReuse` value that will be used to determine whether or not this call can be cached to.
+- `callB` reports information about the second call, including its `allowResultReuse` value that will be used to determine whether or not this call can be cached to.
+- `hashDifferential` is an array in which each element represents a difference between the hashes of `callA` and `callB`.
+
+*If this array is empty, `callA` and `callB` have the same hashes*.
+
+Differences can be of 3 kinds:
+
+- `callA` and `callB` both have the same hash key but their values are different.
+For instance, in the example above,
+
+```json
+{
+ "hashKey": "input: String addressee",
+ "callA": "D4CC65CB9B5F22D8A762532CED87FE8D",
+ "callB": "7235E005510D99CB4D5988B21AC97B6D"
+}
+```
+
+indicates that both `callA` and `callB` have a `String` input called `addressee`, but different values were used at runtime, resulting in different MD5 hashes.
+
+- `callA` has a hash key that `callB` doesn't have.
+For instance, in the example above,
+
+```json
+{
+ "hashKey": "input: String addressee2",
+ "callA": "116C7E36B4AE3EAFD07FA4C536CE092F",
+ "callB": null
+}
+```
+
+indicates that `callA` has a `String` input called `addressee2` that doesn't exist in `callB`. For that reason the value of the second field is `null`.
+
+- `callB` has a hash key that `callA` doesn't have. This is the same case as above but reversed.
+
+If no cache entry for `callA` or `callB` can be found, the response will be in the following format:
+
+```
+HTTP/1.1 404 NotFound
+Content-Length: 178
+Content-Type: application/json; charset=UTF-8
+Date: Tue, 06 Jun 2017 17:02:15 GMT
+Server: spray-can/1.3.3
+
+{
+ "status": "error",
+  "message": "Cannot find call 7479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1"
+}
+```
+
+If neither `callA` nor `callB` can be found, the response will be in the following format:
+
+
+```
+HTTP/1.1 404 NotFound
+Content-Length: 178
+Content-Type: application/json; charset=UTF-8
+Date: Tue, 06 Jun 2017 17:02:15 GMT
+Server: spray-can/1.3.3
+
+{
+ "status": "error",
+  "message": "Cannot find calls 85174842-4a44-4355-a3a9-3a711ce556f1:wf_hello.hello:-1, 7479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1"
+}
+```
+
+If the query is malformed and required parameters are missing, the response will be in the following format:
+
+```
+HTTP/1.1 400 BadRequest
+Content-Length: 178
+Content-Type: application/json; charset=UTF-8
+Date: Tue, 06 Jun 2017 17:02:15 GMT
+Server: spray-can/1.3.3
+{
+ "status": "fail",
+ "message": "Wrong parameters for call cache diff query:\nmissing workflowA query parameter\nmissing callB query parameter",
+ "errors": [
+ "missing workflowA query parameter",
+ "missing callB query parameter"
+ ]
+}
+```
+
+## GET /engine/:version/stats
This endpoint returns some basic statistics on the current state of the engine. At the moment that includes the number of running workflows and the number of active jobs.
cURL:
```
-$ curl http://localhost:8000/api/engine/v1/stats
+$ curl http://localhost:8000/engine/v1/stats
```
HTTPie:
```
-$ http http://localhost:8000/api/engine/v1/stats
+$ http http://localhost:8000/engine/v1/stats
```
Response:
@@ -3413,18 +3900,18 @@ Response:
}
```
-## GET /api/engine/:version/version
+## GET /engine/:version/version
This endpoint returns the version of the Cromwell engine.
cURL:
```
-$ curl http://localhost:8000/api/engine/v1/version
+$ curl http://localhost:8000/engine/v1/version
```
HTTPie:
```
-$ http http://localhost:8000/api/engine/v1/version
+$ http http://localhost:8000/engine/v1/version
```
Response:
@@ -3439,8 +3926,6 @@ Response:
}
```
-
-
## Error handling
Requests that Cromwell can't process return a failure in the form of a JSON response respecting the following JSON schema:
@@ -3494,25 +3979,3 @@ e.g.
The `message` field contains a short description of the error.
The `errors` field is optional and may contain additional information about why the request failed.
-
-# Developer
-
-## Generating table of contents on Markdown files
-
-```
-$ pip install mdtoc
-$ mdtoc --check-links README.md
-```
-
-## Generating and Hosting ScalaDoc
-
-Essentially run `sbt doc` then commit the generated code into the `gh-pages` branch on this repository
-
-```
-$ sbt doc
-$ git co gh-pages
-$ mv target/scala-2.11/api scaladoc
-$ git add scaladoc
-$ git commit -m "API Docs"
-$ git push origin gh-pages
-```
diff --git a/backend/CHANGELOG.MD b/backend/CHANGELOG.MD
deleted file mode 100644
index 6e6cfdff0..000000000
--- a/backend/CHANGELOG.MD
+++ /dev/null
@@ -1,5 +0,0 @@
-# Cromwell-Backend Change Log
-
-1.0:
-=====
-
diff --git a/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala b/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala
index 3fa6e9c19..c6ddcbcc4 100644
--- a/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala
+++ b/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala
@@ -3,9 +3,9 @@ package cromwell.backend
import cromwell.backend.io.JobPaths
import cromwell.core.path.Path
import cromwell.core.{JobKey, WorkflowId}
-import wdl4s.Workflow
+import wdl4s.wdl.WdlWorkflow
-case class BackendJobBreadCrumb(workflow: Workflow, id: WorkflowId, jobKey: JobKey) {
+case class BackendJobBreadCrumb(workflow: WdlWorkflow, id: WorkflowId, jobKey: JobKey) {
def toPath(root: Path): Path = {
val workflowPart = root.resolve(workflow.unqualifiedName).resolve(id.toString)
JobPaths.callPathBuilder(workflowPart, jobKey)
diff --git a/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala b/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala
index ffeccd7cd..068859172 100644
--- a/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala
+++ b/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala
@@ -7,8 +7,8 @@ import cromwell.backend.BackendLifecycleActor._
import cromwell.backend.wdl.OutputEvaluator
import cromwell.core.path.Path
import cromwell.core.{CallOutputs, ExecutionEvent, JobKey}
-import wdl4s.expression.WdlStandardLibraryFunctions
-import wdl4s.values.WdlValue
+import wdl4s.wdl.expression.WdlStandardLibraryFunctions
+import wdl4s.wdl.values.WdlValue
import scala.concurrent.Future
import scala.util.{Success, Try}
@@ -24,7 +24,7 @@ object BackendJobExecutionActor {
sealed trait BackendJobExecutionActorResponse extends BackendWorkflowLifecycleActorResponse
sealed trait BackendJobExecutionResponse extends BackendJobExecutionActorResponse { def jobKey: JobKey }
- case class JobSucceededResponse(jobKey: BackendJobDescriptorKey, returnCode: Option[Int], jobOutputs: CallOutputs, jobDetritusFiles: Option[Map[String, Path]], executionEvents: Seq[ExecutionEvent]) extends BackendJobExecutionResponse
+ case class JobSucceededResponse(jobKey: BackendJobDescriptorKey, returnCode: Option[Int], jobOutputs: CallOutputs, jobDetritusFiles: Option[Map[String, Path]], executionEvents: Seq[ExecutionEvent], dockerImageUsed: Option[String]) extends BackendJobExecutionResponse
case class AbortedResponse(jobKey: BackendJobDescriptorKey) extends BackendJobExecutionResponse
sealed trait BackendJobFailedResponse extends BackendJobExecutionResponse { def throwable: Throwable; def returnCode: Option[Int] }
case class JobFailedNonRetryableResponse(jobKey: JobKey, throwable: Throwable, returnCode: Option[Int]) extends BackendJobFailedResponse
diff --git a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala
index a6a09cff4..5c1031726 100644
--- a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala
+++ b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala
@@ -3,7 +3,7 @@ package cromwell.backend
import akka.actor.{Actor, ActorRef}
import cromwell.backend.BackendLifecycleActor._
import cromwell.core.logging.{JobLogging, WorkflowLogging}
-import wdl4s.TaskCall
+import wdl4s.wdl.WdlTaskCall
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
@@ -65,7 +65,7 @@ trait BackendWorkflowLifecycleActor extends BackendLifecycleActor with WorkflowL
/**
* The subset of calls which this backend will be expected to run
*/
- protected def calls: Set[TaskCall]
+ protected def calls: Set[WdlTaskCall]
}
trait BackendJobLifecycleActor extends BackendLifecycleActor with JobLogging {
diff --git a/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala b/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala
index d0da2c2c5..c255fdc12 100644
--- a/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala
+++ b/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala
@@ -6,8 +6,8 @@ import cromwell.backend.io.WorkflowPathsWithDocker
import cromwell.core.CallOutputs
import cromwell.core.JobExecutionToken.JobExecutionTokenType
import cromwell.core.path.Path
-import wdl4s.TaskCall
-import wdl4s.expression.{PureStandardLibraryFunctions, WdlStandardLibraryFunctions}
+import wdl4s.wdl.WdlTaskCall
+import wdl4s.wdl.expression.{PureStandardLibraryFunctions, WdlStandardLibraryFunctions}
trait BackendLifecycleActorFactory {
@@ -18,8 +18,9 @@ trait BackendLifecycleActorFactory {
def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor,
ioActor: ActorRef,
- calls: Set[TaskCall],
- serviceRegistryActor: ActorRef): Option[Props] = None
+ calls: Set[WdlTaskCall],
+ serviceRegistryActor: ActorRef,
+ restarting: Boolean): Option[Props] = None
/* ****************************** */
/* Job Execution */
@@ -39,7 +40,7 @@ trait BackendLifecycleActorFactory {
def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor,
ioActor: ActorRef,
- calls: Set[TaskCall],
+ calls: Set[WdlTaskCall],
jobExecutionMap: JobExecutionMap,
workflowOutputs: CallOutputs,
initializationData: Option[BackendInitializationData]): Option[Props] = None
diff --git a/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala b/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala
index 5bf1d05fd..18e341ad7 100644
--- a/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala
+++ b/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala
@@ -9,10 +9,10 @@ import cromwell.backend.validation.ContinueOnReturnCodeValidation
import cromwell.core.{WorkflowMetadataKeys, WorkflowOptions}
import cromwell.services.metadata.MetadataService.PutMetadataAction
import cromwell.services.metadata.{MetadataEvent, MetadataKey, MetadataValue}
-import wdl4s._
-import wdl4s.expression.PureStandardLibraryFunctions
-import wdl4s.types._
-import wdl4s.values.WdlValue
+import wdl4s.wdl._
+import wdl4s.wdl.expression.PureStandardLibraryFunctions
+import wdl4s.wdl.types._
+import wdl4s.wdl.values.WdlValue
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}
@@ -38,7 +38,7 @@ object BackendWorkflowInitializationActor {
trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor with ActorLogging {
def serviceRegistryActor: ActorRef
- def calls: Set[TaskCall]
+ def calls: Set[WdlTaskCall]
/**
* This method is meant only as a "pre-flight check" validation of runtime attribute expressions during workflow
@@ -69,7 +69,7 @@ trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor w
* return `true` in both cases.
*/
protected def continueOnReturnCodePredicate(valueRequired: Boolean)(wdlExpressionMaybe: Option[WdlValue]): Boolean = {
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(wdlExpressionMaybe)
+ ContinueOnReturnCodeValidation.default(configurationDescriptor.backendRuntimeConfig).validateOptionalExpression(wdlExpressionMaybe)
}
protected def runtimeAttributeValidators: Map[String, Option[WdlValue] => Boolean]
@@ -125,7 +125,7 @@ trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor w
defaultRuntimeAttributes.get(name)
}
- def badRuntimeAttrsForTask(task: Task) = {
+ def badRuntimeAttrsForTask(task: WdlTask) = {
runtimeAttributeValidators map { case (attributeName, validator) =>
val value = task.runtimeAttributes.attrs.get(attributeName) orElse defaultRuntimeAttribute(attributeName)
attributeName -> ((value, validator(value)))
diff --git a/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala b/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala
index 0c9bf8727..7e548df2f 100644
--- a/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala
+++ b/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala
@@ -3,9 +3,9 @@ package cromwell.backend
import cromwell.core.WorkflowOptions
import cromwell.util.JsonFormatting.WdlValueJsonFormatter
import lenthall.util.TryUtil
-import wdl4s.{WdlExpressionException, _}
-import wdl4s.expression.WdlStandardLibraryFunctions
-import wdl4s.values.WdlValue
+import wdl4s.wdl.{WdlExpressionException, _}
+import wdl4s.wdl.expression.WdlStandardLibraryFunctions
+import wdl4s.wdl.values.WdlValue
import scala.util.{Success, Try}
@@ -47,7 +47,6 @@ object RuntimeAttributeDefinition {
case (runtimeAttributeDefinition, Success(jsValue)) => runtimeAttributeDefinition.name -> jsValue.convertTo[WdlValue]
case (RuntimeAttributeDefinition(name, Some(factoryDefault), _), _) => name -> factoryDefault
}
-
specifiedAttributes ++ defaults
}
}
diff --git a/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala b/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala
index 96f8587c6..509f30286 100644
--- a/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala
+++ b/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala
@@ -34,6 +34,8 @@ object AsyncBackendJobExecutionActor {
trait AsyncBackendJobExecutionActor { this: Actor with ActorLogging =>
+ def dockerImageUsed: Option[String]
+
// The scala package object (scala/package.scala) contains a neat list of runtime errors that are always going to be fatal.
// We also consider any Error as fatal, and include the CromwellFatalExceptionMarker so we can mark our own fatal exceptions.
def isFatal(throwable: Throwable): Boolean = throwable match {
@@ -83,7 +85,7 @@ trait AsyncBackendJobExecutionActor { this: Actor with ActorLogging =>
context.system.scheduler.scheduleOnce(pollBackOff.backoffMillis.millis, self, IssuePollRequest(handle))
()
case Finish(SuccessfulExecutionHandle(outputs, returnCode, jobDetritusFiles, executionEvents, _)) =>
- completionPromise.success(JobSucceededResponse(jobDescriptor.key, Some(returnCode), outputs, Option(jobDetritusFiles), executionEvents))
+ completionPromise.success(JobSucceededResponse(jobDescriptor.key, Some(returnCode), outputs, Option(jobDetritusFiles), executionEvents, dockerImageUsed))
context.stop(self)
case Finish(FailedNonRetryableExecutionHandle(throwable, returnCode)) =>
completionPromise.success(JobFailedNonRetryableResponse(jobDescriptor.key, throwable, returnCode))
diff --git a/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala b/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala
index 79503c927..dd6091f42 100644
--- a/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala
+++ b/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala
@@ -2,7 +2,7 @@ package cromwell.backend.async
import cromwell.core.path.Path
import lenthall.exception.ThrowableAggregation
-import wdl4s.values.WdlValue
+import wdl4s.wdl.values.WdlValue
abstract class KnownJobFailureException extends Exception {
def stderrPath: Option[Path]
diff --git a/backend/src/main/scala/cromwell/backend/backend.scala b/backend/src/main/scala/cromwell/backend/backend.scala
index 5839f268b..527a514f9 100644
--- a/backend/src/main/scala/cromwell/backend/backend.scala
+++ b/backend/src/main/scala/cromwell/backend/backend.scala
@@ -2,19 +2,19 @@ package cromwell.backend
import com.typesafe.config.Config
import cromwell.core.WorkflowOptions.WorkflowOption
-import cromwell.core.callcaching.CallCachingEligibility
+import cromwell.core.callcaching.MaybeCallCachingEligible
import cromwell.core.labels.Labels
import cromwell.core.{CallKey, WorkflowId, WorkflowOptions}
import cromwell.services.keyvalue.KeyValueServiceActor.KvResponse
-import wdl4s._
-import wdl4s.values.WdlValue
+import wdl4s.wdl._
+import wdl4s.wdl.values.WdlValue
import scala.util.Try
/**
* For uniquely identifying a job which has been or will be sent to the backend.
*/
-case class BackendJobDescriptorKey(call: TaskCall, index: Option[Int], attempt: Int) extends CallKey {
+case class BackendJobDescriptorKey(call: WdlTaskCall, index: Option[Int], attempt: Int) extends CallKey {
def scope = call
private val indexString = index map { _.toString } getOrElse "NA"
val tag = s"${call.fullyQualifiedName}:$indexString:$attempt"
@@ -28,7 +28,7 @@ case class BackendJobDescriptor(workflowDescriptor: BackendWorkflowDescriptor,
key: BackendJobDescriptorKey,
runtimeAttributes: Map[LocallyQualifiedName, WdlValue],
inputDeclarations: EvaluatedTaskInputs,
- callCachingEligibility: CallCachingEligibility,
+ maybeCallCachingEligible: MaybeCallCachingEligible,
prefetchedKvStoreEntries: Map[String, KvResponse]) {
val fullyQualifiedInputs = inputDeclarations map { case (declaration, value) => declaration.fullyQualifiedName -> value }
val call = key.call
@@ -37,7 +37,7 @@ case class BackendJobDescriptor(workflowDescriptor: BackendWorkflowDescriptor,
object BackendWorkflowDescriptor {
def apply(id: WorkflowId,
- workflow: Workflow,
+ workflow: WdlWorkflow,
knownValues: Map[FullyQualifiedName, WdlValue],
workflowOptions: WorkflowOptions,
customLabels: Labels) = {
@@ -49,7 +49,7 @@ object BackendWorkflowDescriptor {
* For passing to a BackendActor construction time
*/
case class BackendWorkflowDescriptor(id: WorkflowId,
- workflow: Workflow,
+ workflow: WdlWorkflow,
knownValues: Map[FullyQualifiedName, WdlValue],
workflowOptions: WorkflowOptions,
customLabels: Labels,
@@ -65,7 +65,11 @@ case class BackendWorkflowDescriptor(id: WorkflowId,
/**
* For passing to a BackendActor construction time
*/
-case class BackendConfigurationDescriptor(backendConfig: Config, globalConfig: Config)
+case class BackendConfigurationDescriptor(backendConfig: Config, globalConfig: Config) {
+
+ lazy val backendRuntimeConfig = if (backendConfig.hasPath("default-runtime-attributes"))
+ Option(backendConfig.getConfig("default-runtime-attributes")) else None
+}
final case class AttemptedLookupResult(name: String, value: Try[WdlValue]) {
def toPair = name -> value
diff --git a/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala b/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala
index 9ece33cdd..7af985153 100644
--- a/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala
+++ b/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala
@@ -2,15 +2,15 @@ package cromwell.backend.io
import cromwell.backend.BackendJobDescriptor
import cromwell.core.CallContext
-import wdl4s.TaskCall
-import wdl4s.expression.{NoFunctions, PureStandardLibraryFunctionsLike}
-import wdl4s.values._
+import wdl4s.wdl.WdlTaskCall
+import wdl4s.wdl.expression.{NoFunctions, PureStandardLibraryFunctionsLike}
+import wdl4s.wdl.values._
trait GlobFunctions extends PureStandardLibraryFunctionsLike {
def callContext: CallContext
- def findGlobOutputs(call: TaskCall, jobDescriptor: BackendJobDescriptor): Set[WdlGlobFile] = {
+ def findGlobOutputs(call: WdlTaskCall, jobDescriptor: BackendJobDescriptor): Set[WdlGlobFile] = {
val globOutputs = call.task.findOutputFiles(jobDescriptor.fullyQualifiedInputs, NoFunctions) collect {
case glob: WdlGlobFile => glob
}
diff --git a/backend/src/main/scala/cromwell/backend/io/JobPaths.scala b/backend/src/main/scala/cromwell/backend/io/JobPaths.scala
index 2e0dfab43..72057c202 100644
--- a/backend/src/main/scala/cromwell/backend/io/JobPaths.scala
+++ b/backend/src/main/scala/cromwell/backend/io/JobPaths.scala
@@ -24,16 +24,17 @@ object JobPaths {
}
}
-trait JobPaths { this: WorkflowPaths =>
+trait JobPaths {
import JobPaths._
+ def workflowPaths: WorkflowPaths
def returnCodeFilename: String = "rc"
def stdoutFilename: String = "stdout"
def stderrFilename: String = "stderr"
def scriptFilename: String = "script"
def jobKey: JobKey
- lazy val callRoot = callPathBuilder(workflowRoot, jobKey)
+ lazy val callRoot = callPathBuilder(workflowPaths.workflowRoot, jobKey)
lazy val callExecutionRoot = callRoot
lazy val stdout = callExecutionRoot.resolve(stdoutFilename)
lazy val stderr = callExecutionRoot.resolve(stderrFilename)
diff --git a/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala b/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala
index 9524e8231..2febef209 100644
--- a/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala
+++ b/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala
@@ -4,15 +4,21 @@ import com.typesafe.config.Config
import cromwell.backend.{BackendJobDescriptorKey, BackendWorkflowDescriptor}
import cromwell.core.path.{Path, PathBuilder}
-class JobPathsWithDocker(val jobKey: BackendJobDescriptorKey,
- workflowDescriptor: BackendWorkflowDescriptor,
- config: Config,
- pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders) extends WorkflowPathsWithDocker(
- workflowDescriptor, config, pathBuilders) with JobPaths {
+object JobPathsWithDocker {
+ def apply(jobKey: BackendJobDescriptorKey,
+ workflowDescriptor: BackendWorkflowDescriptor,
+ config: Config,
+ pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders) = {
+ val workflowPaths = new WorkflowPathsWithDocker(workflowDescriptor, config, pathBuilders)
+ new JobPathsWithDocker(workflowPaths, jobKey)
+ }
+}
+
+case class JobPathsWithDocker private[io] (override val workflowPaths: WorkflowPathsWithDocker, jobKey: BackendJobDescriptorKey) extends JobPaths {
import JobPaths._
-
+
override lazy val callExecutionRoot = { callRoot.resolve("execution") }
- val callDockerRoot = callPathBuilder(dockerWorkflowRoot, jobKey)
+ val callDockerRoot = callPathBuilder(workflowPaths.dockerWorkflowRoot, jobKey)
val callExecutionDockerRoot = callDockerRoot.resolve("execution")
val callInputsRoot = callRoot.resolve("inputs")
@@ -30,7 +36,7 @@ class JobPathsWithDocker(val jobKey: BackendJobDescriptorKey,
*
* TODO: this assumes that p.startsWith(localExecutionRoot)
*/
- val subpath = p.subpath(executionRoot.getNameCount, p.getNameCount)
+ val subpath = p.subpath(workflowPaths.executionRoot.getNameCount, p.getNameCount)
WorkflowPathsWithDocker.DockerRoot.resolve(subpath)
}
}
diff --git a/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala b/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala
index 8a784a1ae..fc7c1a960 100644
--- a/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala
+++ b/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala
@@ -45,5 +45,13 @@ trait WorkflowPaths extends PathFactory {
* @param jobWorkflowDescriptor The workflow descriptor for the job.
* @return The paths for the job.
*/
- def toJobPaths(jobKey: BackendJobDescriptorKey, jobWorkflowDescriptor: BackendWorkflowDescriptor): JobPaths
+ def toJobPaths(jobKey: BackendJobDescriptorKey, jobWorkflowDescriptor: BackendWorkflowDescriptor): JobPaths = {
+ // If the descriptors are the same, no need to create a new WorkflowPaths
+ if (workflowDescriptor == jobWorkflowDescriptor) toJobPaths(this, jobKey)
+ else toJobPaths(withDescriptor(jobWorkflowDescriptor), jobKey)
+ }
+
+ protected def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): JobPaths
+
+ protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths
}
diff --git a/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala b/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala
index 4e7811feb..1809283b4 100644
--- a/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala
+++ b/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala
@@ -8,11 +8,12 @@ object WorkflowPathsWithDocker {
val DockerRoot: Path = DefaultPathBuilder.get("/cromwell-executions")
}
-class WorkflowPathsWithDocker(val workflowDescriptor: BackendWorkflowDescriptor, val config: Config, val pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders) extends WorkflowPaths {
+final case class WorkflowPathsWithDocker(workflowDescriptor: BackendWorkflowDescriptor, config: Config, pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders) extends WorkflowPaths {
val dockerWorkflowRoot: Path = workflowPathBuilder(WorkflowPathsWithDocker.DockerRoot)
- override def toJobPaths(jobKey: BackendJobDescriptorKey,
- jobWorkflowDescriptor: BackendWorkflowDescriptor): JobPathsWithDocker = {
- new JobPathsWithDocker(jobKey, jobWorkflowDescriptor, config, pathBuilders)
+ override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): JobPathsWithDocker = {
+ new JobPathsWithDocker(workflowPaths.asInstanceOf[WorkflowPathsWithDocker], jobKey)
}
+
+ override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = this.copy(workflowDescriptor = workflowDescriptor)
}
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala
index 1446ce92b..0d572de10 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala
@@ -19,8 +19,8 @@ import cromwell.services.keyvalue.KvClient
import cromwell.services.metadata.CallMetadataKeys
import lenthall.util.TryUtil
import net.ceedubs.ficus.Ficus._
-import wdl4s._
-import wdl4s.values.{WdlFile, WdlGlobFile, WdlSingleFile, WdlValue}
+import wdl4s.wdl._
+import wdl4s.wdl.values._
import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future, Promise}
import scala.util.{Failure, Success, Try}
@@ -251,9 +251,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta
*
* @return the execution handle for the job.
*/
- def executeAsync()(implicit ec: ExecutionContext): Future[ExecutionHandle] = {
- Future.fromTry(Try(execute()))
- }
+ def executeAsync(): Future[ExecutionHandle] = Future.fromTry(Try(execute()))
/**
* Recovers the specified job id, or starts a new job. The default implementation simply calls execute().
@@ -269,9 +267,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta
* @param jobId The previously recorded job id.
* @return the execution handle for the job.
*/
- def recoverAsync(jobId: StandardAsyncJob)(implicit ec: ExecutionContext): Future[ExecutionHandle] = {
- Future.fromTry(Try(recover(jobId)))
- }
+ def recoverAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = Future.fromTry(Try(recover(jobId)))
/**
* Returns the run status for the job.
@@ -289,10 +285,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta
* @param handle The handle of the running job.
* @return The status of the job.
*/
- def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle)
- (implicit ec: ExecutionContext): Future[StandardAsyncRunStatus] = {
- Future.fromTry(Try(pollStatus(handle)))
- }
+ def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[StandardAsyncRunStatus] = Future.fromTry(Try(pollStatus(handle)))
/**
* Adds custom behavior invoked when polling fails due to some exception. By default adds nothing.
@@ -637,7 +630,9 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta
val stderrSizeAndReturnCode = for {
returnCodeAsString <- contentAsStringAsync(jobPaths.returnCode)
- stderrSize <- sizeAsync(jobPaths.stderr)
+ // Only check stderr size if we need to, otherwise this results in a lot of unnecessary I/O that
+ // may fail due to race conditions on quickly-executing jobs.
+ stderrSize <- if (failOnStdErr) sizeAsync(jobPaths.stderr) else Future.successful(0L)
} yield (stderrSize, returnCodeAsString)
stderrSizeAndReturnCode flatMap {
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala b/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala
index cb76541a1..c6c0b5183 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala
@@ -8,7 +8,7 @@ import cromwell.backend.validation.{RuntimeAttributesValidation, ValidatedRuntim
import cromwell.core.logging.JobLogging
import cromwell.core.path.Path
import cromwell.services.metadata.CallMetadataKeys
-import wdl4s.TaskCall
+import wdl4s.wdl.WdlTaskCall
import scala.util.Try
@@ -36,7 +36,7 @@ trait StandardCachingActorHelper extends JobCachingActorHelper {
def serviceRegistryActor: ActorRef
// So... JobPaths doesn't extend WorkflowPaths, but does contain a self-type
- lazy val workflowPaths: WorkflowPaths = jobPaths.asInstanceOf[WorkflowPaths]
+ lazy val workflowPaths: WorkflowPaths = jobPaths.workflowPaths
def getPath(str: String): Try[Path] = workflowPaths.getPath(str)
@@ -46,7 +46,7 @@ trait StandardCachingActorHelper extends JobCachingActorHelper {
*/
lazy val workflowDescriptor: BackendWorkflowDescriptor = jobDescriptor.workflowDescriptor
- lazy val call: TaskCall = jobDescriptor.key.call
+ lazy val call: WdlTaskCall = jobDescriptor.key.call
lazy val standardInitializationData: StandardInitializationData = BackendInitializationData.
as[StandardInitializationData](backendInitializationDataOption)
@@ -79,7 +79,7 @@ trait StandardCachingActorHelper extends JobCachingActorHelper {
}
/**
- * Returns any custom medatata for the backend.
+ * Returns any custom metadata for the backend.
*
* @return any custom metadata for the backend.
*/
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala b/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala
index 3b29b796d..010240319 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala
@@ -4,8 +4,8 @@ import cromwell.backend.io.GlobFunctions
import cromwell.backend.wdl.{ReadLikeFunctions, WriteFunctions}
import cromwell.core.CallContext
import cromwell.core.path.{Path, PathBuilder}
-import wdl4s.expression.PureStandardLibraryFunctionsLike
-import wdl4s.values.{WdlFile, WdlValue}
+import wdl4s.wdl.expression.PureStandardLibraryFunctionsLike
+import wdl4s.wdl.values.{WdlFile, WdlValue}
import scala.util.{Success, Try}
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala
index 6acbe6cb7..09d80a77e 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala
@@ -6,14 +6,14 @@ import cromwell.backend.io.WorkflowPaths
import cromwell.core.CallOutputs
import cromwell.core.Dispatcher.IoDispatcher
import cromwell.core.path.{Path, PathCopier}
-import wdl4s.TaskCall
+import wdl4s.wdl.WdlTaskCall
import scala.concurrent.Future
trait StandardFinalizationActorParams {
def workflowDescriptor: BackendWorkflowDescriptor
- def calls: Set[TaskCall]
+ def calls: Set[WdlTaskCall]
def jobExecutionMap: JobExecutionMap
@@ -27,7 +27,7 @@ trait StandardFinalizationActorParams {
case class DefaultStandardFinalizationActorParams
(
workflowDescriptor: BackendWorkflowDescriptor,
- calls: Set[TaskCall],
+ calls: Set[WdlTaskCall],
jobExecutionMap: JobExecutionMap,
workflowOutputs: CallOutputs,
initializationDataOption: Option[BackendInitializationData],
@@ -45,7 +45,7 @@ class StandardFinalizationActor(val standardParams: StandardFinalizationActorPar
extends BackendWorkflowFinalizationActor {
override lazy val workflowDescriptor: BackendWorkflowDescriptor = standardParams.workflowDescriptor
- override lazy val calls: Set[TaskCall] = standardParams.calls
+ override lazy val calls: Set[WdlTaskCall] = standardParams.calls
lazy val initializationDataOption: Option[BackendInitializationData] = standardParams.initializationDataOption
lazy val jobExecutionMap: JobExecutionMap = standardParams.jobExecutionMap
lazy val workflowOutputs: CallOutputs = standardParams.workflowOutputs
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala
index d3389bcc7..c5f2c5ed8 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala
@@ -7,8 +7,8 @@ import cromwell.backend.wfs.WorkflowPathBuilder
import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor, BackendWorkflowInitializationActor}
import cromwell.core.WorkflowOptions
import cromwell.core.path.{DefaultPathBuilder, PathBuilder}
-import wdl4s.TaskCall
-import wdl4s.values.WdlValue
+import wdl4s.wdl.WdlTaskCall
+import wdl4s.wdl.values.WdlValue
import scala.concurrent.Future
import scala.util.Try
@@ -16,7 +16,7 @@ import scala.util.Try
trait StandardInitializationActorParams {
def workflowDescriptor: BackendWorkflowDescriptor
- def calls: Set[TaskCall]
+ def calls: Set[WdlTaskCall]
def serviceRegistryActor: ActorRef
@@ -27,9 +27,10 @@ case class DefaultInitializationActorParams
(
workflowDescriptor: BackendWorkflowDescriptor,
ioActor: ActorRef,
- calls: Set[TaskCall],
+ calls: Set[WdlTaskCall],
serviceRegistryActor: ActorRef,
- configurationDescriptor: BackendConfigurationDescriptor
+ configurationDescriptor: BackendConfigurationDescriptor,
+ restarting: Boolean
) extends StandardInitializationActorParams
/**
@@ -44,21 +45,21 @@ class StandardInitializationActor(val standardParams: StandardInitializationActo
override lazy val serviceRegistryActor: ActorRef = standardParams.serviceRegistryActor
- override lazy val calls: Set[TaskCall] = standardParams.calls
+ override lazy val calls: Set[WdlTaskCall] = standardParams.calls
override def beforeAll(): Future[Option[BackendInitializationData]] = {
- Future.fromTry(Try(Option(initializationData)))
+ initializationData map Option.apply
}
- lazy val initializationData: StandardInitializationData =
- new StandardInitializationData(workflowPaths, runtimeAttributesBuilder, classOf[StandardExpressionFunctions])
+ lazy val initializationData: Future[StandardInitializationData] =
+ workflowPaths map { new StandardInitializationData(_, runtimeAttributesBuilder, classOf[StandardExpressionFunctions]) }
lazy val expressionFunctions: Class[_ <: StandardExpressionFunctions] = classOf[StandardExpressionFunctions]
- lazy val pathBuilders: List[PathBuilder] = List(DefaultPathBuilder)
+ lazy val pathBuilders: Future[List[PathBuilder]] = Future.successful(List(DefaultPathBuilder))
- lazy val workflowPaths: WorkflowPaths =
- WorkflowPathBuilder.workflowPaths(configurationDescriptor, workflowDescriptor, pathBuilders)
+ lazy val workflowPaths: Future[WorkflowPaths] =
+ pathBuilders map { WorkflowPathBuilder.workflowPaths(configurationDescriptor, workflowDescriptor, _) }
/**
* Returns the runtime attribute builder for this backend.
@@ -68,7 +69,7 @@ class StandardInitializationActor(val standardParams: StandardInitializationActo
* @return runtime attributes builder with possible custom validations
*/
def runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder =
- StandardValidatedRuntimeAttributesBuilder.default
+ StandardValidatedRuntimeAttributesBuilder.default(configurationDescriptor.backendRuntimeConfig)
override protected lazy val runtimeAttributeValidators: Map[String, (Option[WdlValue]) => Boolean] = {
runtimeAttributesBuilder.validatorMap
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala
index e4dae0e0b..734377dd5 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala
@@ -15,7 +15,7 @@ class StandardInitializationData
standardExpressionFunctionsClass.getConstructor(classOf[StandardExpressionFunctionsParams])
def expressionFunctions(jobPaths: JobPaths): StandardExpressionFunctions = {
- val pathBuilders = jobPaths.asInstanceOf[WorkflowPaths].pathBuilders
+ val pathBuilders = jobPaths.workflowPaths.pathBuilders
val callContext = jobPaths.callContext
val standardParams = DefaultStandardExpressionFunctionsParams(pathBuilders, callContext)
standardExpressionFunctionsConstructor.newInstance(standardParams)
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala b/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala
index 3c14def64..9c692c9a2 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala
@@ -7,8 +7,8 @@ import cromwell.backend.standard.callcaching._
import cromwell.core.Dispatcher.BackendDispatcher
import cromwell.core.path.Path
import cromwell.core.{CallOutputs, Dispatcher}
-import wdl4s.TaskCall
-import wdl4s.expression.WdlStandardLibraryFunctions
+import wdl4s.wdl.WdlTaskCall
+import wdl4s.wdl.expression.WdlStandardLibraryFunctions
/**
* May be extended for using the standard sync/async backend pattern.
@@ -73,16 +73,16 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory {
*/
lazy val finalizationActorClassOption: Option[Class[_ <: StandardFinalizationActor]] = Option(classOf[StandardFinalizationActor])
- override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall],
- serviceRegistryActor: ActorRef): Option[Props] = {
- val params = workflowInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor)
+ override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall],
+ serviceRegistryActor: ActorRef, restart: Boolean): Option[Props] = {
+ val params = workflowInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, restart)
val props = Props(initializationActorClass, params).withDispatcher(Dispatcher.BackendDispatcher)
Option(props)
}
- def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall],
- serviceRegistryActor: ActorRef): StandardInitializationActorParams = {
- DefaultInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, configurationDescriptor)
+ def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall],
+ serviceRegistryActor: ActorRef, restarting: Boolean): StandardInitializationActorParams = {
+ DefaultInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, configurationDescriptor, restarting)
}
override def jobExecutionActorProps(jobDescriptor: BackendJobDescriptor,
@@ -152,7 +152,7 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory {
jobDescriptor, initializationDataOption, serviceRegistryActor, ioActor, configurationDescriptor)
}
- override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall],
+ override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall],
jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs,
initializationData: Option[BackendInitializationData]): Option[Props] = {
finalizationActorClassOption map { finalizationActorClass =>
@@ -162,7 +162,7 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory {
}
}
- def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall],
+ def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall],
jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs,
initializationDataOption: Option[BackendInitializationData]):
StandardFinalizationActorParams = {
@@ -181,12 +181,18 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory {
override def getExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, backendConfig: Config,
initializationData: Option[BackendInitializationData]): Path = {
- initializationData.get.asInstanceOf[StandardInitializationData].workflowPaths.executionRoot
+ initializationData match {
+ case Some(data) => data.asInstanceOf[StandardInitializationData].workflowPaths.executionRoot
+ case None => super.getExecutionRootPath(workflowDescriptor, backendConfig, initializationData)
+ }
}
override def getWorkflowExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, backendConfig: Config,
initializationData: Option[BackendInitializationData]): Path = {
- initializationData.get.asInstanceOf[StandardInitializationData].workflowPaths.workflowRoot
+ initializationData match {
+ case Some(data) => data.asInstanceOf[StandardInitializationData].workflowPaths.workflowRoot
+ case None => super.getWorkflowExecutionRootPath(workflowDescriptor, backendConfig, initializationData)
+ }
}
override def runtimeAttributeDefinitions(initializationDataOption: Option[BackendInitializationData]):
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala
index c3cb05143..180f4d345 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala
@@ -10,7 +10,6 @@ import cromwell.core.Dispatcher
import cromwell.services.keyvalue.KeyValueServiceActor._
import scala.concurrent.{Future, Promise}
-import scala.language.existentials
trait StandardSyncExecutionActorParams extends StandardJobExecutionActorParams {
/** The class for creating an async backend. */
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala b/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala
index 20a649e00..37f037b57 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala
@@ -1,5 +1,6 @@
package cromwell.backend.standard
+import com.typesafe.config.Config
import cromwell.backend.validation._
/**
@@ -29,8 +30,8 @@ object StandardValidatedRuntimeAttributesBuilder {
*
* Additional runtime attribute validations may be added by calling `withValidation` on the default.
*/
- lazy val default: StandardValidatedRuntimeAttributesBuilder = {
- val required = Seq(ContinueOnReturnCodeValidation.default, FailOnStderrValidation.default)
+ def default(backendRuntimeConfig: Option[Config]): StandardValidatedRuntimeAttributesBuilder = {
+ val required = Seq(ContinueOnReturnCodeValidation.default(backendRuntimeConfig), FailOnStderrValidation.default(backendRuntimeConfig))
val custom = Seq.empty
StandardValidatedRuntimeAttributesBuilderImpl(custom, required)
}
diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala
index baaa9a78d..e79f2e863 100644
--- a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala
@@ -19,9 +19,8 @@ import cromwell.core.io._
import cromwell.core.logging.JobLogging
import cromwell.core.path.{Path, PathCopier}
import cromwell.core.simpleton.{WdlValueBuilder, WdlValueSimpleton}
-import wdl4s.values.WdlFile
+import wdl4s.wdl.values.WdlFile
-import scala.language.postfixOps
import scala.util.{Failure, Success, Try}
/**
@@ -55,15 +54,52 @@ object StandardCacheHitCopyingActor {
sealed trait StandardCacheHitCopyingActorState
case object Idle extends StandardCacheHitCopyingActorState
- case object WaitingForCopyResponses extends StandardCacheHitCopyingActorState
+ case object WaitingForIoResponses extends StandardCacheHitCopyingActorState
+ case object FailedState extends StandardCacheHitCopyingActorState
+ case object WaitingForOnSuccessResponse extends StandardCacheHitCopyingActorState
- case class StandardCacheHitCopyingActorData(copyCommandsToWaitFor: Set[IoCopyCommand],
- copiedJobOutputs: CallOutputs,
- copiedDetritus: DetritusMap,
+ // TODO: this mechanism here is very close to the one in CallCacheHashingJobActorData
+ // Abstracting it might be valuable
+ /**
+ * The head subset of commandsToWaitFor is sent to the IoActor as a bulk.
+ * When a response comes back, the corresponding command is removed from the head set.
+ * When the head set is empty, it is removed and the next subset is sent, until there is no subset left.
+ * If at any point a response comes back as a failure, the other responses for the current set will still be awaited,
+ * but subsequent sets will not be sent and the actor will send back a failure message.
+ */
+ case class StandardCacheHitCopyingActorData(commandsToWaitFor: List[Set[IoCommand[_]]],
+ newJobOutputs: CallOutputs,
+ newDetritus: DetritusMap,
returnCode: Option[Int]
) {
- def remove(copyCommand: IoCopyCommand) = copy(copyCommandsToWaitFor = copyCommandsToWaitFor filterNot { _ == copyCommand })
+
+ /**
+ * Removes the command from commandsToWaitFor
+ * returns a pair of the new state data and CommandSetState giving information about what to do next
+ */
+ def commandComplete(command: IoCommand[_]): (StandardCacheHitCopyingActorData, CommandSetState) = commandsToWaitFor match {
+ // If everything was already done send back current data and AllCommandsDone
+ case Nil => (this, AllCommandsDone)
+ case lastSubset :: Nil =>
+ val updatedSubset = lastSubset - command
+ // If the last subset is now empty, we're done
+ if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = List.empty), AllCommandsDone)
+ // otherwise update commandsToWaitFor and keep waiting
+ else (this.copy(commandsToWaitFor = List(updatedSubset)), StillWaiting)
+ case currentSubset :: otherSubsets =>
+ val updatedSubset = currentSubset - command
+ // This subset is done but there are other ones, remove it from commandsToWaitFor and return the next round of commands
+ if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = otherSubsets), NextSubSet(otherSubsets.head))
+ // otherwise update the head subset and keep waiting
+ else (this.copy(commandsToWaitFor = List(updatedSubset) ++ otherSubsets), StillWaiting)
+ }
}
+
+ // Internal ADT to keep track of command set states
+ private[callcaching] sealed trait CommandSetState
+ private[callcaching] case object StillWaiting extends CommandSetState
+ private[callcaching] case object AllCommandsDone extends CommandSetState
+ private[callcaching] case class NextSubSet(commands: Set[IoCommand[_]]) extends CommandSetState
}
class DefaultStandardCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) extends StandardCacheHitCopyingActor(standardParams) with DefaultIoCommandBuilder
@@ -92,37 +128,83 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit
when(Idle) {
case Event(CopyOutputsCommand(simpletons, jobDetritus, returnCode), None) =>
- val sourceCallRootPath = lookupSourceCallRootPath(jobDetritus)
-
- val processed = for {
- (callOutputs, simpletonCopyPairs) <- processSimpletons(simpletons, sourceCallRootPath)
- (destinationDetritus, detritusCopyPairs) <- processDetritus(jobDetritus)
- } yield (callOutputs, destinationDetritus, simpletonCopyPairs ++ detritusCopyPairs)
-
- processed match {
- case Success((callOutputs, destinationDetritus, allCopyPairs)) =>
- duplicate(allCopyPairs) match {
- case Some(Success(_)) => succeedAndStop(returnCode, callOutputs, destinationDetritus)
- case Some(Failure(failure)) => failAndStop(failure)
- case None =>
- val allCopyCommands = allCopyPairs map { case (source, destination) => copyCommand(source, destination, overwrite = true) }
-
- allCopyCommands foreach { sendIoCommand(_) }
- goto(WaitingForCopyResponses) using Option(StandardCacheHitCopyingActorData(allCopyCommands, callOutputs, destinationDetritus, returnCode))
+ // Try to make a Path of the callRootPath from the detritus
+ lookupSourceCallRootPath(jobDetritus) match {
+ case Success(sourceCallRootPath) =>
+
+ // process simpletons and detritus to get updated paths and corresponding IoCommands
+ val processed = for {
+ (destinationCallOutputs, simpletonIoCommands) <- processSimpletons(simpletons, sourceCallRootPath)
+ (destinationDetritus, detritusIoCommands) <- processDetritus(jobDetritus)
+ } yield (destinationCallOutputs, destinationDetritus, simpletonIoCommands ++ detritusIoCommands)
+
+ processed match {
+ case Success((destinationCallOutputs, destinationDetritus, detritusAndOutputsIoCommands)) =>
+ duplicate(ioCommandsToCopyPairs(detritusAndOutputsIoCommands)) match {
+ // Use the duplicate override if exists
+ case Some(Success(_)) => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus)
+ case Some(Failure(failure)) => failAndStop(failure)
+ // Otherwise send the first round of IoCommands (file outputs and detritus) if any
+ case None if detritusAndOutputsIoCommands.nonEmpty =>
+ detritusAndOutputsIoCommands foreach sendIoCommand
+
+ // Add potential additional commands to the list
+ val additionalCommands = additionalIoCommands(sourceCallRootPath, simpletons, destinationCallOutputs, jobDetritus, destinationDetritus)
+ val allCommands = List(detritusAndOutputsIoCommands) ++ additionalCommands
+
+ goto(WaitingForIoResponses) using Option(StandardCacheHitCopyingActorData(allCommands, destinationCallOutputs, destinationDetritus, returnCode))
+ case _ => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus)
+ }
+
+ case Failure(failure) => failAndStop(failure)
}
case Failure(failure) => failAndStop(failure)
}
}
- when(WaitingForCopyResponses) {
- case Event(IoSuccess(copyCommand: IoCopyCommand, _), Some(data)) =>
- val newData = data.remove(copyCommand)
- if (newData.copyCommandsToWaitFor.isEmpty) succeedAndStop(data.returnCode, data.copiedJobOutputs, data.copiedDetritus)
- else stay() using Option(newData)
- case Event(IoFailure(copyCommand: IoCopyCommand, failure), _) =>
- failAndStop(failure)
+ when(WaitingForIoResponses) {
+ case Event(IoSuccess(command: IoCommand[_], _), Some(data)) =>
+ val (newData, commandState) = data.commandComplete(command)
+
+ commandState match {
+ case StillWaiting => stay() using Option(newData)
+ case AllCommandsDone => succeedAndStop(newData.returnCode, newData.newJobOutputs, newData.newDetritus)
+ case NextSubSet(commands) =>
+ commands foreach sendIoCommand
+ stay() using Option(newData)
+ }
+ case Event(IoFailure(command: IoCommand[_], failure), Some(data)) =>
+ // any failure is fatal
+ context.parent ! JobFailedNonRetryableResponse(jobDescriptor.key, failure, None)
+
+ val (newData, commandState) = data.commandComplete(command)
+
+ commandState match {
+ // If we're still waiting for some responses, go to failed state
+ case StillWaiting => goto(FailedState) using Option(newData)
+ // Otherwise we're done
+ case _ =>
+ context stop self
+ stay()
+ }
+ // Should not be possible
+ case Event(IoFailure(_: IoCommand[_], failure), None) => failAndStop(failure)
+ }
+
+ when(FailedState) {
+ // At this point success or failure doesn't matter, we've already failed this hit
+ case Event(response: IoAck[_], Some(data)) =>
+ val (newData, commandState) = data.commandComplete(response.command)
+ commandState match {
+ // If we're still waiting for some responses, stay
+ case StillWaiting => stay() using Option(newData)
+ // Otherwise we're done
+ case _ =>
+ context stop self
+ stay()
+ }
}
whenUnhandled {
@@ -136,7 +218,7 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit
def succeedAndStop(returnCode: Option[Int], copiedJobOutputs: CallOutputs, detritusMap: DetritusMap) = {
import cromwell.services.metadata.MetadataService.implicits.MetadataAutoPutter
serviceRegistryActor.putMetadata(jobDescriptor.workflowDescriptor.id, Option(jobDescriptor.key), startMetadataKeyValues)
- context.parent ! JobSucceededResponse(jobDescriptor.key, returnCode, copiedJobOutputs, Option(detritusMap), Seq.empty)
+ context.parent ! JobSucceededResponse(jobDescriptor.key, returnCode, copiedJobOutputs, Option(detritusMap), Seq.empty, None)
context stop self
stay()
}
@@ -154,57 +236,79 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit
stay()
}
- private def lookupSourceCallRootPath(sourceJobDetritusFiles: Map[String, String]): Path = {
- sourceJobDetritusFiles.get(JobPaths.CallRootPathKey).map(getPath).get recover {
- case failure =>
- throw new RuntimeException(s"${JobPaths.CallRootPathKey} wasn't found for call ${jobDescriptor.call.fullyQualifiedName}", failure)
- } get
+ protected def lookupSourceCallRootPath(sourceJobDetritusFiles: Map[String, String]): Try[Path] = {
+ sourceJobDetritusFiles.get(JobPaths.CallRootPathKey) match {
+ case Some(source) => getPath(source)
+ case None => Failure(new RuntimeException(s"${JobPaths.CallRootPathKey} wasn't found for call ${jobDescriptor.call.fullyQualifiedName}"))
+ }
+ }
+
+ private def ioCommandsToCopyPairs(commands: Set[IoCommand[_]]): Set[PathPair] = commands collect {
+ case copyCommand: IoCopyCommand => copyCommand.source -> copyCommand.destination
}
/**
* Returns a pair of the list of simpletons with copied paths, and copy commands necessary to perform those copies.
*/
- private def processSimpletons(wdlValueSimpletons: Seq[WdlValueSimpleton], sourceCallRootPath: Path): Try[(CallOutputs, Set[PathPair])] = Try {
- val (destinationSimpletons, ioCommands): (List[WdlValueSimpleton], Set[PathPair]) = wdlValueSimpletons.toList.foldMap({
+ protected def processSimpletons(wdlValueSimpletons: Seq[WdlValueSimpleton], sourceCallRootPath: Path): Try[(CallOutputs, Set[IoCommand[_]])] = Try {
+ val (destinationSimpletons, ioCommands): (List[WdlValueSimpleton], Set[IoCommand[_]]) = wdlValueSimpletons.toList.foldMap({
case WdlValueSimpleton(key, wdlFile: WdlFile) =>
val sourcePath = getPath(wdlFile.value).get
val destinationPath = PathCopier.getDestinationFilePath(sourceCallRootPath, sourcePath, destinationCallRootPath)
val destinationSimpleton = WdlValueSimpleton(key, WdlFile(destinationPath.pathAsString))
- List(destinationSimpleton) -> Set(sourcePath -> destinationPath)
- case nonFileSimpleton => (List(nonFileSimpleton), Set.empty[PathPair])
+ List(destinationSimpleton) -> Set(copyCommand(sourcePath, destinationPath, overwrite = true))
+ case nonFileSimpleton => (List(nonFileSimpleton), Set.empty[IoCommand[_]])
})
(WdlValueBuilder.toJobOutputs(jobDescriptor.call.task.outputs, destinationSimpletons), ioCommands)
}
/**
- * Returns a pair of the detritus with copied paths, and copy commands necessary to perform those copies.
+ * Returns the file (and ONLY the file detritus) intersection between the cache hit and this call.
*/
- private def processDetritus(sourceJobDetritusFiles: Map[String, String]): Try[(Map[String, Path], Set[PathPair])] = Try {
+ protected final def detritusFileKeys(sourceJobDetritusFiles: Map[String, String]) = {
val sourceKeys = sourceJobDetritusFiles.keySet
val destinationKeys = destinationJobDetritusPaths.keySet
- val fileKeys = sourceKeys.intersect(destinationKeys).filterNot(_ == JobPaths.CallRootPathKey)
+ sourceKeys.intersect(destinationKeys).filterNot(_ == JobPaths.CallRootPathKey)
+ }
+
+ /**
+ * Returns a pair of the detritus with copied paths, and copy commands necessary to perform those copies.
+ */
+ protected def processDetritus(sourceJobDetritusFiles: Map[String, String]): Try[(Map[String, Path], Set[IoCommand[_]])] = Try {
+ val fileKeys = detritusFileKeys(sourceJobDetritusFiles)
- val zero = (Map.empty[String, Path], Set.empty[PathPair])
+ val zero = (Map.empty[String, Path], Set.empty[IoCommand[_]])
val (destinationDetritus, ioCommands) = fileKeys.foldLeft(zero)({
case ((detrituses, commands), detritus) =>
val sourcePath = getPath(sourceJobDetritusFiles(detritus)).get
val destinationPath = destinationJobDetritusPaths(detritus)
-
+
val newDetrituses = detrituses + (detritus -> destinationPath)
-
- (newDetrituses, commands + ((sourcePath, destinationPath)))
+
+ (newDetrituses, commands + copyCommand(sourcePath, destinationPath, overwrite = true))
})
-
+
(destinationDetritus + (JobPaths.CallRootPathKey -> destinationCallRootPath), ioCommands)
}
+ /**
+ * Additional IoCommands that will be sent after (and only after) output and detritus commands complete successfully.
+ * See StandardCacheHitCopyingActorData
+ */
+ protected def additionalIoCommands(sourceCallRootPath: Path,
+ originalSimpletons: Seq[WdlValueSimpleton],
+ newOutputs: CallOutputs,
+ originalDetritus: Map[String, String],
+ newDetritus: Map[String, Path]): List[Set[IoCommand[_]]] = List.empty
+
override protected def onTimeout(message: Any, to: ActorRef): Unit = {
val exceptionMessage = message match {
case copyCommand: IoCopyCommand => s"The Cache hit copying actor timed out waiting for a response to copy ${copyCommand.source.pathAsString} to ${copyCommand.destination.pathAsString}"
+ case touchCommand: IoTouchCommand => s"The Cache hit copying actor timed out waiting for a response to touch ${touchCommand.file.pathAsString}"
case other => s"The Cache hit copying actor timed out waiting for an unknown I/O operation: $other"
}
diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala
index 417cb1b74..bcb08d333 100644
--- a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala
@@ -9,7 +9,7 @@ import cromwell.core.JobKey
import cromwell.core.callcaching._
import cromwell.core.io._
import cromwell.core.logging.JobLogging
-import wdl4s.values.WdlFile
+import wdl4s.wdl.values.WdlFile
import scala.util.{Failure, Success, Try}
@@ -44,8 +44,7 @@ object StandardFileHashingActor {
case class FileHashingFunction(work: (SingleFileHashRequest, LoggingAdapter) => Try[String])
sealed trait BackendSpecificHasherCommand { def jobKey: JobKey }
- case class SingleFileHashRequest(jobKey: JobKey, hashKey: HashKey, file: WdlFile, initializationData: Option[BackendInitializationData]) extends BackendSpecificHasherCommand
- case class HashesNoLongerRequired(jobKey: JobKey) extends BackendSpecificHasherCommand
+ final case class SingleFileHashRequest(jobKey: JobKey, hashKey: HashKey, file: WdlFile, initializationData: Option[BackendInitializationData]) extends BackendSpecificHasherCommand
sealed trait BackendSpecificHasherResponse extends SuccessfulHashResultMessage
case class FileHashResponse(hashResult: HashResult) extends BackendSpecificHasherResponse { override def hashes = Set(hashResult) }
@@ -68,17 +67,17 @@ abstract class StandardFileHashingActor(standardParams: StandardFileHashingActor
customHashStrategy(fileRequest) match {
case Some(Success(result)) => context.parent ! FileHashResponse(HashResult(fileRequest.hashKey, HashValue(result)))
- case Some(Failure(failure)) => context.parent ! HashingFailedMessage(fileRequest.hashKey, failure)
+ case Some(Failure(failure)) => context.parent ! HashingFailedMessage(fileRequest.file.value, failure)
case None => asyncHashing(fileRequest, replyTo)
}
// Hash Success
- case (fileHashRequest: SingleFileHashRequest, response @ IoSuccess(_, result: String)) =>
+ case (fileHashRequest: SingleFileHashRequest, IoSuccess(_, result: String)) =>
context.parent ! FileHashResponse(HashResult(fileHashRequest.hashKey, HashValue(result)))
// Hash Failure
- case (fileHashRequest: SingleFileHashRequest, response @ IoFailure(_, failure: Throwable)) =>
- context.parent ! HashingFailedMessage(fileHashRequest.hashKey, failure)
+ case (fileHashRequest: SingleFileHashRequest, IoFailure(_, failure: Throwable)) =>
+ context.parent ! HashingFailedMessage(fileHashRequest.file.value, failure)
case other =>
log.warning(s"Async File hashing actor received unexpected message: $other")
@@ -86,7 +85,7 @@ abstract class StandardFileHashingActor(standardParams: StandardFileHashingActor
def asyncHashing(fileRequest: SingleFileHashRequest, replyTo: ActorRef) = getPath(fileRequest.file.value) match {
case Success(gcsPath) => sendIoCommandWithContext(hashCommand(gcsPath), fileRequest)
- case Failure(failure) => replyTo ! HashingFailedMessage(fileRequest.hashKey, failure)
+ case Failure(failure) => replyTo ! HashingFailedMessage(fileRequest.file.value, failure)
}
override def receive: Receive = ioReceive orElse fileHashingReceive
diff --git a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala
index 0639b4f42..ed2bcfad0 100644
--- a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala
@@ -1,6 +1,6 @@
package cromwell.backend.validation
-import wdl4s.types.{WdlArrayType, WdlBooleanType, WdlIntegerType, WdlType}
+import wdl4s.wdl.types._
object ContinueOnReturnCode {
val validWdlTypes = Set[WdlType](WdlArrayType(WdlIntegerType), WdlBooleanType, WdlIntegerType)
diff --git a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala
index d0ca49339..27ae8170e 100644
--- a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala
@@ -4,10 +4,11 @@ import cats.data.Validated.{Invalid, Valid}
import cats.instances.list._
import cats.syntax.traverse._
import cats.syntax.validated._
+import com.typesafe.config.Config
import cromwell.backend.validation.RuntimeAttributesValidation._
import lenthall.validation.ErrorOr._
-import wdl4s.types.{WdlArrayType, WdlIntegerType, WdlStringType, WdlType}
-import wdl4s.values.{WdlArray, WdlBoolean, WdlInteger, WdlString, WdlValue}
+import wdl4s.wdl.types._
+import wdl4s.wdl.values._
import scala.util.Try
@@ -17,15 +18,16 @@ import scala.util.Try
*
* `instance` returns an validation that errors when no attribute is specified.
*
- * The default returns a `ContinueOnReturnCodeSet(0)` when no attribute is specified.
+ * `configDefaultWdlValue` returns the value of the attribute as specified by the
+ * reference.conf file, coerced into a WdlValue.
*
- * `optional` can be used return the validated value as an `Option`, wrapped in a `Some`, if present, or `None` if not
- * found.
+ * `default` a validation with the default value specified by the reference.conf file.
*/
object ContinueOnReturnCodeValidation {
lazy val instance: RuntimeAttributesValidation[ContinueOnReturnCode] = new ContinueOnReturnCodeValidation
- lazy val default: RuntimeAttributesValidation[ContinueOnReturnCode] = instance.withDefault(WdlInteger(0))
- lazy val optional: OptionalRuntimeAttributesValidation[ContinueOnReturnCode] = default.optional
+ def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[ContinueOnReturnCode] = instance.withDefault(
+ configDefaultWdlValue(runtimeConfig) getOrElse WdlInteger(0))
+ def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WdlValue] = instance.configDefaultWdlValue(runtimeConfig)
}
class ContinueOnReturnCodeValidation extends RuntimeAttributesValidation[ContinueOnReturnCode] {
diff --git a/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala b/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala
index 5a5d09730..60950ed00 100644
--- a/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala
@@ -1,24 +1,26 @@
package cromwell.backend.validation
import cats.syntax.validated._
+import com.typesafe.config.Config
import lenthall.validation.ErrorOr.ErrorOr
-import wdl4s.types.WdlIntegerType
-import wdl4s.values.{WdlInteger, WdlValue}
+import wdl4s.wdl.types.WdlIntegerType
+import wdl4s.wdl.values.{WdlInteger, WdlValue}
/**
* Validates the "cpu" runtime attribute an Integer greater than 0, returning the value as an `Int`.
*
* `instance` returns an validation that errors when no attribute is specified.
*
- * The default returns `1` when no attribute is specified.
+ * `default` a hardcoded default WdlValue for Cpu.
*
- * `optional` can be used return the validated value as an `Option`, wrapped in a `Some`, if present, or `None` if not
- * found.
+ * `configDefaultWdlValue` returns the value of the attribute as specified by the
+ * reference.conf file, coerced into a WdlValue.
*/
-object CpuValidation extends {
+object CpuValidation {
lazy val instance: RuntimeAttributesValidation[Int] = new CpuValidation
- lazy val default: RuntimeAttributesValidation[Int] = instance.withDefault(WdlInteger(1))
- lazy val optional: OptionalRuntimeAttributesValidation[Int] = default.optional
+ lazy val optional: OptionalRuntimeAttributesValidation[Int] = instance.optional
+ lazy val default: WdlValue = WdlInteger(1)
+ def configDefaultWdlValue(config: Option[Config]): Option[WdlValue] = instance.configDefaultWdlValue(config)
}
class CpuValidation extends IntRuntimeAttributesValidation(RuntimeAttributesKeys.CpuKey) {
diff --git a/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala b/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala
index f793d1397..788f7a7e6 100644
--- a/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala
@@ -2,7 +2,7 @@ package cromwell.backend.validation
import cats.syntax.validated._
import lenthall.validation.ErrorOr.ErrorOr
-import wdl4s.values.{WdlString, WdlValue}
+import wdl4s.wdl.values.{WdlString, WdlValue}
/**
* Validates the "docker" runtime attribute as a String, returning it as `String`.
diff --git a/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala b/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala
index 6a7ea6ee2..bf4f024c6 100644
--- a/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala
@@ -1,6 +1,7 @@
package cromwell.backend.validation
-import wdl4s.values.WdlBoolean
+import com.typesafe.config.Config
+import wdl4s.wdl.values.{WdlBoolean, WdlValue}
/**
* Validates the "failOnStderr" runtime attribute as a Boolean or a String 'true' or 'false', returning the value as a
@@ -8,12 +9,17 @@ import wdl4s.values.WdlBoolean
*
* `instance` returns an validation that errors when no attribute is specified.
*
- * The default returns `false` when no attribute is specified.
+ * `configDefaultWdlValue` returns the value of the attribute as specified by the
+ * reference.conf file, coerced into a WdlValue.
+ *
+ * `default` a validation with the default value specified by the reference.conf file.
*/
+
object FailOnStderrValidation {
lazy val instance: RuntimeAttributesValidation[Boolean] = new FailOnStderrValidation
- lazy val default: RuntimeAttributesValidation[Boolean] = instance.withDefault(WdlBoolean(false))
- lazy val optional: OptionalRuntimeAttributesValidation[Boolean] = default.optional
+ def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = instance.withDefault(
+ configDefaultWdlValue(runtimeConfig) getOrElse WdlBoolean(false))
+ def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WdlValue] = instance.configDefaultWdlValue(runtimeConfig)
}
class FailOnStderrValidation extends BooleanRuntimeAttributesValidation(RuntimeAttributesKeys.FailOnStderrKey) {
diff --git a/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala b/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala
index d493e357e..c2c97343a 100644
--- a/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala
@@ -1,11 +1,14 @@
package cromwell.backend.validation
import cats.syntax.validated._
+import com.typesafe.config.Config
import cromwell.backend.MemorySize
import lenthall.validation.ErrorOr._
import wdl4s.parser.MemoryUnit
-import wdl4s.types.{WdlIntegerType, WdlStringType}
-import wdl4s.values.{WdlInteger, WdlString, WdlValue}
+import wdl4s.wdl.types.{WdlIntegerType, WdlStringType}
+import wdl4s.wdl.values.{WdlInteger, WdlString, WdlValue}
+
+import scala.util.{Failure, Success}
/**
* Validates the "memory" runtime attribute as an Integer or String with format '8 GB', returning the value as a
@@ -13,61 +16,71 @@ import wdl4s.values.{WdlInteger, WdlString, WdlValue}
*
* `instance` returns an validation that errors when no attribute is specified.
*
- * There is no default, however `optional` can be used to validate the attribute and return the validated value as an
- * `Option`, wrapped in an `Some`, if present, or `None` if not found.
+ * `configDefaultWdlValue` returns the value of the attribute as specified by the
+ * reference.conf file, coerced into a WdlValue.
+ *
+ * `optional` can be used to return the validated value as an `Option`,
+ * wrapped in a `Some`, if present, or `None` if not found.
*
* `withDefaultMemory` can be used to create a memory validation that defaults to a particular memory size.
*/
object MemoryValidation {
- lazy val instance: RuntimeAttributesValidation[MemorySize] = new MemoryValidation
- lazy val optional: OptionalRuntimeAttributesValidation[MemorySize] = instance.optional
-
- def withDefaultMemory(memorySize: MemorySize): RuntimeAttributesValidation[MemorySize] =
- instance.withDefault(WdlInteger(memorySize.bytes.toInt))
+ def instance(attributeName: String = RuntimeAttributesKeys.MemoryKey): RuntimeAttributesValidation[MemorySize] =
+ new MemoryValidation(attributeName)
+ def optional(attributeName: String = RuntimeAttributesKeys.MemoryKey): OptionalRuntimeAttributesValidation[MemorySize] =
+ instance(attributeName).optional
+ def configDefaultString(attributeName: String = RuntimeAttributesKeys.MemoryKey, config: Option[Config]): Option[String] =
+ instance(attributeName).configDefaultValue(config)
+ def withDefaultMemory(attributeName: String = RuntimeAttributesKeys.MemoryKey, memorySize: String): RuntimeAttributesValidation[MemorySize] = {
+ MemorySize.parse(memorySize) match {
+ case Success(memory) => instance(attributeName).withDefault(WdlInteger(memory.bytes.toInt))
+ case Failure(_) => instance(attributeName).withDefault(BadDefaultAttribute(WdlString(memorySize.toString)))
+ }
+ }
private[validation] val wrongAmountFormat =
- s"Expecting ${RuntimeAttributesKeys.MemoryKey} runtime attribute value greater than 0 but got %s"
+ "Expecting %s runtime attribute value greater than 0 but got %s"
private[validation] val wrongTypeFormat =
- s"Expecting ${RuntimeAttributesKeys.MemoryKey} runtime attribute to be an Integer or String with format '8 GB'." +
- s" Exception: %s"
+ "Expecting %s runtime attribute to be an Integer or String with format '8 GB'." +
+ " Exception: %s"
- private[validation] def validateMemoryString(wdlString: WdlString): ErrorOr[MemorySize] =
- validateMemoryString(wdlString.value)
+ private[validation] def validateMemoryString(attributeName: String, wdlString: WdlString): ErrorOr[MemorySize] =
+ validateMemoryString(attributeName, wdlString.value)
- private[validation] def validateMemoryString(value: String): ErrorOr[MemorySize] = {
+ private[validation] def validateMemoryString(attributeName: String, value: String): ErrorOr[MemorySize] = {
MemorySize.parse(value) match {
case scala.util.Success(memorySize: MemorySize) if memorySize.amount > 0 =>
memorySize.to(MemoryUnit.GB).validNel
case scala.util.Success(memorySize: MemorySize) =>
- wrongAmountFormat.format(memorySize.amount).invalidNel
+ wrongAmountFormat.format(attributeName, memorySize.amount).invalidNel
case scala.util.Failure(throwable) =>
- wrongTypeFormat.format(throwable.getMessage).invalidNel
+ wrongTypeFormat.format(attributeName, throwable.getMessage).invalidNel
}
}
- private[validation] def validateMemoryInteger(wdlInteger: WdlInteger): ErrorOr[MemorySize] =
- validateMemoryInteger(wdlInteger.value)
+ private[validation] def validateMemoryInteger(attributeName: String, wdlInteger: WdlInteger): ErrorOr[MemorySize] =
+ validateMemoryInteger(attributeName, wdlInteger.value)
- private[validation] def validateMemoryInteger(value: Int): ErrorOr[MemorySize] = {
+ private[validation] def validateMemoryInteger(attributeName: String, value: Int): ErrorOr[MemorySize] = {
if (value <= 0)
- wrongAmountFormat.format(value).invalidNel
+ wrongAmountFormat.format(attributeName, value).invalidNel
else
MemorySize(value.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB).validNel
}
}
-class MemoryValidation extends RuntimeAttributesValidation[MemorySize] {
+class MemoryValidation(attributeName: String = RuntimeAttributesKeys.MemoryKey) extends RuntimeAttributesValidation[MemorySize] {
import MemoryValidation._
- override def key = RuntimeAttributesKeys.MemoryKey
+ override def key = attributeName
override def coercion = Seq(WdlIntegerType, WdlStringType)
override protected def validateValue: PartialFunction[WdlValue, ErrorOr[MemorySize]] = {
- case WdlInteger(value) => MemoryValidation.validateMemoryInteger(value)
- case WdlString(value) => MemoryValidation.validateMemoryString(value)
+ case WdlInteger(value) => MemoryValidation.validateMemoryInteger(key, value)
+ case WdlString(value) => MemoryValidation.validateMemoryString(key, value)
}
- override def missingValueMessage: String = wrongTypeFormat.format("Not supported WDL type value")
+ override def missingValueMessage: String = wrongTypeFormat.format(key, "Not supported WDL type value")
}
diff --git a/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala
index aa07afcdc..477f9f9c7 100644
--- a/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala
@@ -2,8 +2,8 @@ package cromwell.backend.validation
import cats.syntax.validated._
import lenthall.validation.ErrorOr.ErrorOr
-import wdl4s.types._
-import wdl4s.values.{WdlBoolean, WdlFloat, WdlInteger, WdlPrimitive, WdlString, WdlValue}
+import wdl4s.wdl.types._
+import wdl4s.wdl.values._
/**
* Validates one of the wdl primitive types: Boolean, Float, Integer, or String. WdlFile is not supported.
diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala
index 30ffe1043..a111fe554 100644
--- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala
@@ -4,8 +4,8 @@ import cats.data.ValidatedNel
import cats.syntax.validated._
import cromwell.core.{EvaluatedRuntimeAttributes, OptionNotFoundException, WorkflowOptions}
import lenthall.util.TryUtil
-import wdl4s.types.WdlType
-import wdl4s.values.WdlValue
+import wdl4s.wdl.types.WdlType
+import wdl4s.wdl.values.WdlValue
import scala.util.{Failure, Try}
diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala
index f8ff1441a..ffba5bbd8 100644
--- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala
@@ -2,13 +2,14 @@ package cromwell.backend.validation
import cats.data.{NonEmptyList, Validated}
import cats.syntax.validated._
+import com.typesafe.config.Config
import cromwell.backend.{MemorySize, RuntimeAttributeDefinition}
import lenthall.validation.ErrorOr._
import org.slf4j.Logger
-import wdl4s.expression.PureStandardLibraryFunctions
-import wdl4s.types.{WdlBooleanType, WdlIntegerType, WdlType}
-import wdl4s.values._
-import wdl4s.{NoLookup, WdlExpression}
+import wdl4s.wdl.expression.PureStandardLibraryFunctions
+import wdl4s.wdl.types.{WdlBooleanType, WdlIntegerType, WdlStringType, WdlType}
+import wdl4s.wdl.values._
+import wdl4s.wdl.{NoLookup, WdlExpression}
import scala.util.{Failure, Success}
@@ -20,24 +21,24 @@ object RuntimeAttributesValidation {
}
def validateDocker(docker: Option[WdlValue], onMissingKey: => ErrorOr[Option[String]]): ErrorOr[Option[String]] = {
- validateWithValidation(docker, DockerValidation.optional, onMissingKey)
+ validateWithValidation(docker, DockerValidation.instance.optional, onMissingKey)
}
def validateFailOnStderr(value: Option[WdlValue], onMissingKey: => ErrorOr[Boolean]): ErrorOr[Boolean] = {
- validateWithValidation(value, FailOnStderrValidation.default, onMissingKey)
+ validateWithValidation(value, FailOnStderrValidation.instance, onMissingKey)
}
def validateContinueOnReturnCode(value: Option[WdlValue],
onMissingKey: => ErrorOr[ContinueOnReturnCode]): ErrorOr[ContinueOnReturnCode] = {
- validateWithValidation(value, ContinueOnReturnCodeValidation.default, onMissingKey)
+ validateWithValidation(value, ContinueOnReturnCodeValidation.instance, onMissingKey)
}
def validateMemory(value: Option[WdlValue], onMissingKey: => ErrorOr[MemorySize]): ErrorOr[MemorySize] = {
- validateWithValidation(value, MemoryValidation.instance, onMissingKey)
+ validateWithValidation(value, MemoryValidation.instance(), onMissingKey)
}
def validateCpu(cpu: Option[WdlValue], onMissingKey: => ErrorOr[Int]): ErrorOr[Int] = {
- validateWithValidation(cpu, CpuValidation.default, onMissingKey)
+ validateWithValidation(cpu, CpuValidation.instance, onMissingKey)
}
private def validateWithValidation[T](valueOption: Option[WdlValue],
@@ -64,12 +65,12 @@ object RuntimeAttributesValidation {
}
}
- def parseMemoryString(s: WdlString): ErrorOr[MemorySize] = {
- MemoryValidation.validateMemoryString(s)
+ def parseMemoryString(k: String, s: WdlString): ErrorOr[MemorySize] = {
+ MemoryValidation.validateMemoryString(k, s)
}
- def parseMemoryInteger(i: WdlInteger): ErrorOr[MemorySize] = {
- MemoryValidation.validateMemoryInteger(i)
+ def parseMemoryInteger(k: String, i: WdlInteger): ErrorOr[MemorySize] = {
+ MemoryValidation.validateMemoryInteger(k, i)
}
def withDefault[ValidatedType](validation: RuntimeAttributesValidation[ValidatedType],
@@ -213,6 +214,15 @@ object RuntimeAttributesValidation {
}
/**
+ * A wrapper class to classify config-based default runtime attributes
+ * that cannot be coerced into an acceptable WdlType.
+ */
+case class BadDefaultAttribute(badDefaultValue: WdlValue) extends WdlValue {
+ val wdlType = WdlStringType
+}
+
+
+/**
* Performs a validation on a runtime attribute and returns some value.
*
* @tparam ValidatedType The type of the validated value.
@@ -371,6 +381,35 @@ trait RuntimeAttributesValidation[ValidatedType] {
final def withDefault(wdlValue: WdlValue): RuntimeAttributesValidation[ValidatedType] =
RuntimeAttributesValidation.withDefault(this, wdlValue)
+ /**
+ * Returns the value of the default runtime attribute of a
+ * validation key as specified in the reference.conf. Given
+ * a value, this method coerces it into an optional
+ * WdlValue. In case the value cannot be succesfully coerced
+ * the value is wrapped as a "BadDefaultAttributeValue" type that
+ * is failed downstream by the ValidatedRuntimeAttributesBuilder.
+ *
+ * @param optionalRuntimeConfig Optional default runtime attributes config of a particular backend.
+ * @return The new version of this validation.
+ */
+ final def configDefaultWdlValue(optionalRuntimeConfig: Option[Config]): Option[WdlValue] = {
+ optionalRuntimeConfig flatMap { config =>
+ val value = config.getValue(key).unwrapped()
+ coercion.collectFirst({
+ case wdlType if wdlType.coerceRawValue(value).isSuccess => {
+ wdlType.coerceRawValue(value).get
+ }
+ }) orElse Option(BadDefaultAttribute(WdlString(value.toString)))
+ }
+ }
+
+ final def configDefaultValue(optionalRuntimeConfig: Option[Config]): Option[String] = {
+ optionalRuntimeConfig match {
+ case Some(config) if config.hasPath(key) => Option(config.getValue(key).unwrapped().toString)
+ case _ => None
+ }
+ }
+
/*
Methods below provide aliases to expose protected methods to the package.
Allows wrappers to wire their overrides to invoke the corresponding method on the inner object.
diff --git a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala
index ed3981817..c125e24b8 100644
--- a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala
+++ b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala
@@ -6,8 +6,8 @@ import cromwell.backend.RuntimeAttributeDefinition
import lenthall.exception.MessageAggregation
import lenthall.validation.ErrorOr._
import org.slf4j.Logger
-import wdl4s.types.WdlType
-import wdl4s.values.WdlValue
+import wdl4s.wdl.types.WdlType
+import wdl4s.wdl.values.WdlValue
final case class ValidatedRuntimeAttributes(attributes: Map[String, Any])
diff --git a/backend/src/main/scala/cromwell/backend/wdl/Command.scala b/backend/src/main/scala/cromwell/backend/wdl/Command.scala
index b0e3c9294..5e0d1d424 100644
--- a/backend/src/main/scala/cromwell/backend/wdl/Command.scala
+++ b/backend/src/main/scala/cromwell/backend/wdl/Command.scala
@@ -1,9 +1,9 @@
package cromwell.backend.wdl
import cromwell.backend.BackendJobDescriptor
-import wdl4s.EvaluatedTaskInputs
-import wdl4s.expression.WdlFunctions
-import wdl4s.values.WdlValue
+import wdl4s.wdl.EvaluatedTaskInputs
+import wdl4s.wdl.expression.WdlFunctions
+import wdl4s.wdl.values.WdlValue
import scala.util.{Success, Try}
diff --git a/backend/src/main/scala/cromwell/backend/wdl/FileSizeLimitationConfig.scala b/backend/src/main/scala/cromwell/backend/wdl/FileSizeLimitationConfig.scala
new file mode 100644
index 000000000..44f2835a3
--- /dev/null
+++ b/backend/src/main/scala/cromwell/backend/wdl/FileSizeLimitationConfig.scala
@@ -0,0 +1,48 @@
+package cromwell.backend.wdl
+
+import com.typesafe.config.ConfigFactory
+import net.ceedubs.ficus.readers.ValueReader
+import net.ceedubs.ficus.Ficus._
+
+trait FileSizeLimitationConfig {
+
+ def readLinesLimit: Int
+
+ def readBoolLimit: Int
+
+ def readIntLimit: Int
+
+ def readFloatLimit: Int
+
+ def readStringLimit: Int
+
+ def readJsonLimit: Int
+
+ def readTsvLimit: Int
+
+ def readMapLimit: Int
+
+ def readObjectLimit: Int
+}
+
+object FileSizeLimitationConfig {
+ private val config = ConfigFactory.load.getConfig("system")
+
+ def fileSizeLimitationConfig: FileSizeLimitationConfig = config.as[FileSizeLimitationConfig]("input-read-limits")
+
+ implicit val configReader : ValueReader[FileSizeLimitationConfig] = ValueReader.relative{c =>
+ def f(s: String) = c.as[Int](s)
+ new FileSizeLimitationConfig {
+ val readLinesLimit = f("lines")
+ val readBoolLimit = f("bool")
+ val readIntLimit = f("int")
+ val readFloatLimit = f("float")
+ val readStringLimit = f("string")
+ val readJsonLimit = f("json")
+ val readTsvLimit = f("tsv")
+ val readMapLimit = f("map")
+ val readObjectLimit = f("object")
+ }
+ }
+}
+
diff --git a/backend/src/main/scala/cromwell/backend/wdl/FileSizeTooBig.scala b/backend/src/main/scala/cromwell/backend/wdl/FileSizeTooBig.scala
new file mode 100644
index 000000000..a0cf7477a
--- /dev/null
+++ b/backend/src/main/scala/cromwell/backend/wdl/FileSizeTooBig.scala
@@ -0,0 +1,4 @@
+package cromwell.backend.wdl
+
+case class FileSizeTooBig(override val getMessage: String) extends Exception
+
diff --git a/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala b/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala
index d41d25b19..93d3563b6 100644
--- a/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala
+++ b/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala
@@ -2,9 +2,9 @@ package cromwell.backend.wdl
import cromwell.backend.BackendJobDescriptor
import cromwell.core.JobOutput
-import wdl4s.LocallyQualifiedName
-import wdl4s.expression.WdlStandardLibraryFunctions
-import wdl4s.values.WdlValue
+import wdl4s.wdl.LocallyQualifiedName
+import wdl4s.wdl.expression.WdlStandardLibraryFunctions
+import wdl4s.wdl.values.WdlValue
import scala.util.{Success, Try}
diff --git a/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala b/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala
index 5a45cef96..349cefbe6 100644
--- a/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala
+++ b/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala
@@ -2,15 +2,21 @@ package cromwell.backend.wdl
import cromwell.backend.MemorySize
import cromwell.core.path.PathFactory
-import wdl4s.expression.WdlStandardLibraryFunctions
+import wdl4s.wdl.expression.WdlStandardLibraryFunctions
import wdl4s.parser.MemoryUnit
-import wdl4s.types.{WdlArrayType, WdlFileType, WdlObjectType, WdlStringType}
-import wdl4s.values._
+import wdl4s.wdl.types._
+import wdl4s.wdl.values._
import scala.util.{Failure, Success, Try}
trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions =>
+ val fileSizeLimitationConfig = FileSizeLimitationConfig.fileSizeLimitationConfig
+ import fileSizeLimitationConfig._
+
+ def fileSize: WdlValue=> Try[Long] =
+ w => Try(buildPath(w.valueString).size)
+
/**
* Asserts that the parameter list contains a single parameter which will be interpreted
* as a File and attempts to read the contents of that file
@@ -22,10 +28,12 @@ trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions
} yield string
}
- private def extractObjects(functionName: String, params: Seq[Try[WdlValue]]): Try[Array[WdlObject]] = for {
- contents <- readContentsFromSingleFileParameter(functionName, params)
- wdlObjects <- WdlObject.fromTsv(contents)
- } yield wdlObjects
+ private def extractObjects(functionName: String, params: Seq[Try[WdlValue]]): Try[Array[WdlObject]] =
+ for {
+ _ <- validateFileSizeIsWithinLimits("read_object", params, readObjectLimit)
+ contents <- readContentsFromSingleFileParameter(functionName, params)
+ wdlObjects <- WdlObject.fromTsv(contents)
+ } yield wdlObjects
override def readFile(path: String): String = buildPath(path).contentAsString
@@ -34,13 +42,25 @@ trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions
*/
override def read_lines(params: Seq[Try[WdlValue]]): Try[WdlArray] = {
for {
+ _ <- validateFileSizeIsWithinLimits("read_lines", params, readLinesLimit)
contents <- readContentsFromSingleFileParameter("read_lines", params)
lines = contents.split("\n")
} yield WdlArray(WdlArrayType(WdlStringType), lines map WdlString)
}
+ def validateFileSizeIsWithinLimits(functionName: String, params: Seq[Try[WdlValue]], limit: Int): Try[Unit] =
+ for {
+ fileName <- extractSingleArgument(functionName, params)
+ fileSize <- fileSize(fileName)
+ _ = if (fileSize > limit) {
+ val errorMsg = s"Use of $fileName failed because the file was too big ($fileSize bytes when only files of up to $limit bytes are permissible"
+ throw FileSizeTooBig(errorMsg)
+ }
+ } yield ()
+
override def read_map(params: Seq[Try[WdlValue]]): Try[WdlMap] = {
for {
+ _ <- validateFileSizeIsWithinLimits("read_map", params, readMapLimit)
contents <- readContentsFromSingleFileParameter("read_map", params)
wdlMap <- WdlMap.fromTsv(contents)
} yield wdlMap
@@ -58,13 +78,18 @@ trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions
/**
* Try to read a string from the file referenced by the specified `WdlValue`.
*/
- override def read_string(params: Seq[Try[WdlValue]]): Try[WdlString] = readContentsFromSingleFileParameter("read_string", params).map(s => WdlString(s.trim))
+ override def read_string(params: Seq[Try[WdlValue]]): Try[WdlString] =
+ for {
+ _ <- validateFileSizeIsWithinLimits("read_string", params, readStringLimit)
+ string <- readContentsFromSingleFileParameter("read_string", params)
+ } yield WdlString(string.trim)
/**
* Read a file in TSV format into an Array[Array[String]]
*/
override def read_tsv(params: Seq[Try[WdlValue]]): Try[WdlArray] = {
for {
+ _ <- validateFileSizeIsWithinLimits("read_tsv", params, readTsvLimit)
contents <- readContentsFromSingleFileParameter("read_tsv", params)
wdlArray = WdlArray.fromTsv(contents)
} yield wdlArray
@@ -73,12 +98,20 @@ trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions
/**
* Try to read an integer from the file referenced by the specified `WdlValue`.
*/
- override def read_int(params: Seq[Try[WdlValue]]): Try[WdlInteger] = read_string(params) map { s => WdlInteger(s.value.trim.toInt) }
+ override def read_int(params: Seq[Try[WdlValue]]): Try[WdlInteger] =
+ for {
+ _ <- validateFileSizeIsWithinLimits("read_int", params, readIntLimit)
+ r <- read_string(params) map { s => WdlInteger(s.value.trim.toInt) }
+ } yield r
/**
* Try to read a float from the file referenced by the specified `WdlValue`.
*/
- override def read_float(params: Seq[Try[WdlValue]]): Try[WdlFloat] = read_string(params) map { s => WdlFloat(s.value.trim.toDouble) }
+ override def read_float(params: Seq[Try[WdlValue]]): Try[WdlFloat] =
+ for {
+ _ <- validateFileSizeIsWithinLimits("read_float", params, readFloatLimit)
+ s <- read_string(params)
+ } yield WdlFloat(s.value.trim.toDouble)
/**
* Try to read a boolean from the file referenced by the specified `WdlValue`.
@@ -86,14 +119,39 @@ trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions
override def read_boolean(params: Seq[Try[WdlValue]]): Try[WdlBoolean] =
read_string(params) map { s => WdlBoolean(java.lang.Boolean.parseBoolean(s.value.trim.toLowerCase)) }
+ protected def size(file: WdlValue): Try[Double] = Try(buildPath(file.valueString).size.toDouble)
+
+ /**
+ * Gets the size of a file.
+ *
+ * @param params First parameter must be a File or File? or coerceable to one. The second is an optional string containing the size unit (eg "MB", "GiB")
+ */
override def size(params: Seq[Try[WdlValue]]): Try[WdlFloat] = {
+ // Inner function: get the memory unit from the second (optional) parameter
def toUnit(wdlValue: Try[WdlValue]) = wdlValue flatMap { unit => Try(MemoryUnit.fromSuffix(unit.valueString)) }
+ // Inner function: is this a file type, or an optional containing a file type?
+ def isOptionalOfFileType(wdlType: WdlType): Boolean = wdlType match {
+ case f if WdlFileType.isCoerceableFrom(f) => true
+ case WdlOptionalType(inner) => isOptionalOfFileType(inner)
+ case _ => false
+ }
+
+ // Inner function: Get the file size, allowing for unpacking of optionals
+ def optionalSafeFileSize(value: WdlValue): Try[Double] = value match {
+ case f if f.isInstanceOf[WdlFile] || WdlFileType.isCoerceableFrom(f.wdlType) => size(f)
+ case WdlOptionalValue(_, Some(o)) => optionalSafeFileSize(o)
+ case WdlOptionalValue(f, None) if isOptionalOfFileType(f) => Success(0d)
+ case _ => Failure(new Exception(s"The 'size' method expects a 'File' or 'File?' argument but instead got ${value.wdlType.toWdlString}."))
+ }
+
+ // Inner function: get the file size and convert into the requested memory unit
def fileSize(wdlValue: Try[WdlValue], convertTo: Try[MemoryUnit] = Success(MemoryUnit.Bytes)) = {
for {
value <- wdlValue
unit <- convertTo
- } yield MemorySize(buildPath(value.valueString).size.toDouble, MemoryUnit.Bytes).to(unit).amount
+ fileSize <- optionalSafeFileSize(value)
+ } yield MemorySize(fileSize, MemoryUnit.Bytes).to(unit).amount
}
params match {
diff --git a/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala b/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala
index 1c39c4d89..746b71ecd 100644
--- a/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala
+++ b/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala
@@ -1,7 +1,7 @@
package cromwell.backend.wdl
import lenthall.util.TryUtil
-import wdl4s.values.{WdlArray, WdlFile, WdlMap, WdlOptionalValue, WdlPair, WdlValue}
+import wdl4s.wdl.values._
import scala.util.{Success, Try}
diff --git a/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala b/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala
index b8568f840..28ff91145 100644
--- a/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala
+++ b/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala
@@ -1,10 +1,10 @@
package cromwell.backend.wdl
import cromwell.core.path.Path
-import wdl4s.TsvSerializable
-import wdl4s.expression.WdlStandardLibraryFunctions
-import wdl4s.types._
-import wdl4s.values._
+import wdl4s.wdl.TsvSerializable
+import wdl4s.wdl.expression.WdlStandardLibraryFunctions
+import wdl4s.wdl.types._
+import wdl4s.wdl.values._
import scala.util.{Failure, Try}
diff --git a/backend/src/test/scala/cromwell/backend/BackendSpec.scala b/backend/src/test/scala/cromwell/backend/BackendSpec.scala
index e0632ec1c..ec7d92949 100644
--- a/backend/src/test/scala/cromwell/backend/BackendSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/BackendSpec.scala
@@ -1,9 +1,8 @@
package cromwell.backend
-import com.typesafe.config.ConfigFactory
import cromwell.backend.BackendJobExecutionActor.{BackendJobExecutionResponse, JobFailedNonRetryableResponse, JobFailedRetryableResponse, JobSucceededResponse}
import cromwell.backend.io.TestWorkflows._
-import cromwell.core.callcaching.CallCachingEligible
+import cromwell.core.callcaching.NoDocker
import cromwell.core.labels.Labels
import cromwell.core.{WorkflowId, WorkflowOptions}
import lenthall.exception.AggregatedException
@@ -12,9 +11,9 @@ import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{Millis, Seconds, Span}
import org.specs2.mock.Mockito
import spray.json.{JsObject, JsValue}
-import wdl4s._
-import wdl4s.expression.NoFunctions
-import wdl4s.values.WdlValue
+import wdl4s.wdl._
+import wdl4s.wdl.expression.NoFunctions
+import wdl4s.wdl.values.WdlValue
trait BackendSpec extends ScalaFutures with Matchers with Mockito {
@@ -24,13 +23,13 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito {
executeJobAndAssertOutputs(backend, workflow.expectedResponse)
}
- def buildWorkflowDescriptor(wdl: WdlSource,
+ def buildWorkflowDescriptor(workflowSource: WorkflowSource,
inputs: Map[String, WdlValue] = Map.empty,
options: WorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])),
runtime: String = "") = {
BackendWorkflowDescriptor(
WorkflowId.randomId(),
- WdlNamespaceWithWorkflow.load(wdl.replaceAll("RUNTIME", runtime), Seq.empty[ImportResolver]).get.workflow, // Get ok, this is a test!
+ WdlNamespaceWithWorkflow.load(workflowSource.replaceAll("RUNTIME", runtime), Seq.empty[ImportResolver]).get.workflow, // Get ok, this is a test!
inputs,
options,
Labels.empty
@@ -56,10 +55,10 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito {
val inputDeclarations = call.evaluateTaskInputs(inputs, NoFunctions).get // .get is ok because this is a test
val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(call.task.runtimeAttributes, NoFunctions, inputDeclarations).get // .get is OK here because this is a test
val runtimeAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes)
- BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, CallCachingEligible, Map.empty)
+ BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, Map.empty)
}
- def jobDescriptorFromSingleCallWorkflow(wdl: WdlSource,
+ def jobDescriptorFromSingleCallWorkflow(wdl: WorkflowSource,
options: WorkflowOptions,
runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition]): BackendJobDescriptor = {
val workflowDescriptor = buildWorkflowDescriptor(wdl)
@@ -68,10 +67,10 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito {
val inputDeclarations = fqnMapToDeclarationMap(workflowDescriptor.knownValues)
val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(call.task.runtimeAttributes, NoFunctions, inputDeclarations).get // .get is OK here because this is a test
val runtimeAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes)
- BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, CallCachingEligible, Map.empty)
+ BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, Map.empty)
}
- def jobDescriptorFromSingleCallWorkflow(wdl: WdlSource,
+ def jobDescriptorFromSingleCallWorkflow(wdl: WorkflowSource,
runtime: String,
attempt: Int,
options: WorkflowOptions,
@@ -82,12 +81,12 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito {
val inputDeclarations = fqnMapToDeclarationMap(workflowDescriptor.knownValues)
val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(call.task.runtimeAttributes, NoFunctions, inputDeclarations).get // .get is OK here because this is a test
val runtimeAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes)
- BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, CallCachingEligible, Map.empty)
+ BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, Map.empty)
}
def assertResponse(executionResponse: BackendJobExecutionResponse, expectedResponse: BackendJobExecutionResponse) = {
(executionResponse, expectedResponse) match {
- case (JobSucceededResponse(_, _, responseOutputs, _, _), JobSucceededResponse(_, _, expectedOutputs, _, _)) =>
+ case (JobSucceededResponse(_, _, responseOutputs, _, _, _), JobSucceededResponse(_, _, expectedOutputs, _, _, _)) =>
responseOutputs.size shouldBe expectedOutputs.size
responseOutputs foreach {
case (fqn, out) =>
@@ -117,9 +116,6 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito {
}
}
- lazy val emptyBackendConfig = BackendConfigurationDescriptor(
- ConfigFactory.parseString("{}"), ConfigFactory.load())
-
def firstJobDescriptorKey(workflowDescriptor: BackendWorkflowDescriptor): BackendJobDescriptorKey = {
val call = workflowDescriptor.workflow.taskCalls.head
BackendJobDescriptorKey(call, None, 1)
@@ -127,7 +123,7 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito {
def firstJobDescriptor(workflowDescriptor: BackendWorkflowDescriptor,
inputs: Map[String, WdlValue] = Map.empty) = {
- BackendJobDescriptor(workflowDescriptor, firstJobDescriptorKey(workflowDescriptor), Map.empty, fqnMapToDeclarationMap(inputs), CallCachingEligible, Map.empty)
+ BackendJobDescriptor(workflowDescriptor, firstJobDescriptorKey(workflowDescriptor), Map.empty, fqnMapToDeclarationMap(inputs), NoDocker, Map.empty)
}
}
diff --git a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala
index 82e62d641..7438282ba 100644
--- a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala
@@ -2,13 +2,14 @@ package cromwell.backend
import akka.actor.ActorRef
import akka.testkit.TestActorRef
+import com.typesafe.config.ConfigFactory
import cromwell.backend.validation.{ContinueOnReturnCodeFlag, ContinueOnReturnCodeSet, ContinueOnReturnCodeValidation, RuntimeAttributesKeys}
import cromwell.core.{TestKitSuite, WorkflowOptions}
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FlatSpecLike, Matchers}
-import wdl4s.types._
-import wdl4s.values.{WdlArray, WdlBoolean, WdlFloat, WdlInteger, WdlString, WdlValue}
-import wdl4s.{TaskCall, WdlExpression}
+import wdl4s.wdl.types._
+import wdl4s.wdl.values.{WdlArray, WdlBoolean, WdlFloat, WdlInteger, WdlString, WdlValue}
+import wdl4s.wdl.{WdlTaskCall, WdlExpression}
import scala.concurrent.Future
import scala.util.Try
@@ -30,9 +31,11 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
testPredicateBackendWorkflowInitializationActor.continueOnReturnCodePredicate(valueRequired = false)
}
+ val optionalConfig = Option(TestConfig.optionalRuntimeConfig)
+
it should "continueOnReturnCodePredicate" in {
testContinueOnReturnCode(None) should be(true)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(None) should be(true)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(None) should be(true)
val booleanRows = Table(
"value",
@@ -66,9 +69,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlBoolean(value)
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
val valid =
- ContinueOnReturnCodeValidation.default.validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
+ ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
valid.isValid should be(result)
valid.toEither.right.get should be(ContinueOnReturnCodeFlag(value))
}
@@ -77,9 +80,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlString(value.toString)
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
val valid =
- ContinueOnReturnCodeValidation.default.validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
+ ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
valid.isValid should be(result)
valid.toEither.right.get should be(ContinueOnReturnCodeFlag(value))
}
@@ -88,7 +91,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlExpression.fromString(value.toString)
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
// NOTE: expressions are never valid to validate
}
@@ -96,9 +99,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlInteger(value)
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
val valid =
- ContinueOnReturnCodeValidation.default.validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
+ ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
valid.isValid should be(result)
valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value)))
}
@@ -107,9 +110,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlString(value.toString)
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
val valid =
- ContinueOnReturnCodeValidation.default.validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
+ ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
valid.isValid should be(result)
valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value)))
}
@@ -118,7 +121,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlExpression.fromString(value.toString)
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
// NOTE: expressions are never valid to validate
}
@@ -126,9 +129,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlArray(WdlArrayType(WdlIntegerType), Seq(WdlInteger(value)))
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
val valid =
- ContinueOnReturnCodeValidation.default.validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
+ ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
valid.isValid should be(result)
valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value)))
}
@@ -137,9 +140,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlArray(WdlArrayType(WdlStringType), Seq(WdlString(value.toString)))
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
val valid =
- ContinueOnReturnCodeValidation.default.validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
+ ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
valid.isValid should be(result)
valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value)))
}
@@ -148,7 +151,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlArray(WdlArrayType(WdlExpressionType), Seq(WdlExpression.fromString(value.toString)))
val result = false
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
// NOTE: expressions are never valid to validate
}
@@ -156,16 +159,16 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
val wdlValue = WdlExpression.fromString(expression)
val result = true
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
// NOTE: expressions are never valid to validate
}
forAll(invalidWdlValueRows) { wdlValue =>
val result = false
testContinueOnReturnCode(Option(wdlValue)) should be(result)
- ContinueOnReturnCodeValidation.default.validateOptionalExpression(Option(wdlValue)) should be(result)
+ ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalExpression(Option(wdlValue)) should be(result)
val valid =
- ContinueOnReturnCodeValidation.default.validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
+ ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> wdlValue))
valid.isValid should be(result)
valid.toEither.left.get.toList should contain theSameElementsAs List(
"Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]"
@@ -177,9 +180,10 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite("BackendWorkfl
}
class TestPredicateBackendWorkflowInitializationActor extends BackendWorkflowInitializationActor {
+
override val serviceRegistryActor: ActorRef = context.system.deadLetters
- override def calls: Set[TaskCall] = throw new NotImplementedError("calls")
+ override def calls: Set[WdlTaskCall] = throw new NotImplementedError("calls")
override protected def runtimeAttributeValidators: Map[String, (Option[WdlValue]) => Boolean] =
throw new NotImplementedError("runtimeAttributeValidators")
@@ -194,8 +198,7 @@ class TestPredicateBackendWorkflowInitializationActor extends BackendWorkflowIni
override protected def workflowDescriptor: BackendWorkflowDescriptor =
throw new NotImplementedError("workflowDescriptor")
- override protected def configurationDescriptor: BackendConfigurationDescriptor =
- throw new NotImplementedError("configurationDescriptor")
+ override protected def configurationDescriptor: BackendConfigurationDescriptor = BackendConfigurationDescriptor(TestConfig.sampleBackendRuntimeConfig, ConfigFactory.empty())
override def continueOnReturnCodePredicate(valueRequired: Boolean)
(wdlExpressionMaybe: Option[WdlValue]): Boolean = {
diff --git a/backend/src/test/scala/cromwell/backend/TestConfig.scala b/backend/src/test/scala/cromwell/backend/TestConfig.scala
new file mode 100644
index 000000000..42050dbe3
--- /dev/null
+++ b/backend/src/test/scala/cromwell/backend/TestConfig.scala
@@ -0,0 +1,44 @@
+package cromwell.backend
+
+import com.typesafe.config.ConfigFactory
+
+object TestConfig {
+
+ lazy val sampleBackendRuntimeConfigString =
+ s"""
+ |default-runtime-attributes {
+ | failOnStderr: false
+ | continueOnReturnCode: 0
+ | memory: "2 GB"
+ |}
+ |""".stripMargin
+
+ lazy val allBackendRuntimeAttrsString =
+ s"""
+ |default-runtime-attributes {
+ | cpu: 1
+ | failOnStderr: false
+ | continueOnReturnCode: 0
+ | memory: "1 GB"
+ | bootDiskSizeGb: 10
+ | disks: "local-disk 10 SSD"
+ | noAddress: false
+ | preemptible: 0
+ | zones: ["us-central1-a", "us-central1-b"]
+ |}
+ """.stripMargin
+
+ lazy val sampleBackendRuntimeConfig = ConfigFactory.parseString(sampleBackendRuntimeConfigString)
+
+ lazy val allRuntimeAttrsConfig = ConfigFactory.parseString(allBackendRuntimeAttrsString).getConfig("default-runtime-attributes")
+
+ lazy val optionalRuntimeConfig = sampleBackendRuntimeConfig.getConfig("default-runtime-attributes")
+
+ lazy val globalConfig = ConfigFactory.load()
+
+ lazy val emptyConfig = ConfigFactory.empty()
+
+ lazy val emptyBackendConfigDescriptor = BackendConfigurationDescriptor(emptyConfig, globalConfig)
+
+ lazy val backendRuntimeConfigDescriptor = BackendConfigurationDescriptor(sampleBackendRuntimeConfig, emptyConfig)
+}
diff --git a/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala b/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala
index 0a8cb5936..eeccc39e1 100644
--- a/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala
@@ -1,10 +1,10 @@
package cromwell.backend.io
import com.typesafe.config.ConfigFactory
-import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptorKey, BackendSpec}
+import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptorKey, BackendSpec, TestConfig}
import cromwell.core.path.DefaultPathBuilder
import org.scalatest.{FlatSpec, Matchers}
-import wdl4s.TaskCall
+import wdl4s.wdl.WdlTaskCall
class JobPathsSpec extends FlatSpec with Matchers with BackendSpec {
@@ -23,16 +23,16 @@ class JobPathsSpec extends FlatSpec with Matchers with BackendSpec {
| }
""".stripMargin
- val globalConfig = ConfigFactory.load()
val backendConfig = ConfigFactory.parseString(configString)
- val defaultBackendConfigDescriptor = BackendConfigurationDescriptor(backendConfig, globalConfig)
+ val defaultBackendConfigDescriptor = BackendConfigurationDescriptor(backendConfig, TestConfig.globalConfig)
"JobPaths" should "provide correct paths for a job" in {
val wd = buildWorkflowDescriptor(TestWorkflows.HelloWorld)
- val call: TaskCall = wd.workflow.taskCalls.head
+ val call: WdlTaskCall = wd.workflow.taskCalls.head
val jobKey = BackendJobDescriptorKey(call, None, 1)
- val jobPaths = new JobPathsWithDocker(jobKey, wd, backendConfig)
+ val workflowPaths = new WorkflowPathsWithDocker(wd, backendConfig)
+ val jobPaths = new JobPathsWithDocker(workflowPaths, jobKey)
val id = wd.id
jobPaths.callRoot.pathAsString shouldBe
fullPath(s"local-cromwell-executions/wf_hello/$id/call-hello")
@@ -59,17 +59,17 @@ class JobPathsSpec extends FlatSpec with Matchers with BackendSpec {
fullPath("/cromwell-executions/dock/path")
val jobKeySharded = BackendJobDescriptorKey(call, Option(0), 1)
- val jobPathsSharded = new JobPathsWithDocker(jobKeySharded, wd, backendConfig)
+ val jobPathsSharded = new JobPathsWithDocker(workflowPaths, jobKeySharded)
jobPathsSharded.callExecutionRoot.pathAsString shouldBe
fullPath(s"local-cromwell-executions/wf_hello/$id/call-hello/shard-0/execution")
val jobKeyAttempt = BackendJobDescriptorKey(call, None, 2)
- val jobPathsAttempt = new JobPathsWithDocker(jobKeyAttempt, wd, backendConfig)
+ val jobPathsAttempt = new JobPathsWithDocker(workflowPaths, jobKeyAttempt)
jobPathsAttempt.callExecutionRoot.pathAsString shouldBe
fullPath(s"local-cromwell-executions/wf_hello/$id/call-hello/attempt-2/execution")
val jobKeyShardedAttempt = BackendJobDescriptorKey(call, Option(0), 2)
- val jobPathsShardedAttempt = new JobPathsWithDocker(jobKeyShardedAttempt, wd, backendConfig)
+ val jobPathsShardedAttempt = new JobPathsWithDocker(workflowPaths, jobKeyShardedAttempt)
jobPathsShardedAttempt.callExecutionRoot.pathAsString shouldBe
fullPath(s"local-cromwell-executions/wf_hello/$id/call-hello/shard-0/attempt-2/execution")
}
diff --git a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala
index c16348da9..66b48467b 100644
--- a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala
@@ -6,7 +6,7 @@ import cromwell.core.path.DefaultPathBuilder
import cromwell.core.{JobKey, WorkflowId}
import org.mockito.Mockito._
import org.scalatest.{FlatSpec, Matchers}
-import wdl4s.{Call, Workflow}
+import wdl4s.wdl.{WdlCall, WdlWorkflow}
class WorkflowPathsSpec extends FlatSpec with Matchers with BackendSpec {
@@ -29,22 +29,22 @@ class WorkflowPathsSpec extends FlatSpec with Matchers with BackendSpec {
when(backendConfig.getString(any[String])).thenReturn("local-cromwell-executions") // This is the folder defined in the config as the execution root dir
val rootWd = mock[BackendWorkflowDescriptor]
- val rootWorkflow = mock[Workflow]
+ val rootWorkflow = mock[WdlWorkflow]
val rootWorkflowId = WorkflowId.randomId()
rootWorkflow.unqualifiedName returns "rootWorkflow"
rootWd.workflow returns rootWorkflow
rootWd.id returns rootWorkflowId
val subWd = mock[BackendWorkflowDescriptor]
- val subWorkflow = mock[Workflow]
+ val subWorkflow = mock[WdlWorkflow]
val subWorkflowId = WorkflowId.randomId()
subWorkflow.unqualifiedName returns "subWorkflow"
subWd.workflow returns subWorkflow
subWd.id returns subWorkflowId
- val call1 = mock[Call]
+ val call1 = mock[WdlCall]
call1.unqualifiedName returns "call1"
- val call2 = mock[Call]
+ val call2 = mock[WdlCall]
call2.unqualifiedName returns "call2"
val jobKey = new JobKey {
diff --git a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala
index b116aae72..7c78177b3 100644
--- a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala
@@ -1,6 +1,6 @@
package cromwell.backend.standard
-import cromwell.backend.RuntimeAttributeDefinition
+import cromwell.backend.{RuntimeAttributeDefinition, TestConfig}
import cromwell.backend.validation.RuntimeAttributesKeys._
import cromwell.backend.validation._
import cromwell.core.WorkflowOptions
@@ -8,7 +8,7 @@ import org.scalatest.{Matchers, WordSpecLike}
import org.slf4j.{Logger, LoggerFactory}
import org.specs2.mock.Mockito
import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsValue}
-import wdl4s.values.{WdlBoolean, WdlInteger, WdlString, WdlValue}
+import wdl4s.wdl.values.{WdlBoolean, WdlInteger, WdlString, WdlValue}
class StandardValidatedRuntimeAttributesBuilderSpec extends WordSpecLike with Matchers with Mockito {
@@ -153,6 +153,8 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends WordSpecLike with Ma
val defaultLogger: Logger = LoggerFactory.getLogger(classOf[StandardValidatedRuntimeAttributesBuilderSpec])
val emptyWorkflowOptions: WorkflowOptions = WorkflowOptions.fromMap(Map.empty).get
+ val mockBackendRuntimeConfig = Option(TestConfig.optionalRuntimeConfig)
+
private def assertRuntimeAttributesSuccessfulCreation(runtimeAttributes: Map[String, WdlValue],
expectedRuntimeAttributes: Map[String, Any],
includeDockerSupport: Boolean = true,
@@ -160,9 +162,9 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends WordSpecLike with Ma
logger: Logger = defaultLogger): Unit = {
val builder = if (includeDockerSupport) {
- StandardValidatedRuntimeAttributesBuilder.default.withValidation(DockerValidation.optional)
+ StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig).withValidation(DockerValidation.optional)
} else {
- StandardValidatedRuntimeAttributesBuilder.default
+ StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig)
}
val runtimeAttributeDefinitions = builder.definitions.toSet
val addDefaultsToAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, workflowOptions) _
@@ -189,9 +191,9 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends WordSpecLike with Ma
logger: Logger = defaultLogger): Unit = {
val thrown = the[RuntimeException] thrownBy {
val builder = if (supportsDocker) {
- StandardValidatedRuntimeAttributesBuilder.default.withValidation(DockerValidation.optional)
+ StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig).withValidation(DockerValidation.optional)
} else {
- StandardValidatedRuntimeAttributesBuilder.default
+ StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig)
}
val runtimeAttributeDefinitions = builder.definitions.toSet
val addDefaultsToAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, workflowOptions) _
diff --git a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala
index 7420c2d89..05edd089e 100644
--- a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala
@@ -4,8 +4,8 @@ import cromwell.backend.validation.RuntimeAttributesDefault._
import cromwell.core.WorkflowOptions
import org.scalatest.{FlatSpec, Matchers}
import spray.json._
-import wdl4s.types._
-import wdl4s.values.{WdlArray, WdlBoolean, WdlInteger, WdlString}
+import wdl4s.wdl.types._
+import wdl4s.wdl.values.{WdlArray, WdlBoolean, WdlInteger, WdlString}
class RuntimeAttributesDefaultSpec extends FlatSpec with Matchers {
diff --git a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala
index 22180bb77..76e47ce2a 100644
--- a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala
+++ b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala
@@ -2,12 +2,16 @@ package cromwell.backend.validation
import cats.data.Validated.{Invalid, Valid}
import cats.syntax.validated._
+import com.typesafe.config.{Config, ConfigFactory}
+import cromwell.backend.TestConfig
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
-import wdl4s.types.{WdlArrayType, WdlIntegerType, WdlStringType}
-import wdl4s.values.{WdlArray, WdlBoolean, WdlInteger, WdlString}
+import wdl4s.wdl.types.{WdlArrayType, WdlIntegerType, WdlStringType}
+import wdl4s.wdl.values.{WdlArray, WdlBoolean, WdlInteger, WdlString}
class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with BeforeAndAfterAll {
+ val mockBackendRuntimeConfig = TestConfig.allRuntimeAttrsConfig
+
"RuntimeAttributesValidation" should {
"return success when tries to validate a valid Docker entry" in {
val dockerValue = Some(WdlString("someImage"))
@@ -33,7 +37,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateDocker(dockerValue,
"Failed to get Docker mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Failed to get Docker mandatory key from runtime attributes")
}
}
@@ -43,7 +47,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateDocker(dockerValue,
"Failed to get Docker mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting docker runtime attribute to be a String")
}
}
@@ -83,7 +87,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateFailOnStderr(failOnStderrValue,
"Failed to get failOnStderr mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'")
}
}
@@ -142,7 +146,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue,
"Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) =>
assert(e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]")
}
@@ -163,7 +167,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue,
"Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]")
}
}
@@ -193,7 +197,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateMemory(memoryValue,
"Failed to get memory mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting memory runtime attribute value greater than 0 but got -1")
}
}
@@ -214,7 +218,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateMemory(memoryValue,
"Failed to get memory mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting memory runtime attribute value greater than 0 but got 0.0")
}
}
@@ -224,7 +228,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateMemory(memoryValue,
"Failed to get memory mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. Exception: value should be of the form 'X Unit' where X is a number, e.g. 8 GB")
}
}
@@ -234,7 +238,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateMemory(memoryValue,
"Failed to get memory mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. Exception: Not supported WDL type value")
}
}
@@ -244,7 +248,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateMemory(memoryValue,
"Failed to get memory mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Failed to get memory mandatory key from runtime attributes")
}
}
@@ -264,7 +268,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateCpu(cpuValue,
"Failed to get cpu mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Expecting cpu runtime attribute value greater than 0")
}
}
@@ -274,9 +278,87 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be
val result = RuntimeAttributesValidation.validateMemory(cpuValue,
"Failed to get cpu mandatory key from runtime attributes".invalidNel)
result match {
- case Valid(x) => fail("A failure was expected.")
+ case Valid(_) => fail("A failure was expected.")
case Invalid(e) => assert(e.head == "Failed to get cpu mandatory key from runtime attributes")
}
}
+
+ "return default values as WdlValues when they can be coerced into expected WdlTypes" in {
+ val optionalConfig = Option(TestConfig.allRuntimeAttrsConfig)
+
+ val defaultVals = Map(
+ "cpu" -> CpuValidation.configDefaultWdlValue(optionalConfig).get,
+ "failOnStderr" -> FailOnStderrValidation.configDefaultWdlValue(optionalConfig).get,
+ "continueOnReturnCode" -> ContinueOnReturnCodeValidation.configDefaultWdlValue(optionalConfig).get
+ )
+
+ val expectedDefaultVals = Map(
+ "cpu" -> WdlInteger(1),
+ "failOnStderr" -> WdlBoolean(false),
+ "continueOnReturnCode" -> WdlInteger(0)
+ )
+
+ defaultVals shouldBe expectedDefaultVals
+ }
+
+ "return default values as BadDefaultAttribute when they can't be coerced to expected WdlTypes" in {
+ val optionalInvalidAttrsConfig = Option(ConfigFactory.parseString(
+ """
+ |cpu = 1.4
+ |failOnStderr = "notReal"
+ |continueOnReturnCode = 0
+ """.stripMargin))
+
+ val defaultVals = Map(
+ "cpu" -> CpuValidation.configDefaultWdlValue(optionalInvalidAttrsConfig).get,
+ "failOnStderr" -> FailOnStderrValidation.configDefaultWdlValue(optionalInvalidAttrsConfig).get,
+ "continueOnReturnCode" -> ContinueOnReturnCodeValidation.configDefaultWdlValue(optionalInvalidAttrsConfig).get
+ )
+
+ val expectedDefaultVals = Map(
+ "cpu" -> BadDefaultAttribute(WdlString("1.4")),
+ "failOnStderr" -> BadDefaultAttribute(WdlString("notReal")),
+ "continueOnReturnCode" -> WdlInteger(0)
+ )
+
+ defaultVals shouldBe expectedDefaultVals
+ }
+
+ "should parse memory successfully" in {
+ val backendConfigTemplate: String =
+ s"""
+ | default-runtime-attributes {
+ | memory: "2 GB"
+ | }
+ |""".stripMargin
+
+ val backendConfig: Config = ConfigFactory.parseString(backendConfigTemplate).getConfig("default-runtime-attributes")
+
+ val memoryVal = MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, Some(backendConfig))
+ MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryKey, memoryVal.get).runtimeAttributeDefinition.factoryDefault shouldBe Some((WdlInteger(2000000000)))
+ }
+
+ "shouldn't throw up if the value for a default-runtime-attribute key cannot be coerced into an expected WdlType" in {
+ val backendConfigTemplate: String =
+ s"""
+ | default-runtime-attributes {
+ | memory: "blahblah"
+ | }
+ |""".stripMargin
+
+ val backendConfig: Config = ConfigFactory.parseString(backendConfigTemplate).getConfig("default-runtime-attributes")
+
+ val memoryVal = MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, Some(backendConfig))
+ MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryKey, memoryVal.get).runtimeAttributeDefinition.factoryDefault shouldBe Some(BadDefaultAttribute(WdlString("blahblah")))
+ }
+
+ "should be able to coerce a list of return codes into an WdlArray" in {
+ val optinalBackendConfig = Option(ConfigFactory.parseString(
+ s"""
+ |continueOnReturnCode = [0,1,2]
+ |""".stripMargin))
+
+ ContinueOnReturnCodeValidation.configDefaultWdlValue(optinalBackendConfig).get shouldBe WdlArray(WdlArrayType(WdlIntegerType), Array(WdlInteger(0), WdlInteger(1), WdlInteger(2)))
+ }
}
}
diff --git a/backend/src/test/scala/cromwell/backend/wdl/FileSizeSpec.scala b/backend/src/test/scala/cromwell/backend/wdl/FileSizeSpec.scala
new file mode 100644
index 000000000..bc265b9aa
--- /dev/null
+++ b/backend/src/test/scala/cromwell/backend/wdl/FileSizeSpec.scala
@@ -0,0 +1,109 @@
+package cromwell.backend.wdl
+
+import java.nio.file.{Paths, Path}
+import java.nio.file.StandardOpenOption._
+import scala.util.{Failure, Success, Try}
+
+import cromwell.backend.standard.{DefaultStandardExpressionFunctionsParams, StandardExpressionFunctions}
+import cromwell.core.CallContext
+import cromwell.core.path.DefaultPathBuilder
+import org.scalatest.{FlatSpec, Matchers}
+import wdl4s.wdl.values._
+import com.google.common.io.Files
+import fs2.{Task, Stream}
+
+class FileSizeSpec extends FlatSpec with Matchers {
+ val _readLinesLimit = 4
+ val _readBoolLimit = 5
+ val _readIntLimit = 6
+ val _readFloatLimit = 7
+ val _readStringLimit = 8
+ val _readJsonLimit = 9
+ val _readTsvLimit = 10
+ val _readMapLimit = 11
+ val _readObjectLimit = 12
+
+ val rlf = {
+ val path = DefaultPathBuilder.build("/tmp").get
+
+ val dp = DefaultStandardExpressionFunctionsParams(List(cromwell.core.path.DefaultPathBuilder), CallContext(path, "stdout", "stderr"))
+
+ new StandardExpressionFunctions(dp) {
+ override val fileSizeLimitationConfig =
+ new FileSizeLimitationConfig {
+ val readLinesLimit = _readLinesLimit
+ val readIntLimit = _readIntLimit
+ val readFloatLimit = _readFloatLimit
+ val readStringLimit = _readStringLimit
+ val readJsonLimit = _readJsonLimit
+ val readTsvLimit = _readTsvLimit
+ val readMapLimit = _readMapLimit
+ val readObjectLimit = _readObjectLimit
+ val readBoolLimit = _readBoolLimit
+ }
+ }
+ }
+
+ val tempDir = Files.createTempDir
+ tempDir.deleteOnExit
+
+ def testOverUnder(command: String, n: Int, f: ReadLikeFunctions => (Seq[Try[WdlValue]] => Try[WdlValue])) = {
+
+ def testInner(n: Int, test: Try[WdlValue] => Unit) = {
+
+ def createTempFileOfSize(size: Int): Path = {
+
+ val fn = tempDir.toString + "/" + scala.util.Random.alphanumeric.take(5).mkString
+ val jPath = Paths.get(fn)
+ jPath.toFile.deleteOnExit
+ val start = Stream[Task, Byte](1).repeat.take(size.toLong)
+ val end = fs2.io.file.writeAll[Task](jPath, Seq(CREATE_NEW, WRITE))
+ (start to end).run.unsafeRunSync
+ //jPath is now a file of n bytes, we can return it
+ jPath
+ }
+
+ val file = createTempFileOfSize(n)
+ val params = Seq(Try(WdlString(file.toString)))
+
+ f(rlf)(params) match {
+ case t => test(t)
+ }
+ }
+
+ def testOver() = {
+ testInner(n + 1, {
+ case Failure(_: FileSizeTooBig) => //success
+ case t => throw new RuntimeException(s"should not have eaten this file that is too big! msg: $t")
+ })
+ }
+
+ def testUnder() = {
+ testInner(n - 1, {
+ case Success(_) =>
+ case Failure(_: NumberFormatException) => //we're not testing parsing
+ case Failure(_: UnsupportedOperationException) => //we're not testing tsv compatibility
+ case Failure(t) => throw t
+ })
+ }
+
+ //construct a test for both over and under
+ List(
+ s"read $command" should "limit according to a setting" in testOver,
+ it should "allow when under the limit" in testUnder
+ )
+ }
+
+ //test all the functions
+ List[(String, Int, ReadLikeFunctions => (Seq[Try[WdlValue]] => Try[WdlValue]))](
+ ("lines", _readLinesLimit, _.read_lines),
+ ("int", _readIntLimit, _.read_int),
+ ("map", _readMapLimit, _.read_map),
+ ("float", _readFloatLimit, _.read_float),
+ ("String", _readStringLimit, _.read_string),
+ ("tsv", _readTsvLimit, _.read_tsv),
+ ("object", _readObjectLimit, _.read_object)
+ ).flatMap {
+ (testOverUnder _).tupled
+ }
+}
diff --git a/backend/src/test/scala/cromwell/backend/wdl/ReadLikeFunctionsSpec.scala b/backend/src/test/scala/cromwell/backend/wdl/ReadLikeFunctionsSpec.scala
new file mode 100644
index 000000000..ad4716b95
--- /dev/null
+++ b/backend/src/test/scala/cromwell/backend/wdl/ReadLikeFunctionsSpec.scala
@@ -0,0 +1,99 @@
+package cromwell.backend.wdl
+
+import cromwell.core.path.PathBuilder
+import org.apache.commons.lang3.NotImplementedException
+import org.scalatest.{FlatSpec, Matchers}
+import wdl4s.wdl.expression.PureStandardLibraryFunctionsLike
+import wdl4s.wdl.types.{WdlFileType, WdlIntegerType, WdlOptionalType}
+import wdl4s.wdl.values.{WdlFloat, WdlInteger, WdlOptionalValue, WdlSingleFile, WdlString, WdlValue}
+
+import scala.util.{Failure, Success, Try}
+
+class ReadLikeFunctionsSpec extends FlatSpec with Matchers {
+
+ behavior of "ReadLikeFunctions.size"
+
+ it should "correctly report a 2048 byte file, in bytes by default" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlSingleFile("blah")))) should be(Success(WdlFloat(2048d)))
+ }
+
+ it should "correctly report a 2048 byte file, in bytes" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlSingleFile("blah")), Success(WdlString("B")))) should be(Success(WdlFloat(2048d)))
+ }
+
+ it should "correctly report a 2048 byte file, in KB" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlSingleFile("blah")), Success(WdlString("KB")))) should be(Success(WdlFloat(2.048d)))
+ }
+
+ it should "correctly report a 2048 byte file, in KiB" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlSingleFile("blah")), Success(WdlString("Ki")))) should be(Success(WdlFloat(2d)))
+ }
+
+ it should "correctly report the size of a supplied, optional, 2048 byte file" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, Some(WdlSingleFile("blah")))))) should be(Success(WdlFloat(2048d)))
+ }
+
+ it should "correctly report the size of a supplied, optional optional, 2048 byte file" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlOptionalValue(WdlOptionalType(WdlFileType), Some(WdlOptionalValue(WdlFileType, Some(WdlSingleFile("blah")))))))) should be(Success(WdlFloat(2048d)))
+ }
+
+ it should "correctly report the size of a supplied, optional, 2048 byte file, in MB" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, Some(WdlSingleFile("blah")))), Success(WdlString("MB")))) should be(Success(WdlFloat(0.002048d)))
+ }
+
+ it should "correctly report that an unsupplied optional file is empty" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, None)))) should be(Success(WdlFloat(0d)))
+ }
+
+ it should "correctly report that an unsupplied File?? is empty" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlOptionalValue(WdlOptionalType(WdlFileType), None)))) should be(Success(WdlFloat(0d)))
+ }
+
+ it should "correctly report that an unsupplied optional file is empty, even in MB" in {
+ val readLike = new TestReadLikeFunctions(Success(2048d))
+ readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, None)), Success(WdlString("MB")))) should be(Success(WdlFloat(0d)))
+ }
+
+ it should "refuse to report file sizes for Ints" in {
+ val readLike = new TestReadLikeFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size")))
+ val oops = readLike.size(Seq(Success(WdlInteger(7))))
+ oops match {
+ case Success(x) => fail(s"Expected a string to not have a file length but instead got $x")
+ case Failure(e) => e.getMessage should be("The 'size' method expects a 'File' or 'File?' argument but instead got Int.")
+ }
+ }
+
+ it should "refuse to report file sizes for Int?s" in {
+ val readLike = new TestReadLikeFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size")))
+ val oops = readLike.size(Seq(Success(WdlOptionalValue(WdlIntegerType, None))))
+ oops match {
+ case Success(x) => fail(s"Expected a string to not have a file length but instead got $x")
+ case Failure(e) => e.getMessage should be("The 'size' method expects a 'File' or 'File?' argument but instead got Int?.")
+ }
+ }
+
+ it should "pass on underlying size reading errors" in {
+ val readLike = new TestReadLikeFunctions(Failure(new Exception("'size' inner exception, expect me to be passed on")))
+ val oops = readLike.size(Seq(Success(WdlSingleFile("blah"))))
+ oops match {
+ case Success(_) => fail(s"The 'size' engine function didn't return the error generated in the inner 'size' method")
+ case Failure(e) => e.getMessage should be("'size' inner exception, expect me to be passed on")
+ }
+ }
+}
+
+
+class TestReadLikeFunctions(sizeResult: Try[Double]) extends PureStandardLibraryFunctionsLike with ReadLikeFunctions {
+ override protected def size(file: WdlValue): Try[Double] = sizeResult
+ override def pathBuilders: List[PathBuilder] = throw new NotImplementedException("Didn't expect ReadLikefunctionsSpec to need pathBuilders")
+}
+
diff --git a/build.sbt b/build.sbt
index a07ab730e..98789be3a 100644
--- a/build.sbt
+++ b/build.sbt
@@ -26,6 +26,10 @@ lazy val dockerHashing = (project in file("dockerHashing"))
.dependsOn(core % "test->test")
.withTestSettings
+lazy val cromwellApiClient = (project in file("cromwellApiClient"))
+ .settings(cromwellApiClientSettings: _*)
+ .withTestSettings
+
lazy val services = (project in file("services"))
.settings(servicesSettings:_*)
.withTestSettings
@@ -57,12 +61,6 @@ lazy val tesBackend = (project in backendRoot / "tes")
.dependsOn(sfsBackend)
.dependsOn(backend % "test->test")
-lazy val htCondorBackend = (project in backendRoot / "htcondor")
- .settings(htCondorBackendSettings:_*)
- .withTestSettings
- .dependsOn(sfsBackend)
- .dependsOn(backend % "test->test")
-
lazy val sparkBackend = (project in backendRoot / "spark")
.settings(sparkBackendSettings:_*)
.withTestSettings
@@ -106,16 +104,15 @@ lazy val root = (project in file("."))
.aggregate(services)
.aggregate(backend)
.aggregate(sfsBackend)
- .aggregate(htCondorBackend)
.aggregate(sparkBackend)
.aggregate(jesBackend)
.aggregate(tesBackend)
.aggregate(engine)
+ .aggregate(cromwellApiClient)
// Next level of projects to include in the fat jar (their dependsOn will be transitively included)
.dependsOn(engine)
.dependsOn(jesBackend)
.dependsOn(tesBackend)
- .dependsOn(htCondorBackend)
.dependsOn(sparkBackend)
// Dependencies for tests
.dependsOn(engine % "test->test")
diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf
index ad3982a4e..865559f87 100644
--- a/core/src/main/resources/reference.conf
+++ b/core/src/main/resources/reference.conf
@@ -38,7 +38,7 @@ akka {
}
# A dispatcher for engine actors
- # Because backends behaviour is unpredictable (potentially blocking, slow) the engine runs
+ # Because backends behavior is unpredictable (potentially blocking, slow) the engine runs
# on its own dispatcher to prevent backends from affecting its performance.
engine-dispatcher {
type = Dispatcher
@@ -59,11 +59,33 @@ akka {
# Note that without further configuration, all other actors run on the default dispatcher
}
+
+ coordinated-shutdown.phases {
+ abort-all-workflows {
+ # This phase is used to give time to Cromwell to abort all workflows upon shutdown.
+ # It's only used if system.abort-jobs-on-terminate = true
+ # This timeout can be adusted to give more or less time to Cromwell to abort workflows
+ timeout = 1 hour
+ depends-on = [service-unbind]
+ }
+
+ stop-io-activity{
+ # Adjust this timeout according to the maximum amount of time Cromwell
+ # should be allowed to spend flushing its database queues
+ timeout = 30 minutes
+ depends-on = [service-stop]
+ }
+ }
}
system {
# If 'true', a SIGINT will trigger Cromwell to attempt to abort all currently running jobs before exiting
- #abort-jobs-on-terminate = false
+ abort-jobs-on-terminate = false
+
+ # If 'true', a SIGTERM or SIGINT will trigger Cromwell to attempt to gracefully shutdown in server mode,
+ # in particular clearing up all queued database writes before letting the JVM shut down.
+ # The shutdown is a multi-phase process, each phase having its own configurable timeout. See the Dev Wiki for more details.
+ graceful-server-shutdown = true
# If 'true' then when Cromwell starts up, it tries to restart incomplete workflows
workflow-restart = true
@@ -82,16 +104,39 @@ system {
# Default number of cache read workers
number-of-cache-read-workers = 25
-
+
io {
# Global Throttling - This is mostly useful for GCS and can be adjusted to match
# the quota availble on the GCS API
number-of-requests = 100000
per = 100 seconds
-
+
# Number of times an I/O operation should be attempted before giving up and failing it.
number-of-attempts = 5
}
+
+ # Maximum number of input file bytes allowed in order to read each type.
+ # If exceeded a FileSizeTooBig exception will be thrown.
+ input-read-limits {
+
+ lines = 128000
+
+ bool = 7
+
+ int = 19
+
+ float = 50
+
+ string = 128000
+
+ json = 128000
+
+ tsv = 128000
+
+ map = 128000
+
+ object = 128000
+ }
}
workflow-options {
@@ -142,19 +187,29 @@ google {
#{
# name = "service-account"
# scheme = "service_account"
+ # Choose between PEM file and JSON file as a credential format. They're mutually exclusive.
+ # PEM format:
# service-account-id = "my-service-account"
# pem-file = "/path/to/file.pem"
+ # JSON format:
+ # json-file = "/path/to/file.json"
#}
]
}
docker {
- // Set this to match your available quota against the Google Container Engine API
- gcr-api-queries-per-100-seconds = 1000
- // Time in minutes before an entry expires from the docker hashes cache and needs to be fetched again
- cache-entry-ttl = "20 minutes"
- // Maximum number of elements to be kept in the cache. If the limit is reached, old elements will be removed from the cache
- cache-size = 200
+ hash-lookup {
+ // Set this to match your available quota against the Google Container Engine API
+ gcr-api-queries-per-100-seconds = 1000
+ // Time in minutes before an entry expires from the docker hashes cache and needs to be fetched again
+ cache-entry-ttl = "20 minutes"
+ // Maximum number of elements to be kept in the cache. If the limit is reached, old elements will be removed from the cache
+ cache-size = 200
+ // How should docker hashes be looked up. Possible values are "local" and "remote"
+ // "local": Lookup hashes on the local docker daemon using the cli
+ // "remote": Lookup hashes on docker hub and gcr
+ method = "remote"
+ }
}
engine {
@@ -166,11 +221,14 @@ engine {
# }
# You will need to provide the engine with a gcs filesystem
# Note that the default filesystem (local) is always available.
- #filesystems {
- # gcs {
- # auth = "application-default"
- # }
- #}
+ filesystems {
+ # gcs {
+ # auth = "application-default"
+ # }
+ local {
+ enabled: true
+ }
+ }
}
backend {
@@ -201,11 +259,13 @@ backend {
docker run \
--rm -i \
${"--user " + docker_user} \
+ --entrypoint /bin/bash \
-v ${cwd}:${docker_cwd} \
- ${docker} \
- /bin/bash ${script}
+ ${docker} ${script}
"""
+
+
# Root directory where Cromwell writes job results. This directory must be
# visible and writeable by the Cromwell process as well as the jobs that Cromwell
# launches.
@@ -235,6 +295,11 @@ backend {
}
}
}
+
+ default-runtime-attributes {
+ failOnStderr: false
+ continueOnReturnCode: 0
+ }
}
}
@@ -244,6 +309,13 @@ backend {
# root = "cromwell-executions"
# dockerRoot = "/cromwell-executions"
# endpoint = "http://127.0.0.1:9000/v1/jobs"
+ # default-runtime-attributes {
+ # cpu: 1
+ # failOnStderr: false
+ # continueOnReturnCode: 0
+ # memory: "2 GB"
+ # disk: "2 GB"
+ # }
# }
#}
@@ -292,7 +364,7 @@ backend {
# job-id-regex = "Job <(\\d+)>.*"
# }
#}
-
+
#SLURM {
# actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
# config {
@@ -326,48 +398,62 @@ backend {
#}
#HtCondor {
- # actor-factory = "cromwell.backend.impl.htcondor.HtCondorBackendFactory"
+ # actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
# config {
- # # Root directory where Cromwell writes job results. This directory must be
- # # visible and writeable by the Cromwell process as well as the jobs that Cromwell
- # # launches.
- # root: "cromwell-executions"
+ # runtime-attributes = """
+ # Int cpu = 1
+ # Float memory_mb = 512.0
+ # Float disk_kb = 256000.0
+ # String? nativeSpecs
+ # String? docker
+ # """
#
- # #Placeholders:
- # #1. Working directory.
- # #2. Working directory volume.
- # #3. Inputs volumes.
- # #4. Output volume.
- # #5. Docker image.
- # #6. Job command.
- # docker {
- # #Allow soft links in dockerized jobs
- # cmd = "docker run -w %s %s %s %s --rm %s /bin/bash -c \"%s\""
- # defaultWorkingDir = "/workingDir/"
- # defaultOutputDir = "/output/"
- # }
+ # submit = """
+ # chmod 755 ${script}
+ # cat > ${cwd}/execution/submitFile < ${cwd}/execution/dockerScript < ${cwd}/execution/submitFile < v.getKey }
+ def keys = config.entrySet().asScala.toSet map { v: java.util.Map.Entry[String, ConfigValue] => v.getKey }
/**
* For keys that are in the configuration but not in the reference keySet, log a warning.
@@ -37,14 +37,14 @@ object ConfigUtil {
def validateString(key: String): ValidatedNel[String, String] = try {
config.getString(key).validNel
} catch {
- case e: ConfigException.Missing => s"Could not find key: $key".invalidNel
+ case _: ConfigException.Missing => s"Could not find key: $key".invalidNel
}
def validateConfig(key: String): ValidatedNel[String, Config] = try {
config.getConfig(key).validNel
} catch {
- case e: ConfigException.Missing => s"Could not find key: $key".invalidNel
- case e: ConfigException.WrongType => s"key $key cannot be parsed to a Config".invalidNel
+ case _: ConfigException.Missing => s"Could not find key: $key".invalidNel
+ case _: ConfigException.WrongType => s"key $key cannot be parsed to a Config".invalidNel
}
}
diff --git a/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala b/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala
index 52af92e5c..79e1621ca 100644
--- a/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala
+++ b/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala
@@ -5,7 +5,7 @@ import akka.actor.{ActorInitializationException, OneForOneStrategy, SupervisorSt
class CromwellUserGuardianStrategy extends SupervisorStrategyConfigurator {
override def create(): SupervisorStrategy = OneForOneStrategy() {
- case aie: ActorInitializationException => Escalate
+ case _: ActorInitializationException => Escalate
case t => akka.actor.SupervisorStrategy.defaultDecider.applyOrElse(t, (_: Any) => Escalate)
}
}
diff --git a/core/src/main/scala/cromwell/core/DockerConfiguration.scala b/core/src/main/scala/cromwell/core/DockerConfiguration.scala
new file mode 100644
index 000000000..ca0f872fc
--- /dev/null
+++ b/core/src/main/scala/cromwell/core/DockerConfiguration.scala
@@ -0,0 +1,45 @@
+package cromwell.core
+
+import com.typesafe.config.ConfigFactory
+
+import scala.concurrent.duration.FiniteDuration
+import cats.data.Validated._
+import cats.syntax.cartesian._
+import lenthall.exception.AggregatedMessageException
+import net.ceedubs.ficus.Ficus._
+import lenthall.validation.Validation._
+
+object DockerConfiguration {
+ private lazy val dockerConfig = ConfigFactory.load().getConfig("docker")
+ private lazy val dockerHashLookupConfig = dockerConfig.getConfig("hash-lookup")
+
+ lazy val instance: DockerConfiguration = {
+ val gcrApiQueriesPer100Seconds = validate { dockerHashLookupConfig.as[Int]("gcr-api-queries-per-100-seconds") }
+ val cacheEntryTtl = validate { dockerHashLookupConfig.as[FiniteDuration]("cache-entry-ttl") }
+ val cacheSize = validate { dockerHashLookupConfig.as[Long]("cache-size") }
+ val method = validate { dockerHashLookupConfig.as[String]("method") } map {
+ case "local" => DockerLocalLookup
+ case "remote" => DockerRemoteLookup
+ case other => throw new IllegalArgumentException(s"Unrecognized docker hash lookup method: $other")
+ }
+
+ val dockerConfiguration = (gcrApiQueriesPer100Seconds |@| cacheEntryTtl |@| cacheSize |@| method) map DockerConfiguration.apply
+
+ dockerConfiguration match {
+ case Valid(conf) => conf
+ case Invalid(errors) => throw AggregatedMessageException("Invalid docker configuration", errors.toList)
+ }
+ }
+}
+
+case class DockerConfiguration(
+ gcrApiQueriesPer100Seconds: Int,
+ cacheEntryTtl: FiniteDuration,
+ cacheSize: Long,
+ method: DockerHashLookupMethod
+ )
+
+sealed trait DockerHashLookupMethod
+
+case object DockerLocalLookup extends DockerHashLookupMethod
+case object DockerRemoteLookup extends DockerHashLookupMethod
diff --git a/core/src/main/scala/cromwell/core/DockerCredentials.scala b/core/src/main/scala/cromwell/core/DockerCredentials.scala
deleted file mode 100644
index cfe7be01f..000000000
--- a/core/src/main/scala/cromwell/core/DockerCredentials.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-package cromwell.core
-
-import com.typesafe.config.Config
-import cromwell.core.ConfigUtil._
-
-/**
- * Encapsulate docker credential information.
- */
-case class DockerCredentials(account: String, token: String)
-
-case class DockerHubConfiguration(namespace: String, v1Registry: String, v2Registry: String)
-
-case class DockerConfiguration(dockerCredentials: Option[DockerCredentials], dockerHubConf: DockerHubConfiguration)
-
-/**
- * Singleton encapsulating a DockerConf instance.
- */
-object DockerConfiguration {
-
- private val dockerKeys = Set("account", "token")
-
- def build(config: Config) = {
- import net.ceedubs.ficus.Ficus._
- val dockerConf: Option[DockerCredentials] = for {
- dockerConf <- config.as[Option[Config]]("dockerhub")
- _ = dockerConf.warnNotRecognized(dockerKeys, "dockerhub")
- account <- dockerConf.validateString("account").toOption
- token <- dockerConf.validateString("token").toOption
- } yield DockerCredentials(account, token)
-
- val dockerHubConf = {
- DockerHubConfiguration(
- namespace = config.as[Option[String]]("docker.hub.namespace").getOrElse("docker.io"),
- v1Registry = config.as[Option[String]]("docker.hub.v1Registry").getOrElse("index.docker.io"),
- v2Registry = config.as[Option[String]]("docker.hub.v2Registry").getOrElse("registry-1.docker.io")
- )
- }
- new DockerConfiguration(dockerConf, dockerHubConf)
- }
-}
diff --git a/core/src/main/scala/cromwell/core/ExecutionStatus.scala b/core/src/main/scala/cromwell/core/ExecutionStatus.scala
index 652a9995b..b4ee84f9e 100644
--- a/core/src/main/scala/cromwell/core/ExecutionStatus.scala
+++ b/core/src/main/scala/cromwell/core/ExecutionStatus.scala
@@ -3,12 +3,27 @@ package cromwell.core
object ExecutionStatus extends Enumeration {
type ExecutionStatus = Value
val NotStarted, QueuedInCromwell, Starting, Running, Failed, RetryableFailure, Done, Bypassed, Aborted = Value
- val TerminalStatuses = Set(Failed, Done, Aborted, RetryableFailure, Bypassed)
+ val TerminalStatuses = Set(Failed, Done, Aborted, Bypassed)
+ val TerminalOrRetryableStatuses = TerminalStatuses + RetryableFailure
- implicit class EnhancedExecutionStatus(val status: ExecutionStatus) extends AnyVal {
- def isTerminal: Boolean = {
- TerminalStatuses contains status
+ implicit val ExecutionStatusOrdering = Ordering.by { status: ExecutionStatus =>
+ status match {
+ case NotStarted => 0
+ case QueuedInCromwell => 1
+ case Starting => 2
+ case Running => 3
+ case Aborted => 4
+ case Bypassed => 5
+ case RetryableFailure => 6
+ case Failed => 7
+ case Done => 8
}
+ }
+
+ implicit class EnhancedExecutionStatus(val status: ExecutionStatus) extends AnyVal {
+ def isTerminal: Boolean = TerminalStatuses contains status
+
+ def isTerminalOrRetryable: Boolean = TerminalOrRetryableStatuses contains status
def isDoneOrBypassed: Boolean = status == Done || status == Bypassed
}
diff --git a/core/src/main/scala/cromwell/core/JobKey.scala b/core/src/main/scala/cromwell/core/JobKey.scala
index 9fd22b31e..f230134ce 100644
--- a/core/src/main/scala/cromwell/core/JobKey.scala
+++ b/core/src/main/scala/cromwell/core/JobKey.scala
@@ -1,9 +1,9 @@
package cromwell.core
-import wdl4s.{GraphNode, Scope}
+import wdl4s.wdl.{Scope, WdlGraphNode}
trait JobKey {
- def scope: Scope with GraphNode
+ def scope: Scope with WdlGraphNode
def index: Option[Int]
def attempt: Int
def tag: String
diff --git a/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala b/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala
new file mode 100644
index 000000000..53f0e4de6
--- /dev/null
+++ b/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala
@@ -0,0 +1,46 @@
+package cromwell.core
+
+import akka.actor.{Actor, ActorLogging, ActorRef, Props}
+import cromwell.core.MonitoringCompanionActor._
+import cromwell.util.GracefulShutdownHelper.ShutdownCommand
+
+import scala.concurrent.duration._
+import scala.language.postfixOps
+
+object MonitoringCompanionActor {
+ sealed trait MonitoringCompanionCommand
+ private [core] case object AddWork extends MonitoringCompanionCommand
+ private [core] case object RemoveWork extends MonitoringCompanionCommand
+ private [core] def props(actorToMonitor: ActorRef) = Props(new MonitoringCompanionActor(actorToMonitor))
+}
+
+private [core] class MonitoringCompanionActor(actorToMonitor: ActorRef) extends Actor with ActorLogging {
+ private var workCount: Int = 0
+
+ override def receive = {
+ case AddWork => workCount += 1
+ case RemoveWork => workCount -= 1
+ case ShutdownCommand if workCount <= 0 =>
+ context stop actorToMonitor
+ context stop self
+ case ShutdownCommand =>
+ log.info(s"{} is still processing {} messages", actorToMonitor.path.name, workCount)
+ context.system.scheduler.scheduleOnce(1 second, self, ShutdownCommand)(context.dispatcher)
+ ()
+ }
+}
+
+trait MonitoringCompanionHelper { this: Actor =>
+ private val monitoringActor = context.actorOf(MonitoringCompanionActor.props(self))
+ private var shuttingDown: Boolean = false
+
+ def addWork() = monitoringActor ! AddWork
+ def removeWork() = monitoringActor ! RemoveWork
+
+ val monitoringReceive: Receive = {
+ case ShutdownCommand if !shuttingDown =>
+ shuttingDown = true
+ monitoringActor ! ShutdownCommand
+ case ShutdownCommand => // Ignore if we're already shutting down
+ }
+}
diff --git a/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala b/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala
index 922be7ffb..00afb2b61 100644
--- a/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala
+++ b/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala
@@ -20,4 +20,9 @@ object WorkflowMetadataKeys {
val SubmissionSection_Inputs = "inputs"
val SubmissionSection_Options = "options"
val SubmissionSection_Imports = "imports"
+ val SubmissionSection_WorkflowType = "workflowType"
+ val SubmissionSection_Labels = "labels"
+ val SubmissionSection_WorkflowTypeVersion = "workflowTypeVersion"
+
+ val Labels = "labels"
}
diff --git a/core/src/main/scala/cromwell/core/WorkflowOptions.scala b/core/src/main/scala/cromwell/core/WorkflowOptions.scala
index 709e667e1..701ea5428 100644
--- a/core/src/main/scala/cromwell/core/WorkflowOptions.scala
+++ b/core/src/main/scala/cromwell/core/WorkflowOptions.scala
@@ -152,7 +152,7 @@ case class WorkflowOptions(jsObject: JsObject) {
}
lazy val defaultRuntimeOptions = jsObject.fields.get(defaultRuntimeOptionKey) match {
- case Some(jsObj: JsObject) => TryUtil.sequenceMap(jsObj.fields map { case (k, v) => k -> WorkflowOptions.getAsJson(k, jsObj) })
+ case Some(jsObj: JsObject) => TryUtil.sequenceMap(jsObj.fields map { case (k, _) => k -> WorkflowOptions.getAsJson(k, jsObj) })
case Some(jsVal) => Failure(new IllegalArgumentException(s"Unsupported JsValue for $defaultRuntimeOptionKey: $jsVal. Expected a JSON object."))
case None => Failure(OptionNotFoundException(s"Cannot find definition for default runtime attributes"))
}
diff --git a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala
index 55fa68b47..281cdf653 100644
--- a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala
+++ b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala
@@ -1,59 +1,58 @@
package cromwell.core
-import wdl4s.{WdlJson, WdlSource}
+import wdl4s.wdl.{WorkflowJson, WorkflowSource}
/**
* Represents the collection of source files that a user submits to run a workflow
*/
sealed trait WorkflowSourceFilesCollection {
- def wdlSource: WdlSource
- def inputsJson: WdlJson
+ def workflowSource: WorkflowSource
+ def inputsJson: WorkflowJson
def workflowOptionsJson: WorkflowOptionsJson
- def labelsJson: WdlJson
-
+ def labelsJson: WorkflowJson
+ def workflowType: Option[WorkflowType]
+ def workflowTypeVersion: Option[WorkflowTypeVersion]
def importsZipFileOption: Option[Array[Byte]] = this match {
case _: WorkflowSourceFilesWithoutImports => None
- case WorkflowSourceFilesWithDependenciesZip(_, _, _, _, importsZip) => Option(importsZip) // i.e. Some(importsZip) if our wiring is correct
+ case w: WorkflowSourceFilesWithDependenciesZip => Option(w.importsZip) // i.e. Some(importsZip) if our wiring is correct
}
def copyOptions(workflowOptions: WorkflowOptionsJson) = this match {
- case w: WorkflowSourceFilesWithoutImports => WorkflowSourceFilesWithoutImports(
- wdlSource = w.wdlSource,
- inputsJson = w.inputsJson,
- workflowOptionsJson = workflowOptions,
- labelsJson = w.labelsJson)
-
- case w: WorkflowSourceFilesWithDependenciesZip => WorkflowSourceFilesWithDependenciesZip(
- wdlSource = w.wdlSource,
- inputsJson = w.inputsJson,
- workflowOptionsJson = workflowOptions,
- labelsJson = w.labelsJson,
- importsZip = w.importsZip)
+ case w: WorkflowSourceFilesWithoutImports => w.copy(workflowOptionsJson = workflowOptions)
+ case w: WorkflowSourceFilesWithDependenciesZip => w.copy(workflowOptionsJson = workflowOptions)
}
}
object WorkflowSourceFilesCollection {
- def apply(wdlSource: WdlSource,
- inputsJson: WdlJson,
+ def apply(workflowSource: WorkflowSource,
+ workflowType: Option[WorkflowType],
+ workflowTypeVersion: Option[WorkflowTypeVersion],
+ inputsJson: WorkflowJson,
workflowOptionsJson: WorkflowOptionsJson,
- labelsJson: WdlJson,
+ labelsJson: WorkflowJson,
importsFile: Option[Array[Byte]]): WorkflowSourceFilesCollection = importsFile match {
- case Some(imports) => WorkflowSourceFilesWithDependenciesZip(wdlSource, inputsJson, workflowOptionsJson, labelsJson, imports)
- case None => WorkflowSourceFilesWithoutImports(wdlSource, inputsJson, workflowOptionsJson, labelsJson)
+ case Some(imports) =>
+ WorkflowSourceFilesWithDependenciesZip(workflowSource, workflowType, workflowTypeVersion, inputsJson, workflowOptionsJson, labelsJson, imports)
+ case None =>
+ WorkflowSourceFilesWithoutImports(workflowSource, workflowType, workflowTypeVersion, inputsJson, workflowOptionsJson, labelsJson)
}
}
-final case class WorkflowSourceFilesWithoutImports(wdlSource: WdlSource,
- inputsJson: WdlJson,
+final case class WorkflowSourceFilesWithoutImports(workflowSource: WorkflowSource,
+ workflowType: Option[WorkflowType],
+ workflowTypeVersion: Option[WorkflowTypeVersion],
+ inputsJson: WorkflowJson,
workflowOptionsJson: WorkflowOptionsJson,
- labelsJson: WdlJson) extends WorkflowSourceFilesCollection
+ labelsJson: WorkflowJson) extends WorkflowSourceFilesCollection
-final case class WorkflowSourceFilesWithDependenciesZip(wdlSource: WdlSource,
- inputsJson: WdlJson,
+final case class WorkflowSourceFilesWithDependenciesZip(workflowSource: WorkflowSource,
+ workflowType: Option[WorkflowType],
+ workflowTypeVersion: Option[WorkflowTypeVersion],
+ inputsJson: WorkflowJson,
workflowOptionsJson: WorkflowOptionsJson,
- labelsJson: WdlJson,
+ labelsJson: WorkflowJson,
importsZip: Array[Byte]) extends WorkflowSourceFilesCollection {
- override def toString = s"WorkflowSourceFilesWithDependenciesZip($wdlSource, $inputsJson, $workflowOptionsJson, $labelsJson, <>)"
+ override def toString = s"WorkflowSourceFilesWithDependenciesZip($workflowSource, $inputsJson, $workflowOptionsJson, $labelsJson, <>)"
}
diff --git a/core/src/main/scala/cromwell/core/WorkflowState.scala b/core/src/main/scala/cromwell/core/WorkflowState.scala
index 41ac2bb97..98cef7896 100644
--- a/core/src/main/scala/cromwell/core/WorkflowState.scala
+++ b/core/src/main/scala/cromwell/core/WorkflowState.scala
@@ -10,9 +10,9 @@ sealed trait WorkflowState {
}
object WorkflowState {
- private lazy val WorkflowState = Seq(WorkflowSubmitted, WorkflowRunning, WorkflowFailed, WorkflowSucceeded, WorkflowAborting, WorkflowAborted)
+ private lazy val WorkflowStateValues = Seq(WorkflowSubmitted, WorkflowRunning, WorkflowFailed, WorkflowSucceeded, WorkflowAborting, WorkflowAborted)
- def fromString(str: String): WorkflowState = WorkflowState.find(_.toString == str).getOrElse(
+ def withName(str: String): WorkflowState = WorkflowStateValues.find(_.toString == str).getOrElse(
throw new NoSuchElementException(s"No such WorkflowState: $str"))
implicit val WorkflowStateSemigroup = new Semigroup[WorkflowState] {
diff --git a/core/src/main/scala/cromwell/core/actor/BatchingDbWriter.scala b/core/src/main/scala/cromwell/core/actor/BatchingDbWriter.scala
new file mode 100644
index 000000000..db5be437b
--- /dev/null
+++ b/core/src/main/scala/cromwell/core/actor/BatchingDbWriter.scala
@@ -0,0 +1,106 @@
+package cromwell.core.actor
+
+import akka.actor.{ActorRef, Cancellable, FSM}
+import cats.data.NonEmptyVector
+import cromwell.core.actor.BatchingDbWriter._
+import cromwell.util.GracefulShutdownHelper.ShutdownCommand
+import org.slf4j.LoggerFactory
+
+import scala.util.{Failure, Success, Try}
+
+
+/** A collection of state, data, and message types to support batched database writes. */
+object BatchingDbWriter {
+ val logger = LoggerFactory.getLogger("BatchingDbWriteActor")
+
+ /** Data for batched database writes. */
+ sealed trait BatchingDbWriterData {
+ def addData[D](datum: D): BatchingDbWriterData = addData(Vector(datum))
+ def addData[D](data: Iterable[D]): BatchingDbWriterData = {
+ Try(NonEmptyVector.fromVector(data.toVector)) match {
+ case Success(Some(v)) =>
+ val newEvents = this match {
+ case NoData => v
+ case HasData(e) => e.concatNev(v)
+ }
+ HasData(newEvents)
+ case Success(None) => this
+ case Failure(f) =>
+ val dataSample = data.take(3).mkString(", ") + (if (data.size > 3) ", ..." else "")
+ logger.error(s"Failed processing batched data: $dataSample. Data will be dropped and not be sent to the database.", f)
+ this
+ }
+ }
+
+ def length: Int = this match {
+ case NoData => 0
+ case HasData(e) => e.length
+ }
+ }
+
+ case object NoData extends BatchingDbWriterData
+ case class HasData[E](events: NonEmptyVector[E]) extends BatchingDbWriterData
+
+ /** The states for batched database writes. */
+ sealed trait BatchingDbWriterState
+ case object WaitingToWrite extends BatchingDbWriterState
+ case object WritingToDb extends BatchingDbWriterState
+
+ /** The message types for batched database writes. */
+ sealed trait BatchingDbWriterMessage
+ case object DbWriteComplete extends BatchingDbWriterMessage
+ case object FlushBatchToDb extends BatchingDbWriterMessage
+ case object ScheduledFlushToDb extends BatchingDbWriterMessage
+
+ case class CommandAndReplyTo[C](command: C, replyTo: ActorRef)
+}
+
+/**
+ * Trait that contains some common batch-related and graceful shutdown logic.
+ * Be careful NOT to add a custom whenUnhandled state function when mixing in this trait as it will override the
+ * graceful shutdown handling logic.
+ *
+ * Note that there is more common logic that could be abstracted here.
+ */
+trait BatchingDbWriterActor { this: FSM[BatchingDbWriterState, BatchingDbWriterData] =>
+ import scala.concurrent.duration._
+
+ private var shuttingDown: Boolean = false
+
+ def isShuttingDown: Boolean = shuttingDown
+ def dbFlushRate: FiniteDuration
+ var periodicFlush: Option[Cancellable] = None
+
+ override def preStart(): Unit = {
+ periodicFlush = Option(context.system.scheduler.schedule(0.seconds, dbFlushRate, self, ScheduledFlushToDb)(context.dispatcher))
+ }
+
+ /**
+ * WhenUnhandled state function that handles reception of ShutdownCommand and acts appropriately
+ */
+ private val whenUnhandledFunction: StateFunction = {
+ case Event(ShutdownCommand, NoData) if stateName == WaitingToWrite =>
+ periodicFlush foreach { _.cancel() }
+ context stop self
+ stay()
+ case Event(ShutdownCommand, _) if stateName == WaitingToWrite =>
+ logger.info("{} flushing database writes...", self.path.name)
+ shuttingDown = true
+ // transitioning to WritingToDb triggers a FlushBatchToDb to be sent to self
+ goto(WritingToDb)
+ case Event(ShutdownCommand, _) if stateName == WritingToDb =>
+ logger.info("{} waiting for database writes to be flushed...", self.path.name)
+ shuttingDown = true
+ stay()
+ }
+
+ whenUnhandled(whenUnhandledFunction)
+
+ onTransition {
+ case WaitingToWrite -> WritingToDb => self ! FlushBatchToDb
+ // When transitioning back to WaitingToWrite, if there's no data left to process, and we're trying to shutdown, then stop
+ case _ -> WaitingToWrite if shuttingDown && nextStateData == NoData =>
+ periodicFlush foreach { _.cancel() }
+ context stop self
+ }
+}
diff --git a/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala b/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala
index 579c6a6a7..ff413ee78 100644
--- a/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala
+++ b/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala
@@ -24,7 +24,7 @@ trait RobustClientHelper { this: Actor with ActorLogging =>
protected def backpressureTimeout: FiniteDuration = 10 seconds
protected def backpressureRandomizerFactor: Double = 0.5D
- private [core] def robustReceive: Receive = {
+ def robustReceive: Receive = {
case BackPressure(request) =>
val snd = sender()
newTimer(request, snd, generateBackpressureTime)
@@ -37,7 +37,7 @@ trait RobustClientHelper { this: Actor with ActorLogging =>
context.system.scheduler.scheduleOnce(in, to, msg)(robustActorHelperEc, self)
}
- private [core] def robustSend(msg: Any, to: ActorRef, timeout: FiniteDuration = DefaultRequestLostTimeout): Unit = {
+ def robustSend(msg: Any, to: ActorRef, timeout: FiniteDuration = DefaultRequestLostTimeout): Unit = {
to ! msg
addTimeout(msg, to, timeout)
}
diff --git a/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala b/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala
index c595a2ad1..cb4dd6e67 100644
--- a/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala
+++ b/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala
@@ -7,6 +7,7 @@ import akka.stream.QueueOfferResult.{Dropped, Enqueued, QueueClosed}
import akka.stream.scaladsl.{Sink, Source, SourceQueueWithComplete}
import cromwell.core.actor.StreamActorHelper.{ActorRestartException, StreamCompleted, StreamFailed}
import cromwell.core.actor.StreamIntegration._
+import cromwell.util.GracefulShutdownHelper.ShutdownCommand
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
@@ -19,7 +20,7 @@ object StreamActorHelper {
trait StreamActorHelper[T <: StreamContext] { this: Actor with ActorLogging =>
- implicit val ec: ExecutionContext = context.system.dispatcher
+ implicit def ec: ExecutionContext
implicit def materializer: ActorMaterializer
@@ -46,10 +47,8 @@ trait StreamActorHelper[T <: StreamContext] { this: Actor with ActorLogging =>
override def preStart(): Unit = {
stream.watchCompletion() onComplete {
- case Success(_) =>
- self ! StreamCompleted
- case Failure(failure) =>
- self ! StreamFailed(failure)
+ case Success(_) => self ! StreamCompleted
+ case Failure(failure) => self ! StreamFailed(failure)
}
}
@@ -71,17 +70,17 @@ trait StreamActorHelper[T <: StreamContext] { this: Actor with ActorLogging =>
}
private def streamReceive: Receive = {
- case EnqueueResponse(Enqueued, commandContext: T @unchecked) => // Good !
+ case ShutdownCommand => stream.complete()
+ case EnqueueResponse(Enqueued, _: T @unchecked) => // Good !
case EnqueueResponse(Dropped, commandContext) => backpressure(commandContext)
- // In any of the cases below, the stream is in a failed state, which will he caught by the watchCompletion hook and the
+ // In any of the cases below, the stream is in a failed state, which will be caught by the watchCompletion hook and the
// actor will be restarted
case EnqueueResponse(QueueClosed, commandContext) => backpressure(commandContext)
- case EnqueueResponse(QueueOfferResult.Failure(failure), commandContext) => backpressure(commandContext)
- case FailedToEnqueue(throwable, commandContext) => backpressure(commandContext)
+ case EnqueueResponse(QueueOfferResult.Failure(_), commandContext) => backpressure(commandContext)
+ case FailedToEnqueue(_, commandContext) => backpressure(commandContext)
- // Those 2 cases should never happen, as long as the strategy is Resume, but in case it does...
- case StreamCompleted => restart(new IllegalStateException("Stream was completed unexepectedly"))
+ case StreamCompleted => context stop self
case StreamFailed(failure) => restart(failure)
}
diff --git a/core/src/main/scala/cromwell/core/callcaching/CallCachingEligibility.scala b/core/src/main/scala/cromwell/core/callcaching/CallCachingEligibility.scala
deleted file mode 100644
index f39d82e19..000000000
--- a/core/src/main/scala/cromwell/core/callcaching/CallCachingEligibility.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-package cromwell.core.callcaching
-
-sealed trait CallCachingEligibility
-case object CallCachingEligible extends CallCachingEligibility
-sealed trait CallCachingIneligible extends CallCachingEligibility {
- def message: String
-}
-
-case class FloatingDockerTagWithHash(hash: String) extends CallCachingIneligible {
- override val message = s"""You are using a floating docker tag in this task. Cromwell does not consider tasks with floating tags to be eligible for call caching.
- |If you want this task to be eligible for call caching in the future, use a docker runtime attribute with a digest instead.
- |This is the exact docker image that was used for this job: $hash
- |You can replace the docker runtime attribute in your task with the above value to make this task eligible for call caching.""".stripMargin
-}
-
-case object FloatingDockerTagWithoutHash extends CallCachingIneligible {
- override val message = s"""You are using a floating docker tag in this task. Cromwell does not consider tasks with floating tags to be eligible for call caching.
- |If you want this task to be eligible for call caching in the future, use a docker runtime attribute with a digest instead.
- |Cromwell attempted to retrieve the current hash for this docker image but failed.
- |This is not necessarily a cause for concern as Cromwell is currently only able to retrieve hashes for Dockerhub and GCR images.
- |The job will be dispatched to the appropriate backend that will attempt to run it.""".stripMargin
-}
diff --git a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala
index a604cc9b7..9233061cd 100644
--- a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala
+++ b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala
@@ -1,6 +1,14 @@
package cromwell.core.callcaching
-case class HashKey(key: String, checkForHitOrMiss: Boolean = true)
+
+object HashKey {
+ def apply(keyComponents: String*) = new HashKey(true, keyComponents.toList)
+ def apply(checkForHitOrMiss: Boolean, keyComponents: String*) = new HashKey(checkForHitOrMiss, keyComponents.toList)
+}
+
+case class HashKey(checkForHitOrMiss: Boolean, keyComponents: List[String]) {
+ val key = keyComponents.mkString(": ")
+}
case class HashValue(value: String)
case class HashResult(hashKey: HashKey, hashValue: HashValue)
@@ -8,5 +16,6 @@ sealed trait HashResultMessage
trait SuccessfulHashResultMessage extends HashResultMessage {
def hashes: Set[HashResult]
}
-case class HashingFailedMessage(key: HashKey, reason: Throwable) extends HashResultMessage
+case class MultiHashingFailedMessage(keys: Set[HashKey], reason: Throwable) extends HashResultMessage
+case class HashingFailedMessage(file: String, reason: Throwable) extends HashResultMessage
case object HashingServiceUnvailable extends HashResultMessage
diff --git a/core/src/main/scala/cromwell/core/callcaching/MaybeCallCachingEligible.scala b/core/src/main/scala/cromwell/core/callcaching/MaybeCallCachingEligible.scala
new file mode 100644
index 000000000..c2460e1b8
--- /dev/null
+++ b/core/src/main/scala/cromwell/core/callcaching/MaybeCallCachingEligible.scala
@@ -0,0 +1,19 @@
+package cromwell.core.callcaching
+
+sealed trait MaybeCallCachingEligible {
+ def dockerHash: Option[String]
+}
+
+sealed trait CallCachingEligible extends MaybeCallCachingEligible
+sealed trait CallCachingIneligible extends MaybeCallCachingEligible
+
+case object NoDocker extends CallCachingEligible {
+ override def dockerHash: Option[String] = None
+}
+case class DockerWithHash(dockerAttribute: String) extends CallCachingEligible {
+ override def dockerHash: Option[String] = Option(dockerAttribute)
+}
+
+case class FloatingDockerTagWithoutHash(dockerTag: String) extends CallCachingIneligible {
+ override def dockerHash: Option[String] = None
+}
diff --git a/core/src/main/scala/cromwell/core/core.scala b/core/src/main/scala/cromwell/core/core.scala
index 8cfe7c2f1..7a5831bbe 100644
--- a/core/src/main/scala/cromwell/core/core.scala
+++ b/core/src/main/scala/cromwell/core/core.scala
@@ -2,7 +2,7 @@ package cromwell.core
import cromwell.core.path.Path
import lenthall.exception.ThrowableAggregation
-import wdl4s.values.WdlValue
+import wdl4s.wdl.values.WdlValue
case class CallContext(root: Path, stdout: String, stderr: String)
diff --git a/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala b/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala
new file mode 100644
index 000000000..a05961b4a
--- /dev/null
+++ b/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala
@@ -0,0 +1,25 @@
+package cromwell.core.io
+
+import better.files.File.OpenOptions
+import cromwell.core.path.Path
+
+object DefaultIoCommand {
+ case class DefaultIoCopyCommand(override val source: Path,
+ override val destination: Path,
+ override val overwrite: Boolean) extends IoCopyCommand(
+ source, destination, overwrite
+ )
+ case class DefaultIoContentAsStringCommand(override val file: Path) extends IoContentAsStringCommand(file)
+ case class DefaultIoSizeCommand(override val file: Path) extends IoSizeCommand(file)
+ case class DefaultIoWriteCommand(override val file: Path,
+ override val content: String,
+ override val openOptions: OpenOptions) extends IoWriteCommand(
+ file, content, openOptions
+ )
+ case class DefaultIoDeleteCommand(override val file: Path,
+ override val swallowIOExceptions: Boolean) extends IoDeleteCommand(
+ file, swallowIOExceptions
+ )
+ case class DefaultIoHashCommand(override val file: Path) extends IoHashCommand(file)
+ case class DefaultIoTouchCommand(override val file: Path) extends IoTouchCommand(file)
+}
diff --git a/core/src/main/scala/cromwell/core/io/IoClientHelper.scala b/core/src/main/scala/cromwell/core/io/IoClientHelper.scala
index 01cdabf2a..b6f13b50e 100644
--- a/core/src/main/scala/cromwell/core/io/IoClientHelper.scala
+++ b/core/src/main/scala/cromwell/core/io/IoClientHelper.scala
@@ -19,7 +19,11 @@ trait IoClientHelper extends RobustClientHelper { this: Actor with ActorLogging
def ioReceive = robustReceive orElse ioResponseReceive
- def sendIoCommand(ioCommand: IoCommand[_], timeout: FiniteDuration = RobustClientHelper.DefaultRequestLostTimeout) = {
+ def sendIoCommand(ioCommand: IoCommand[_]) = {
+ sendIoCommandWithCustomTimeout(ioCommand, RobustClientHelper.DefaultRequestLostTimeout)
+ }
+
+ def sendIoCommandWithCustomTimeout(ioCommand: IoCommand[_], timeout: FiniteDuration) = {
robustSend(ioCommand, ioActor, timeout)
}
diff --git a/core/src/main/scala/cromwell/core/io/IoCommand.scala b/core/src/main/scala/cromwell/core/io/IoCommand.scala
index 4df237315..fcc2d72a3 100644
--- a/core/src/main/scala/cromwell/core/io/IoCommand.scala
+++ b/core/src/main/scala/cromwell/core/io/IoCommand.scala
@@ -37,42 +37,51 @@ trait IoCommand[+T] {
/**
* Copy source -> destination
+ * Will create the destination directory if it doesn't exist.
*/
-class IoCopyCommand(val source: Path, val destination: Path, val overwrite: Boolean) extends IoCommand[Unit] {
+abstract class IoCopyCommand(val source: Path, val destination: Path, val overwrite: Boolean) extends IoCommand[Unit] {
override def toString = s"copy ${source.pathAsString} to ${destination.pathAsString} with overwrite = $overwrite"
}
/**
* Read file as a string (load the entire content in memory)
*/
-class IoContentAsStringCommand(val file: Path) extends IoCommand[String] {
+abstract class IoContentAsStringCommand(val file: Path) extends IoCommand[String] {
override def toString = s"read content of ${file.pathAsString}"
}
/**
* Return the size of file
*/
-class IoSizeCommand(val file: Path) extends IoCommand[Long] {
+abstract class IoSizeCommand(val file: Path) extends IoCommand[Long] {
override def toString = s"get size of ${file.pathAsString}"
}
/**
* Write content in file
+ * Will create the destination directory if it doesn't exist.
*/
-class IoWriteCommand(val file: Path, val content: String, val openOptions: OpenOptions) extends IoCommand[Unit] {
+abstract class IoWriteCommand(val file: Path, val content: String, val openOptions: OpenOptions) extends IoCommand[Unit] {
override def toString = s"write to ${file.pathAsString}"
}
/**
* Delete file
*/
-class IoDeleteCommand(val file: Path, val swallowIOExceptions: Boolean) extends IoCommand[Unit] {
+abstract class IoDeleteCommand(val file: Path, val swallowIOExceptions: Boolean) extends IoCommand[Unit] {
override def toString = s"delete ${file.pathAsString}"
}
/**
* Get Hash value for file
*/
-class IoHashCommand(val file: Path) extends IoCommand[String] {
+abstract class IoHashCommand(val file: Path) extends IoCommand[String] {
override def toString = s"get hash of ${file.pathAsString}"
}
+
+/**
+ * Touch a file
+ */
+abstract class IoTouchCommand(val file: Path) extends IoCommand[Unit] {
+ override def toString = s"touch ${file.pathAsString}"
+}
diff --git a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala
index d79e67438..ad26449b4 100644
--- a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala
+++ b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala
@@ -1,5 +1,6 @@
package cromwell.core.io
+import cromwell.core.io.DefaultIoCommand._
import cromwell.core.path.BetterFileMethods.OpenOptions
import cromwell.core.path.Path
@@ -10,13 +11,15 @@ trait IoCommandBuilder {
def deleteCommand(path: Path, swallowIoExceptions: Boolean): IoDeleteCommand
def copyCommand(src: Path, dest: Path, overwrite: Boolean): IoCopyCommand
def hashCommand(file: Path): IoHashCommand
+ def touchCommand(file: Path): IoTouchCommand
}
trait DefaultIoCommandBuilder extends IoCommandBuilder {
- def contentAsStringCommand(path: Path) = new IoContentAsStringCommand(path)
- def writeCommand(path: Path, content: String, options: OpenOptions) = new IoWriteCommand(path, content, options)
- def sizeCommand(path: Path) = new IoSizeCommand(path)
- def deleteCommand(path: Path, swallowIoExceptions: Boolean) = new IoDeleteCommand(path, swallowIoExceptions)
- def copyCommand(src: Path, dest: Path, overwrite: Boolean) = new IoCopyCommand(src, dest, overwrite)
- def hashCommand(file: Path) = new IoHashCommand(file)
+ def contentAsStringCommand(path: Path): IoContentAsStringCommand = DefaultIoContentAsStringCommand(path)
+ def writeCommand(path: Path, content: String, options: OpenOptions): IoWriteCommand = DefaultIoWriteCommand(path, content, options)
+ def sizeCommand(path: Path): IoSizeCommand = DefaultIoSizeCommand(path)
+ def deleteCommand(path: Path, swallowIoExceptions: Boolean): IoDeleteCommand = DefaultIoDeleteCommand(path, swallowIoExceptions)
+ def copyCommand(src: Path, dest: Path, overwrite: Boolean): IoCopyCommand = DefaultIoCopyCommand(src, dest, overwrite)
+ def hashCommand(file: Path): IoHashCommand = DefaultIoHashCommand(file)
+ def touchCommand(file: Path): IoTouchCommand = DefaultIoTouchCommand(file)
}
diff --git a/core/src/main/scala/cromwell/core/labels/Label.scala b/core/src/main/scala/cromwell/core/labels/Label.scala
index 4fc1e2959..cee9ac91a 100644
--- a/core/src/main/scala/cromwell/core/labels/Label.scala
+++ b/core/src/main/scala/cromwell/core/labels/Label.scala
@@ -5,69 +5,40 @@ import cats.data.Validated._
import cats.syntax.cartesian._
import cats.syntax.validated._
+import scala.util.matching.Regex
+
sealed abstract case class Label(key: String, value: String)
object Label {
- // Yes, 63. Not a typo for 64.
- // See 'labels' in https://cloud.google.com/genomics/reference/rpc/google.genomics.v1alpha2#google.genomics.v1alpha2.RunPipelineArgs
- private val MaxLabelLength = 63
- val LabelRegexPattern = "[a-z]([-a-z0-9]*[a-z0-9])?"
+ val MaxLabelLength = 63
+ val LabelKeyRegex = "[a-z]([-a-z0-9]*[a-z0-9])?"
+ val LabelValueRegex = "([a-z0-9]*[-a-z0-9]*[a-z0-9])?"
- def validateName(s: String): ErrorOr[String] = {
- if (LabelRegexPattern.r.pattern.matcher(s).matches) {
- if (s.length <= MaxLabelLength) s.validNel else s"Invalid label: $s was ${s.length} characters. The maximum is $MaxLabelLength".invalidNel
- } else {
- s"Invalid label: $s did not match the regex $LabelRegexPattern".invalidNel
- }
- }
+ val LabelExpectationsMessage =
+ s"A Label key must match the pattern `$LabelKeyRegex` and a label value must match the pattern `$LabelValueRegex`."
- def validateLabel(key: String, value: String): ErrorOr[Label] = {
- val validatedKey = validateName(key)
- val validatedValue = validateName(value)
-
- (validatedKey |@| validatedValue) map { case (k, v) => new Label(k, v) {} }
+ def validateLabelRegex(s: String, regexAllowed: Regex): ErrorOr[String] = {
+ (regexAllowed.pattern.matcher(s).matches, s.length <= MaxLabelLength) match {
+ case (true, true) => s.validNel
+ case (false, false) => s"Invalid label: `$s` did not match regex $regexAllowed and it is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel
+ case (false, _) => s"Invalid label: `$s` did not match the regex $regexAllowed.".invalidNel
+ case (_, false) => s"Invalid label: `$s` is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel
+ }
}
- /**
- * Change to meet the constraint:
- * - To match the regex LabelRegexPattern
- * - To be between 1 and MaxLabelLength characters total
- */
- def safeName(mainText: String): String = {
+ def validateLabelKey(key: String): ErrorOr[String] = validateLabelRegex(key, LabelKeyRegex.r)
- validateName(mainText) match {
- case Valid(labelText) => labelText
- case _ =>
- def appendSafe(current: String, nextChar: Char): String = {
- nextChar match {
- case c if c.isLetterOrDigit || c == '-' => current + c.toLower
- case _ => current + '-'
- }
- }
+ def validateLabelValue(key: String): ErrorOr[String] = validateLabelRegex(key, LabelValueRegex.r)
- val foldResult = mainText.toCharArray.foldLeft("")(appendSafe)
-
- val startsValid = foldResult.headOption.exists(_.isLetter)
- val endsValid = foldResult.lastOption.exists(_.isLetterOrDigit)
-
- val validStart = if (startsValid) foldResult else "x--" + foldResult
- val validStartAndEnd = if (endsValid) validStart else validStart + "--x"
-
- val length = validStartAndEnd.length
- val tooLong = length > MaxLabelLength
+ def validateLabel(key: String, value: String): ErrorOr[Label] = {
+ val validatedKey = validateLabelKey(key)
+ val validatedValue = validateLabelValue(value)
- if (tooLong) {
- val middleSeparator = "---"
- val subSectionLength = (MaxLabelLength - middleSeparator.length) / 2
- validStartAndEnd.substring(0, subSectionLength) + middleSeparator + validStartAndEnd.substring(length - subSectionLength, length)
- } else {
- validStartAndEnd
- }
- }
+ (validatedKey |@| validatedValue) map { case (k, v) => new Label(k, v) {} }
}
- def safeLabel(key: String, value: String): Label = {
- new Label(safeName(key), safeName(value)) {}
+ def apply(key: String, value: String) = {
+ new Label(key, value) {}
}
}
diff --git a/core/src/main/scala/cromwell/core/labels/Labels.scala b/core/src/main/scala/cromwell/core/labels/Labels.scala
index 8e891fb51..e5fe3f575 100644
--- a/core/src/main/scala/cromwell/core/labels/Labels.scala
+++ b/core/src/main/scala/cromwell/core/labels/Labels.scala
@@ -1,18 +1,31 @@
package cromwell.core.labels
+import cats.data.Validated._
+import cats.instances.vector._
+import cats.syntax.traverse._
+import lenthall.validation.ErrorOr
+import lenthall.validation.ErrorOr.ErrorOr
+
import scala.collection.JavaConverters._
case class Labels(value: Vector[Label]) {
- def asJesLabels = (value map { label => label.key -> label.value }).toMap.asJava
+ def asTuple: Vector[(String, String)] = value.map(label => label.key -> label.value)
+
+ def asMap: Map[String, String] = asTuple.toMap
+
+ def asJavaMap = asMap.asJava
def ++(that: Labels) = Labels(value ++ that.value)
}
object Labels {
def apply(values: (String, String)*): Labels = {
- val kvps: Seq[(String, String)] = values.toSeq
- Labels((kvps map { case (k, v) => Label.safeLabel(k, v) }).to[Vector])
+ Labels(values.toVector map (Label.apply _).tupled)
+ }
+
+ def validateMapOfLabels(labels: Map[String, String]): ErrorOr[Labels] = {
+ labels.toVector traverse { Label.validateLabel _ }.tupled map Labels.apply
}
def empty = Labels(Vector.empty)
diff --git a/core/src/main/scala/cromwell/core/package.scala b/core/src/main/scala/cromwell/core/package.scala
index def878003..e94dbc809 100644
--- a/core/src/main/scala/cromwell/core/package.scala
+++ b/core/src/main/scala/cromwell/core/package.scala
@@ -1,13 +1,34 @@
package cromwell
-import wdl4s.values.WdlValue
+import cats.data.Validated._
+import cats.syntax.validated._
+import lenthall.validation.ErrorOr.ErrorOr
+import wdl4s.wdl.values.WdlValue
+
+import scala.util.{Failure, Success, Try}
package object core {
type LocallyQualifiedName = String
type FullyQualifiedName = String
type WorkflowOutputs = Map[FullyQualifiedName, JobOutput]
type WorkflowOptionsJson = String
+ type WorkflowType = String
+ type WorkflowTypeVersion = String
type CallOutputs = Map[LocallyQualifiedName, JobOutput]
type HostInputs = Map[String, WdlValue]
type EvaluatedRuntimeAttributes = Map[String, WdlValue]
+
+ implicit class toErrorOr[A](val trySomething: Try[A]) {
+ def tryToErrorOr: ErrorOr[A] = trySomething match {
+ case Success(options) => options.validNel
+ case Failure(err) => err.getMessage.invalidNel
+ }
+ }
+
+ implicit class toTry[A](val validatedSomething: ErrorOr[A]) {
+ def errorOrToTry: Try[A] = validatedSomething match {
+ case Valid(options) => Success(options)
+ case Invalid(err) => Failure(new RuntimeException(s"Error(s): ${err.toList.mkString(",")}"))
+ }
+ }
}
diff --git a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala
index 5346ec70c..94c780a84 100644
--- a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala
+++ b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala
@@ -214,11 +214,11 @@ trait BetterFileMethods {
betterFile.bufferedReader(codec)
final def newBufferedWriter(implicit codec: Codec, openOptions: OpenOptions = OpenOptions.default): BufferedWriter =
- betterFile.newBufferedWriter(codec)
+ betterFile.newBufferedWriter(codec, openOptions)
final def bufferedWriter(implicit codec: Codec,
openOptions: OpenOptions = OpenOptions.default): ManagedResource[BufferedWriter] =
- betterFile.bufferedWriter(codec)
+ betterFile.bufferedWriter(codec, openOptions)
final def newFileReader: FileReader = betterFile.newFileReader
diff --git a/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala b/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala
index 5339fae3c..234a19d79 100644
--- a/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala
+++ b/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala
@@ -3,6 +3,8 @@ package cromwell.core.path
import akka.actor.ActorSystem
import cromwell.core.WorkflowOptions
+import scala.concurrent.{ExecutionContext, Future}
+
case object DefaultPathBuilderFactory extends PathBuilderFactory {
- override def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem) = DefaultPathBuilder
+ override def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem, ec: ExecutionContext) = Future.successful(DefaultPathBuilder)
}
diff --git a/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala b/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala
index 1fc6bf4df..3b71d0712 100644
--- a/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala
+++ b/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala
@@ -38,7 +38,7 @@ trait EvenBetterPathMethods {
// betterFile.symbolicLink calls Files.readSymbolicLink, but then implicitly converts the java.nio.Path returned to a better.File
// which calls toAbsolutePath. Consequently, if the path was relative, the current directory is used to make it absolute.
- // This is not the desired behaviour to be able to follow relative symbolic links, so bypass better files method and directly use the java one.
+ // This is not the desired behavior to be able to follow relative symbolic links, so bypass better files method and directly use the java one.
final def symbolicLinkRelative: Option[Path] = {
if (betterFile.isSymbolicLink) {
Option(newPath(Files.readSymbolicLink(betterFile.path)))
diff --git a/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala b/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala
index 7ee20eb2d..63a91e02b 100644
--- a/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala
+++ b/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala
@@ -3,9 +3,11 @@ package cromwell.core.path
import akka.actor.ActorSystem
import cromwell.core.WorkflowOptions
+import scala.concurrent.{ExecutionContext, Future}
+
/**
* Provide a method that can instantiate a path builder with the specified workflow options.
*/
trait PathBuilderFactory {
- def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem): PathBuilder
+ def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder]
}
diff --git a/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala b/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala
index 774c1b5ce..238553f2a 100644
--- a/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala
+++ b/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala
@@ -1,12 +1,12 @@
package cromwell.core.simpleton
-import wdl4s.TaskOutput
-import wdl4s.types._
-import wdl4s.values.{WdlArray, WdlMap, WdlOptionalValue, WdlPair, WdlValue}
+import cromwell.core.simpleton.WdlValueSimpleton._
+import cromwell.core.{CallOutputs, JobOutput}
+import wdl4s.wdl.TaskOutput
+import wdl4s.wdl.types._
+import wdl4s.wdl.values.{WdlArray, WdlMap, WdlOptionalValue, WdlPair, WdlValue}
import scala.language.postfixOps
-import cromwell.core.{CallOutputs, JobOutput}
-import cromwell.core.simpleton.WdlValueSimpleton._
/**
@@ -88,7 +88,7 @@ object WdlValueBuilder {
// Group tuples by key using a Map with key type `K`.
def group[K](tuples: Traversable[(K, SimpletonComponent)]): Map[K, Traversable[SimpletonComponent]] = {
- tuples groupBy { case (i, _) => i } mapValues { _ map { case (i, s) => s} }
+ tuples groupBy { case (i, _) => i } mapValues { _ map { case (_, s) => s} }
}
outputType match {
diff --git a/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala b/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala
index 1f5e04375..aa5397d55 100644
--- a/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala
+++ b/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala
@@ -1,6 +1,6 @@
package cromwell.core.simpleton
-import wdl4s.values._
+import wdl4s.wdl.values._
case class WdlValueSimpleton(simpletonKey: String, simpletonValue: WdlPrimitive)
diff --git a/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala b/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala
new file mode 100644
index 000000000..5ed66fb5e
--- /dev/null
+++ b/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala
@@ -0,0 +1,34 @@
+package cromwell.util
+
+import akka.actor.{Actor, ActorLogging, ActorRef, Terminated}
+import akka.pattern.GracefulStopSupport
+import cats.data.NonEmptyList
+import cromwell.util.GracefulShutdownHelper.ShutdownCommand
+
+object GracefulShutdownHelper {
+ case object ShutdownCommand
+}
+
+trait GracefulShutdownHelper extends GracefulStopSupport { this: Actor with ActorLogging =>
+ private var shuttingDown: Boolean = false
+ private var shutdownList: Set[ActorRef] = Set.empty
+
+ def isShuttingDown: Boolean = shuttingDown
+
+ def waitForActorsAndShutdown(actorsLists: NonEmptyList[ActorRef]): Unit = {
+ if (shuttingDown) {
+ log.error("Programmer error, this actor has already initiated its shutdown. Only call this once per actor !")
+ } else {
+ shuttingDown = true
+ shutdownList = actorsLists.toList.toSet
+ shutdownList foreach context.watch
+ shutdownList foreach { _ ! ShutdownCommand }
+
+ context become {
+ case Terminated(actor) if shuttingDown && shutdownList.contains(actor) =>
+ shutdownList = shutdownList - actor
+ if (shutdownList.isEmpty) context stop self
+ }
+ }
+ }
+}
diff --git a/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala b/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala
index dc7f55fe5..53bdd4293 100644
--- a/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala
+++ b/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala
@@ -1,9 +1,10 @@
package cromwell.util.JsonFormatting
import spray.json._
-import wdl4s.WdlExpression
-import wdl4s.types.{WdlArrayType, WdlMapType, WdlStringType}
-import wdl4s.values._
+import wdl4s.wdl.WdlExpression
+import wdl4s.wdl.types._
+import wdl4s.wdl.values._
+import wdl4s.wdl.values.{WdlBoolean, WdlFloat, WdlInteger, WdlString, WdlValue}
object WdlValueJsonFormatter extends DefaultJsonProtocol {
implicit object WdlValueJsonFormat extends RootJsonFormat[WdlValue] {
diff --git a/core/src/main/scala/cromwell/util/PromiseActor.scala b/core/src/main/scala/cromwell/util/PromiseActor.scala
index 58aea267a..bd5efa5b0 100644
--- a/core/src/main/scala/cromwell/util/PromiseActor.scala
+++ b/core/src/main/scala/cromwell/util/PromiseActor.scala
@@ -1,7 +1,7 @@
package cromwell.util
import akka.actor._
-
+import cromwell.core.Dispatcher.EngineDispatcher
import scala.concurrent.{Future, Promise}
private class PromiseActor(promise: Promise[Any], sendTo: ActorRef, msg: Any) extends Actor with ActorLogging {
@@ -42,7 +42,7 @@ object PromiseActor {
promise.future
}
- def props(promise: Promise[Any], sendTo: ActorRef, msg: Any): Props = Props(new PromiseActor(promise, sendTo, msg))
+ def props(promise: Promise[Any], sendTo: ActorRef, msg: Any): Props = Props(new PromiseActor(promise, sendTo, msg)).withDispatcher(EngineDispatcher)
implicit class EnhancedActorRef(val actorRef: ActorRef) extends AnyVal {
def askNoTimeout(message: Any)(implicit actorRefFactory: ActorRefFactory): Future[Any] = {
diff --git a/core/src/test/resources/application.conf b/core/src/test/resources/application.conf
index 1ef667470..f45d06517 100644
--- a/core/src/test/resources/application.conf
+++ b/core/src/test/resources/application.conf
@@ -22,7 +22,7 @@ database.db.connectionTimeout = 3000
database-test-mysql {
# Run the following to (optionally) drop and (re-)create the database:
# mysql -utravis -e "DROP DATABASE IF EXISTS cromwell_test" && mysql -utravis -e "CREATE DATABASE cromwell_test"
- driver = "slick.driver.MySQLDriver$"
+ profile = "slick.jdbc.MySQLProfile$"
db {
driver = "com.mysql.jdbc.Driver"
url = "jdbc:mysql://localhost/cromwell_test?useSSL=false"
diff --git a/core/src/test/scala/cromwell/core/TestKitSuite.scala b/core/src/test/scala/cromwell/core/TestKitSuite.scala
index c29a0338e..282765e9f 100644
--- a/core/src/test/scala/cromwell/core/TestKitSuite.scala
+++ b/core/src/test/scala/cromwell/core/TestKitSuite.scala
@@ -55,7 +55,7 @@ object TestKitSuite {
| }
|
| # A dispatcher for engine actors
- | # Because backends behaviour is unpredictable (potentially blocking, slow) the engine runs
+ | # Because backends behavior is unpredictable (potentially blocking, slow) the engine runs
| # on its own dispatcher to prevent backends from affecting its performance.
| engine-dispatcher {
| type = Dispatcher
diff --git a/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala b/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala
index cc3121402..4fcc1fc98 100644
--- a/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala
+++ b/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala
@@ -10,6 +10,8 @@ import cromwell.core.actor.StreamIntegration._
import cromwell.core.actor.TestStreamActor.{TestStreamActorCommand, TestStreamActorContext}
import org.scalatest.{FlatSpecLike, Matchers}
+import scala.concurrent.ExecutionContext
+
class StreamActorHelperSpec extends TestKitSuite with FlatSpecLike with Matchers with ImplicitSender {
behavior of "StreamActorHelper"
@@ -67,4 +69,6 @@ private class TestStreamActor(queueSize: Int)(implicit override val materializer
override protected val streamSource = Source.queue[TestStreamActorContext](queueSize, OverflowStrategy.dropNew)
.map{ ("hello", _) }
+
+ override implicit def ec: ExecutionContext = context.dispatcher
}
diff --git a/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala b/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala
new file mode 100644
index 000000000..a25b311a6
--- /dev/null
+++ b/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala
@@ -0,0 +1,35 @@
+package cromwell.core.callcaching
+
+import org.scalatest.{FlatSpec, Matchers}
+
+class HashKeySpec extends FlatSpec with Matchers {
+
+ "HashKey" should "produce consistent key value" in {
+ val keys = Set(
+ HashKey("command template"),
+ HashKey("backend name"),
+ HashKey("input count"),
+ HashKey("output count"),
+ HashKey("runtime attribute", "failOnStderr"),
+ HashKey(checkForHitOrMiss = false, "runtime attribute", "cpu"),
+ HashKey("runtime attribute", "continueOnReturnCode"),
+ HashKey("input", "String stringInput"),
+ HashKey("output", "String myOutput"),
+ HashKey("runtime attribute", "docker")
+ )
+
+ keys map { _.key } should contain theSameElementsAs Set(
+ "command template",
+ "backend name",
+ "input count",
+ "output count",
+ "runtime attribute: failOnStderr",
+ "runtime attribute: cpu",
+ "runtime attribute: continueOnReturnCode",
+ "input: String stringInput",
+ "output: String myOutput",
+ "runtime attribute: docker"
+ )
+ }
+
+}
diff --git a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala
index 6f92f3003..1411a47c6 100644
--- a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala
+++ b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala
@@ -3,6 +3,7 @@ package cromwell.core.io
import akka.actor.{Actor, ActorLogging, ActorRef}
import akka.testkit.{TestActorRef, TestProbe}
import cromwell.core.TestKitSuite
+import cromwell.core.io.DefaultIoCommand.DefaultIoSizeCommand
import cromwell.core.path.Path
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FlatSpecLike, Matchers}
@@ -22,7 +23,7 @@ class IoClientHelperSpec extends TestKitSuite with FlatSpecLike with Matchers wi
val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backpressureTimeout, noResponseTimeout))
- val command = new IoSizeCommand(mock[Path])
+ val command = DefaultIoSizeCommand(mock[Path])
val response = IoSuccess(command, 5)
// Send the command
@@ -53,7 +54,7 @@ class IoClientHelperSpec extends TestKitSuite with FlatSpecLike with Matchers wi
val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backpressureTimeout, noResponseTimeout))
val commandContext = "context"
- val command = new IoSizeCommand(mock[Path])
+ val command = DefaultIoSizeCommand(mock[Path])
val response = IoSuccess(command, 5)
// Send the command
@@ -91,7 +92,7 @@ class IoClientHelperSpec extends TestKitSuite with FlatSpecLike with Matchers wi
}
def sendMessage(command: IoCommand[_]) = {
- sendIoCommand(command, noResponseTimeout)
+ sendIoCommandWithCustomTimeout(command, noResponseTimeout)
}
def sendMessageWithContext(context: Any, command: IoCommand[_]) = {
diff --git a/core/src/test/scala/cromwell/core/labels/LabelSpec.scala b/core/src/test/scala/cromwell/core/labels/LabelSpec.scala
index 47060b6d6..52c331bec 100644
--- a/core/src/test/scala/cromwell/core/labels/LabelSpec.scala
+++ b/core/src/test/scala/cromwell/core/labels/LabelSpec.scala
@@ -10,40 +10,44 @@ class LabelSpec extends FlatSpec with Matchers {
/**
* In the format 'to validate', 'expected result'
*/
- val goodLabelStrings = List(
+ val goodLabelKeys = List(
"cromwell-root-workflow-id",
"cromwell-11f2468c-39d6-4be3-85c8-32735c01e66b",
"just-the-right-length-just-the-right-length-just-the-right-leng"
)
- val badLabelConversions = List(
- "11f2468c-39d6-4be3-85c8-32735c01e66b" -> "x--11f2468c-39d6-4be3-85c8-32735c01e66b",
- "0-cromwell-root-workflow-id" -> "x--0-cromwell-root-workflow-id",
- "" -> "x----x",
- "cromwell-root-workflow-id-" -> "cromwell-root-workflow-id---x",
- "0-cromwell-root-workflow-id-" -> "x--0-cromwell-root-workflow-id---x",
- "Cromwell-root-workflow-id" -> "cromwell-root-workflow-id",
- "cromwell_root_workflow_id" -> "cromwell-root-workflow-id",
- "too-long-too-long-too-long-too-long-too-long-too-long-too-long-t" -> "too-long-too-long-too-long-too---g-too-long-too-long-too-long-t",
- "0-too-long-and-invalid-too-long-and-invalid-too-long-and-invali+" -> "x--0-too-long-and-invalid-too----nvalid-too-long-and-invali---x"
+ val goodLabelValues = List(
+ "11f2468c-39d6-4be3-85c8-32735c01e66b",
+ ""
)
- goodLabelStrings foreach { label =>
- it should s"validate the good label string '$label'" in {
- Label.validateName(label) should be(Valid(label))
+ val badLabelKeys = List(
+ "11f2468c-39d6-4be3-85c8-32735c01e66b",
+ "0-cromwell-root-workflow-id",
+ "",
+ "cromwell-root-workflow-id-",
+ "0-cromwell-root-workflow-id-",
+ "Cromwell-root-workflow-id"
+ )
+
+ goodLabelKeys foreach { key =>
+ it should s"validate a good label key '$key'" in {
+ Label.validateLabelKey(key) should be(Valid(key))
}
}
- badLabelConversions foreach { case (label: String, conversion: String) =>
- it should s"not validate the bad label string '$label'" in {
- Label.validateName(label) match {
- case Invalid(_) => // Good!
- case Valid(_) => fail(s"Label validation succeeded but should have failed.")
- }
+ goodLabelValues foreach { value =>
+ it should s"validate a good label value '$value'" in {
+ Label.validateLabelValue(value) should be(Valid(value))
}
+ }
- it should s"convert the bad label string '$label' into the safe label string '$conversion'" in {
- Label.safeName(label) should be(conversion)
+ badLabelKeys foreach { key =>
+ it should s"not validate a bad label key $key" in {
+ Label.validateLabelKey(key) match {
+ case Invalid(_) => // Good!
+ case Valid(_) => fail(s"Label key validation succeeded but should have failed.")
+ }
}
}
}
diff --git a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala
index 27f24076c..f62b49d47 100644
--- a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala
+++ b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala
@@ -34,7 +34,7 @@ class RetrySpec extends TestKitSuite("retry-spec") with FlatSpecLike with Matche
isFatal: Throwable => Boolean = Retry.throwableToFalse): Future[Int] = {
withRetry(
- f = work.doIt,
+ f = () => work.doIt(),
maxRetries = Option(retries),
isTransient = isTransient,
isFatal = isFatal
diff --git a/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala b/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala
index 1e558ceb1..514051e22 100644
--- a/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala
+++ b/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala
@@ -4,9 +4,9 @@ import cromwell.core.simpleton.WdlValueBuilderSpec._
import org.scalatest.{FlatSpec, Matchers}
import org.specs2.mock.Mockito
import wdl4s.parser.WdlParser.Ast
-import wdl4s.types.{WdlArrayType, WdlIntegerType, WdlMapType, WdlStringType}
-import wdl4s.values.{WdlArray, WdlInteger, WdlMap, WdlPair, WdlString, WdlValue}
-import wdl4s.{TaskOutput, WdlExpression}
+import wdl4s.wdl.types.{WdlArrayType, WdlIntegerType, WdlMapType, WdlStringType}
+import wdl4s.wdl.values.{WdlArray, WdlInteger, WdlMap, WdlPair, WdlString, WdlValue}
+import wdl4s.wdl.{TaskOutput, WdlExpression}
object WdlValueBuilderSpec {
// WdlValueBuilder doesn't care about this expression, but something needs to be passed to the TaskOutput constructor.
@@ -115,9 +115,9 @@ class WdlValueBuilderSpec extends FlatSpec with Matchers with Mockito {
it should "round trip everything together with no losses" in {
- val wdlValues = (simpletonConversions map { case SimpletonConversion(name, wdlValue, simpletons) => name -> wdlValue }).toMap
+ val wdlValues = (simpletonConversions map { case SimpletonConversion(name, wdlValue, _) => name -> wdlValue }).toMap
val taskOutputs = wdlValues map { case (k, wv) => TaskOutput(k, wv.wdlType, IgnoredExpression, mock[Ast], None) }
- val allSimpletons = simpletonConversions flatMap { case SimpletonConversion(name, wdlValue, simpletons) => simpletons }
+ val allSimpletons = simpletonConversions flatMap { case SimpletonConversion(_, _, simpletons) => simpletons }
import WdlValueSimpleton._
diff --git a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala
index 10b05dc2b..1633a7d1d 100644
--- a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala
+++ b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala
@@ -8,7 +8,7 @@ object AkkaTestUtil {
implicit class EnhancedTestProbe(probe: TestProbe) {
def props = Props(new Actor with ActorLogging {
def receive = {
- case outbound if sender == probe.ref =>
+ case outbound @ _ if sender == probe.ref =>
val msg = "Unexpected outbound message from Probe. You're doing something wrong!"
log.error(msg)
throw new RuntimeException(msg)
diff --git a/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala b/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala
new file mode 100644
index 000000000..4d93073dc
--- /dev/null
+++ b/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala
@@ -0,0 +1,42 @@
+package cromwell.util
+
+import akka.actor.{Actor, ActorLogging, Props}
+import akka.testkit.TestProbe
+import cats.data.NonEmptyList
+import cromwell.core.TestKitSuite
+import cromwell.util.GracefulShutdownHelper.ShutdownCommand
+import org.scalatest.{FlatSpecLike, Matchers}
+
+class GracefulShutdownHelperSpec extends TestKitSuite with FlatSpecLike with Matchers {
+ behavior of "GracefulShutdownHelper"
+
+ it should "send ShutdownCommand to actors, wait for them to shutdown, then shut itself down" in {
+ val testProbeA = TestProbe()
+ val testProbeB = TestProbe()
+
+ val testActor = system.actorOf(Props(new Actor with GracefulShutdownHelper with ActorLogging {
+ override def receive: Receive = {
+ case ShutdownCommand => waitForActorsAndShutdown(NonEmptyList.of(testProbeA.ref, testProbeB.ref))
+ }
+ }))
+
+ watch(testActor)
+
+ testActor ! ShutdownCommand
+
+ testProbeA.expectMsg(ShutdownCommand)
+ testProbeB.expectMsg(ShutdownCommand)
+
+ // Make sure it's still alive
+ expectNoMsg()
+
+ system stop testProbeA.ref
+
+ // Make sure it's still alive
+ expectNoMsg()
+
+ system stop testProbeB.ref
+
+ expectTerminated(testActor)
+ }
+}
diff --git a/core/src/test/scala/cromwell/util/SampleWdl.scala b/core/src/test/scala/cromwell/util/SampleWdl.scala
index 5a3940b81..b7d07fee9 100644
--- a/core/src/test/scala/cromwell/util/SampleWdl.scala
+++ b/core/src/test/scala/cromwell/util/SampleWdl.scala
@@ -4,18 +4,29 @@ import java.util.UUID
import cromwell.core.WorkflowSourceFilesWithoutImports
import cromwell.core.path.{DefaultPathBuilder, Path}
-import cromwell.core.WorkflowSourceFilesWithoutImports
import spray.json._
-import wdl4s._
-import wdl4s.types.{WdlArrayType, WdlStringType}
-import wdl4s.values._
+import wdl4s.wdl.types.{WdlArrayType, WdlStringType}
+import wdl4s.wdl.values._
+import wdl4s.wdl.{WorkflowJson, WorkflowRawInputs, WorkflowSource}
import scala.language.postfixOps
trait SampleWdl extends TestFileUtil {
- def wdlSource(runtime: String = ""): WdlSource
- def asWorkflowSources(runtime: String = "", workflowOptions: String = "{}", labels: String = "{}") =
- WorkflowSourceFilesWithoutImports(wdlSource = wdlSource(runtime), inputsJson = wdlJson, workflowOptionsJson = workflowOptions, labelsJson = labels)
+ def workflowSource(runtime: String = ""): WorkflowSource
+ def asWorkflowSources(runtime: String = "",
+ workflowOptions: String = "{}",
+ labels: String = "{}",
+ workflowType: Option[String] = Option("WDL"),
+ workflowTypeVersion: Option[String] = None) = {
+ WorkflowSourceFilesWithoutImports(
+ workflowSource = workflowSource(runtime),
+ inputsJson = workflowJson,
+ workflowOptionsJson = workflowOptions,
+ labelsJson = labels,
+ workflowType = workflowType,
+ workflowTypeVersion = workflowTypeVersion)
+ }
+
val rawInputs: WorkflowRawInputs
def name = getClass.getSimpleName.stripSuffix("$")
@@ -54,7 +65,7 @@ trait SampleWdl extends TestFileUtil {
def read(value: JsValue) = throw new NotImplementedError(s"Reading JSON not implemented: $value")
}
- def wdlJson: WdlJson = rawInputs.toJson.prettyPrint
+ def workflowJson: WorkflowJson = rawInputs.toJson.prettyPrint
def deleteFile(path: Path) = path.delete()
}
@@ -62,7 +73,7 @@ trait SampleWdl extends TestFileUtil {
object SampleWdl {
object HelloWorld extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""
|task hello {
| String addressee
@@ -87,7 +98,7 @@ object SampleWdl {
}
object HelloWorldWithoutWorkflow extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""
|task hello {
| String addressee
@@ -107,7 +118,7 @@ object SampleWdl {
}
object GoodbyeWorld extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
"""
|task goodbye {
| command {
@@ -128,7 +139,7 @@ object SampleWdl {
}
object EmptyString extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""
|task hello {
| command {
@@ -170,13 +181,13 @@ object SampleWdl {
object EmptyWorkflow extends SampleWdl {
- override def wdlSource(runtime: String = "") = "workflow empty_workflow {}"
+ override def workflowSource(runtime: String = "") = "workflow empty_workflow {}"
val rawInputs = Map.empty[String, Any]
}
object CoercionNotDefined extends SampleWdl {
- override def wdlSource(runtime: String = "") = {
+ override def workflowSource(runtime: String = "") = {
s"""
|task summary {
| String bfile
@@ -207,7 +218,7 @@ object SampleWdl {
}
trait ThreeStepTemplate extends SampleWdl {
- override def wdlSource(runtime: String = "") = sourceString().replaceAll("RUNTIME", runtime)
+ override def workflowSource(runtime: String = "") = sourceString().replaceAll("RUNTIME", runtime)
private val outputSectionPlaceholder = "OUTPUTSECTIONPLACEHOLDER"
def sourceString(outputsSection: String = "") = {
val withPlaceholders =
@@ -268,7 +279,7 @@ object SampleWdl {
object ThreeStep extends ThreeStepTemplate
object ThreeStepWithOutputsSection extends ThreeStepTemplate {
- override def wdlSource(runtime: String = "") = sourceString(outputsSection =
+ override def workflowSource(runtime: String = "") = sourceString(outputsSection =
"""
|output {
| cgrep.count
@@ -278,7 +289,7 @@ object SampleWdl {
}
object ThreeStepWithInputsInTheOutputsSection extends ThreeStepTemplate {
- override def wdlSource(runtime: String = "") = sourceString(outputsSection =
+ override def workflowSource(runtime: String = "") = sourceString(outputsSection =
"""
|output {
| cgrep.pattern
@@ -293,7 +304,7 @@ object SampleWdl {
object WorkflowOutputsWithFiles extends SampleWdl {
// ASCII art from http://www.chris.com/ascii/joan/www.geocities.com/SoHo/7373/flag.html with pipes
// replaced by exclamation points to keep stripMargin from removing the flagpole.
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
"""
task A {
command {
@@ -360,7 +371,7 @@ object SampleWdl {
}
object WorkflowScatterOutputsWithFileArrays extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
"""
|task A {
| command {
@@ -387,7 +398,7 @@ object SampleWdl {
object DeclarationsWorkflow extends SampleWdl {
- override def wdlSource(runtime: String): WdlSource =
+ override def workflowSource(runtime: String): WorkflowSource =
s"""
|task cat {
| File file
@@ -441,7 +452,7 @@ object SampleWdl {
}
trait ZeroOrMorePostfixQuantifier extends SampleWdl {
- override def wdlSource(runtime: String): WdlSource =
+ override def workflowSource(runtime: String): WorkflowSource =
s"""
|task hello {
| Array[String] person
@@ -472,7 +483,7 @@ object SampleWdl {
}
trait OneOrMorePostfixQuantifier extends SampleWdl {
- override def wdlSource(runtime: String): WdlSource =
+ override def workflowSource(runtime: String): WorkflowSource =
s"""
|task hello {
| Array[String]+ person
@@ -499,7 +510,7 @@ object SampleWdl {
}
object CurrentDirectory extends SampleWdl {
- override def wdlSource(runtime: String): String =
+ override def workflowSource(runtime: String): String =
"""
|task whereami {
| command {
@@ -520,7 +531,7 @@ object SampleWdl {
}
object ArrayIO extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""
|task serialize {
| Array[String] strs
@@ -547,7 +558,7 @@ object SampleWdl {
createFileArray(catRootDir)
def cleanup() = cleanupFileArray(catRootDir)
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""
|task cat {
| Array[File]+ files
@@ -572,7 +583,7 @@ object SampleWdl {
createFileArray(catRootDir)
def cleanup() = cleanupFileArray(catRootDir)
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""
|task write_map {
| Map[File, String] file_to_name
@@ -661,7 +672,7 @@ object SampleWdl {
|}
""".stripMargin
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""$tasks
|
|workflow w {
@@ -679,7 +690,7 @@ object SampleWdl {
}
object SiblingsScatterWdl extends ScatterWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""$tasks
|
|workflow w {
@@ -700,7 +711,7 @@ object SampleWdl {
}
object SimpleScatterWdl extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""task echo_int {
| Int int
| command {echo $${int}}
@@ -723,7 +734,7 @@ object SampleWdl {
}
object SimpleScatterWdlWithOutputs extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""task echo_int {
| Int int
| command {echo $${int}}
@@ -748,7 +759,7 @@ object SampleWdl {
}
case class PrepareScatterGatherWdl(salt: String = UUID.randomUUID().toString) extends SampleWdl {
- override def wdlSource(runtime: String = "") = {
+ override def workflowSource(runtime: String = "") = {
s"""
|#
|# Goal here is to split up the input file into files of 1 line each (in the prepare) then in parallel call wc -w on each newly created file and count the words into another file then in the gather, sum the results of each parallel call to come up with
@@ -816,7 +827,7 @@ object SampleWdl {
}
object FileClobber extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""task read_line {
| File in
| command { cat $${in} }
@@ -841,7 +852,7 @@ object SampleWdl {
}
object FilePassingWorkflow extends SampleWdl {
- override def wdlSource(runtime: String): WdlSource =
+ override def workflowSource(runtime: String): WorkflowSource =
s"""task a {
| File in
| String out_name = "out"
@@ -881,7 +892,7 @@ object SampleWdl {
* different
*/
case class CallCachingWorkflow(salt: String) extends SampleWdl {
- override def wdlSource(runtime: String): WdlSource =
+ override def workflowSource(runtime: String): WorkflowSource =
s"""task a {
| File in
| String out_name = "out"
@@ -933,7 +944,7 @@ object SampleWdl {
|k3\tv3
""".stripMargin.trim
- override def wdlSource(runtime: String): WdlSource =
+ override def workflowSource(runtime: String): WorkflowSource =
s"""
|task a {
| Array[String] array
@@ -969,7 +980,7 @@ object SampleWdl {
}
object ArrayOfArrays extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""task subtask {
| Array[File] a
| command {
@@ -1009,7 +1020,7 @@ object SampleWdl {
}
object CallCachingHashingWdl extends SampleWdl {
- override def wdlSource(runtime: String): WdlSource =
+ override def workflowSource(runtime: String): WorkflowSource =
s"""task t {
| Int a
| Float b
@@ -1047,7 +1058,7 @@ object SampleWdl {
}
object ExpressionsInInputs extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""task echo {
| String inString
| command {
@@ -1077,7 +1088,7 @@ object SampleWdl {
}
object WorkflowFailSlow extends SampleWdl {
- override def wdlSource(runtime: String = "") =
+ override def workflowSource(runtime: String = "") =
s"""
task shouldCompleteFast {
| Int a
diff --git a/core/src/test/scala/cromwell/util/TestFileUtil.scala b/core/src/test/scala/cromwell/util/TestFileUtil.scala
index a6bedd490..6f0d08a1f 100644
--- a/core/src/test/scala/cromwell/util/TestFileUtil.scala
+++ b/core/src/test/scala/cromwell/util/TestFileUtil.scala
@@ -3,7 +3,7 @@ package cromwell.util
import java.nio.file.attribute.PosixFilePermission
import cromwell.core.path.{DefaultPathBuilder, Path}
-import wdl4s.values._
+import wdl4s.wdl.values._
trait TestFileUtil {
def createCannedFile(prefix: String, contents: String, dir: Option[Path] = None): Path = {
diff --git a/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala b/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala
index 91d678c01..7924f30ab 100644
--- a/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala
+++ b/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala
@@ -1,14 +1,10 @@
package cromwell.util
-import scala.Vector
-
-import org.scalatest.FlatSpec
-import org.scalatest.Matchers
-
-import JsonFormatting.WdlValueJsonFormatter.WdlValueJsonFormat
-import spray.json.{ JsObject, pimpString }
-import wdl4s.types.{ WdlArrayType, WdlStringType }
-import wdl4s.values.{ WdlArray, WdlPair, WdlString }
+import cromwell.util.JsonFormatting.WdlValueJsonFormatter.WdlValueJsonFormat
+import org.scalatest.{FlatSpec, Matchers}
+import spray.json.{JsObject, pimpString}
+import wdl4s.wdl.types.{WdlArrayType, WdlStringType}
+import wdl4s.wdl.values.{WdlArray, WdlPair, WdlString}
class WdlValueJsonFormatterSpec extends FlatSpec with Matchers {
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala
new file mode 100644
index 000000000..71c0d813f
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala
@@ -0,0 +1,173 @@
+package cromwell.api
+
+import java.net.URL
+
+import akka.http.scaladsl.Http
+import akka.actor.ActorSystem
+import akka.http.scaladsl.coding.{Deflate, Gzip, NoCoding}
+import akka.http.scaladsl.model.{HttpEntity, _}
+import akka.http.scaladsl.model.headers.HttpEncodings
+import akka.http.scaladsl.unmarshalling.{Unmarshal, Unmarshaller}
+import akka.stream.ActorMaterializer
+import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
+import akka.util.ByteString
+import cromwell.api.model._
+import spray.json._
+
+import scala.concurrent.{ExecutionContext, Future}
+import cromwell.api.CromwellClient._
+
+import scala.util.{Failure, Success, Try}
+
+class CromwellClient(val cromwellUrl: URL, val apiVersion: String)(implicit actorSystem: ActorSystem, materializer: ActorMaterializer) {
+
+ lazy val engineEndpoint = s"$cromwellUrl/engine/$apiVersion"
+ lazy val submitEndpoint = s"$cromwellUrl/api/workflows/$apiVersion"
+ // Everything else is a suffix off the submit endpoint:
+ lazy val batchSubmitEndpoint = s"$submitEndpoint/batch"
+ private def workflowSpecificEndpoint(workflowId: WorkflowId, endpoint: String) = s"$submitEndpoint/$workflowId/$endpoint"
+ def abortEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "abort")
+ def statusEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "status")
+ def metadataEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "metadata")
+ def outputsEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "outputs")
+ def logsEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "logs")
+ def diffEndpoint(workflowA: WorkflowId, callA: String, indexA: ShardIndex, workflowB: WorkflowId, callB: String, indexB: ShardIndex): String = {
+ def shardParam(aOrB: String, s: ShardIndex) = s.index.map(i => s"&index$aOrB=$i").getOrElse("")
+ s"$submitEndpoint/callcaching/diff?workflowA=$workflowA&callA=$callA&workflowB=$workflowB&callB=$callB${shardParam("A", indexA)}${shardParam("B", indexB)}"
+ }
+ lazy val backendsEndpoint = s"$submitEndpoint/backends"
+ lazy val versionEndpoint = s"$engineEndpoint/version"
+
+ import model.CromwellStatusJsonSupport._
+ import model.WorkflowOutputsJsonSupport._
+ import model.WorkflowLogsJsonSupport._
+ import model.CromwellBackendsJsonSupport._
+ import model.CromwellVersionJsonSupport._
+ import model.CallCacheDiffJsonSupport._
+
+ private def requestEntityForSubmit(workflowSubmission: WorkflowSubmission) = {
+ import cromwell.api.model.LabelsJsonFormatter._
+
+ val sourceBodyParts = Map(
+ "workflowSource" -> Option(workflowSubmission.wdl),
+ "workflowType" -> workflowSubmission.workflowType,
+ "workflowTypeVersion" -> workflowSubmission.workflowTypeVersion,
+ "workflowInputs" -> workflowSubmission.inputsJson,
+ "workflowOptions" -> insertSecrets(workflowSubmission.options, workflowSubmission.refreshToken),
+ "customLabels" -> Option(workflowSubmission.customLabels.toJson.toString)
+ ) collect { case (name, Some(source: String)) => Multipart.FormData.BodyPart(name, HttpEntity(MediaTypes.`application/json`, ByteString(source))) }
+ val zipBodyParts = Map(
+ "workflowDependencies" -> workflowSubmission.zippedImports
+ ) collect { case (name, Some(file)) => Multipart.FormData.BodyPart.fromPath(name, MediaTypes.`application/zip`, file.path) }
+
+ val multipartFormData = Multipart.FormData((sourceBodyParts ++ zipBodyParts).toSeq : _*)
+ multipartFormData.toEntity()
+ }
+
+ def submit(workflow: WorkflowSubmission)(implicit ec: ExecutionContext): Future[SubmittedWorkflow] = {
+ val requestEntity = requestEntityForSubmit(workflow)
+
+ makeRequest[CromwellStatus](HttpRequest(HttpMethods.POST, submitEndpoint, List.empty[HttpHeader], requestEntity)) map { status =>
+ SubmittedWorkflow(WorkflowId.fromString(status.id), cromwellUrl, workflow)
+ }
+ }
+
+ def submitBatch(workflow: WorkflowBatchSubmission)(implicit ec: ExecutionContext): Future[List[SubmittedWorkflow]] = {
+ import DefaultJsonProtocol._
+
+ val requestEntity = requestEntityForSubmit(workflow)
+
+ // Make a set of submissions that represent the batch (so we can zip with the results later):
+ val submissionSet = workflow.inputsBatch.map(inputs => WorkflowSingleSubmission(
+ wdl = workflow.wdl,
+ workflowType = workflow.workflowType,
+ workflowTypeVersion = workflow.workflowTypeVersion,
+ inputsJson = Option(inputs),
+ options = workflow.options,
+ customLabels = workflow.customLabels,
+ zippedImports = workflow.zippedImports,
+ refreshToken = workflow.refreshToken))
+
+ makeRequest[List[CromwellStatus]](HttpRequest(HttpMethods.POST, batchSubmitEndpoint, List.empty[HttpHeader], requestEntity)) map { statuses =>
+ val zipped = submissionSet.zip(statuses)
+ zipped map { case (submission, status) =>
+ SubmittedWorkflow(WorkflowId.fromString(status.id), cromwellUrl, submission)
+ }
+ }
+ }
+
+ def abort(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStatus] = simpleRequest[CromwellStatus](uri = abortEndpoint(workflowId), method = HttpMethods.POST) map WorkflowStatus.apply
+ def status(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStatus] = simpleRequest[CromwellStatus](statusEndpoint(workflowId)) map WorkflowStatus.apply
+ def metadata(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowMetadata] = simpleRequest[String](metadataEndpoint(workflowId)) map WorkflowMetadata
+ def outputs(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowOutputs] = simpleRequest[WorkflowOutputs](outputsEndpoint(workflowId))
+ def logs(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowLogs] = simpleRequest[WorkflowLogsStruct](logsEndpoint(workflowId)) map WorkflowLogs.apply
+ def callCacheDiff(workflowA: WorkflowId, callA: String, shardIndexA: ShardIndex, workflowB: WorkflowId, callB: String, shardIndexB: ShardIndex)(implicit ec: ExecutionContext): Future[CallCacheDiff] =
+ simpleRequest[CallCacheDiff](diffEndpoint(workflowA, callA, shardIndexA, workflowB, callB, shardIndexB))
+ def backends(implicit ec: ExecutionContext): Future[CromwellBackends] = simpleRequest[CromwellBackends](backendsEndpoint)
+ def version(implicit ec: ExecutionContext): Future[CromwellVersion] = simpleRequest[CromwellVersion](versionEndpoint)
+
+ private [api] def executeRequest(request: HttpRequest) = Http().singleRequest(request)
+
+ /**
+ *
+ * @tparam A The type of response expected. Must be supported by an implicit unmarshaller from ResponseEntity.
+ */
+ private def makeRequest[A](request: HttpRequest)(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): Future[A] = for {
+ response <- executeRequest(request)
+ decoded <- Future.fromTry(decodeResponse(response))
+ entity <- Future.fromTry(decoded.toEntity)
+ unmarshalled <- unmarshall(response, entity)(um, ec)
+ } yield unmarshalled
+
+ private def unmarshall[A](response: HttpResponse, entity: Unmarshal[ResponseEntity])(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): Future[A] = {
+ import CromwellFailedResponseExceptionJsonSupport._
+
+ if (response.status.isSuccess()) entity.to[A]
+ else entity.to[CromwellFailedResponseException] flatMap Future.failed
+ }
+
+ private def simpleRequest[A](uri: String, method: HttpMethod = HttpMethods.GET)(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): Future[A] = makeRequest[A](HttpRequest(uri = uri, method = method))
+
+ private val decoders = Map(
+ HttpEncodings.gzip -> Gzip,
+ HttpEncodings.deflate -> Deflate,
+ HttpEncodings.identity -> NoCoding
+ )
+
+ private def decodeResponse(response: HttpResponse): Try[HttpResponse] = {
+ decoders.get(response.encoding) map { decoder =>
+ Try(decoder.decodeMessage(response))
+ } getOrElse Failure(UnsuccessfulRequestException(s"No decoder for ${response.encoding}", response))
+ }
+}
+
+object CromwellClient {
+ final implicit class EnhancedHttpResponse(val response: HttpResponse) extends AnyVal {
+
+ def toEntity: Try[Unmarshal[ResponseEntity]] = response match {
+ case HttpResponse(_: StatusCodes.Success, _, entity, _) => Success(Unmarshal(entity))
+ case HttpResponse(_: StatusCodes.ServerError, _, entity, _) => Success(Unmarshal(entity))
+ case other => Failure(UnsuccessfulRequestException("Unmarshalling error", other))
+ }
+ }
+
+ final case class UnsuccessfulRequestException(message: String, httpResponse: HttpResponse) extends Exception {
+ override def getMessage: String = message + ": " + httpResponse.toString
+ }
+
+ private[api] def insertSecrets(options: Option[String], refreshToken: Option[String]): Option[String] = {
+ import DefaultJsonProtocol._
+ val tokenKey = "refresh_token"
+
+ val secretOptions = for {
+ refreshTokenValue <- refreshToken
+ optionsValue <- options
+ optionsMap = optionsValue.parseJson.asJsObject.convertTo[Map[String, JsValue]]
+ if optionsMap.contains(tokenKey)
+ secretMap = optionsMap.updated(tokenKey, JsString(refreshTokenValue))
+ secretValue = secretMap.toJson.toString
+ } yield secretValue
+
+ secretOptions orElse options
+ }
+}
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala
new file mode 100644
index 000000000..fa4e7fb91
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala
@@ -0,0 +1,15 @@
+package cromwell.api.model
+
+import ShardIndexFormatter._
+import WorkflowIdJsonFormatter._
+import spray.json.DefaultJsonProtocol
+
+case class CallCacheDiffCallDescription(executionStatus: String, allowResultReuse: Boolean, callFqn: String, jobIndex: ShardIndex, workflowId: WorkflowId)
+case class HashDifference(hashKey: String, callA: Option[String], callB: Option[String])
+case class CallCacheDiff(callA: CallCacheDiffCallDescription, callB: CallCacheDiffCallDescription, hashDifferential: List[HashDifference])
+
+object CallCacheDiffJsonSupport extends DefaultJsonProtocol {
+ implicit val CallCacheDiffCallDescriptionFormat = jsonFormat5(CallCacheDiffCallDescription)
+ implicit val HashDifferenceFormat = jsonFormat3(HashDifference)
+ implicit val CallCacheDiffFormat = jsonFormat3(CallCacheDiff)
+}
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellBackends.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellBackends.scala
new file mode 100644
index 000000000..007723310
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellBackends.scala
@@ -0,0 +1,9 @@
+package cromwell.api.model
+
+import spray.json.DefaultJsonProtocol
+
+object CromwellBackendsJsonSupport extends DefaultJsonProtocol {
+ implicit val CromwellBackendsFormat = jsonFormat2(CromwellBackends)
+}
+
+final case class CromwellBackends(defaultBackend: String, supportedBackends: List[String])
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellFailedResponseException.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellFailedResponseException.scala
new file mode 100644
index 000000000..6f58fe3bd
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellFailedResponseException.scala
@@ -0,0 +1,9 @@
+package cromwell.api.model
+
+import spray.json.DefaultJsonProtocol
+
+object CromwellFailedResponseExceptionJsonSupport extends DefaultJsonProtocol {
+ implicit val CromwellFailedResponseExceptionFormat = jsonFormat2(CromwellFailedResponseException)
+}
+
+case class CromwellFailedResponseException(status: String, message: String) extends Exception(message)
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala
new file mode 100644
index 000000000..7fcb0dcba
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala
@@ -0,0 +1,15 @@
+package cromwell.api.model
+
+import java.time.OffsetDateTime
+import spray.json.DefaultJsonProtocol
+import cromwell.api.model.WorkflowIdJsonFormatter._
+import cromwell.api.model.WorkflowStatusJsonFormatter._
+
+case class CromwellQueryResults(results: Seq[CromwellQueryResult])
+
+case class CromwellQueryResult(name: String, id: WorkflowId, status: WorkflowStatus, end: OffsetDateTime, start: OffsetDateTime)
+
+object CromwellQueryResultJsonFormatter extends DefaultJsonProtocol {
+ implicit val CromwellQueryResultJsonFormat = jsonFormat5(CromwellQueryResult)
+ implicit val CromwellQueryResultsJsonFormat = jsonFormat1(CromwellQueryResults)
+}
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellStatus.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellStatus.scala
new file mode 100644
index 000000000..31433e5c7
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellStatus.scala
@@ -0,0 +1,9 @@
+package cromwell.api.model
+
+import spray.json.DefaultJsonProtocol
+
+object CromwellStatusJsonSupport extends DefaultJsonProtocol {
+ implicit val CromwellStatusFormat = jsonFormat2(CromwellStatus)
+}
+
+case class CromwellStatus(id: String, status: String)
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellVersion.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellVersion.scala
new file mode 100644
index 000000000..d6c71a065
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellVersion.scala
@@ -0,0 +1,9 @@
+package cromwell.api.model
+
+import spray.json.DefaultJsonProtocol
+
+object CromwellVersionJsonSupport extends DefaultJsonProtocol {
+ implicit val CromwellVersionFormat = jsonFormat1(CromwellVersion)
+}
+
+case class CromwellVersion(cromwell: String)
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/FailedWorkflowSubmission.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/FailedWorkflowSubmission.scala
new file mode 100644
index 000000000..c9e14cb37
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/FailedWorkflowSubmission.scala
@@ -0,0 +1,9 @@
+package cromwell.api.model
+
+import spray.json.DefaultJsonProtocol
+
+object FailedWorkflowSubmissionJsonSupport extends DefaultJsonProtocol {
+ implicit val FailedWorkflowSubmissionFormat = jsonFormat2(FailedWorkflowSubmission)
+}
+
+case class FailedWorkflowSubmission(status: String, message: String)
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala
new file mode 100644
index 000000000..fd9d88d21
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala
@@ -0,0 +1,16 @@
+package cromwell.api.model
+
+import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, RootJsonFormat}
+import scala.language.postfixOps
+
+object LabelsJsonFormatter extends DefaultJsonProtocol {
+ implicit object LabelJsonFormat extends RootJsonFormat[List[Label]] {
+ def write(l: List[Label]) = JsObject(l map { label => label.key -> JsString(label.value)} :_* )
+ def read(value: JsValue) = value.asJsObject.fields map {
+ case (k, JsString(v)) => Label(k, v)
+ case other => throw new UnsupportedOperationException(s"Cannot deserialize $other to a Label")
+ } toList
+ }
+}
+
+case class Label(key: String, value: String)
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala
new file mode 100644
index 000000000..405305b8a
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala
@@ -0,0 +1,18 @@
+package cromwell.api.model
+
+import spray.json.{DefaultJsonProtocol, JsNumber, JsValue, RootJsonFormat}
+
+case class ShardIndex(index: Option[Int]) extends AnyVal {
+ override def toString: String = index.getOrElse(-1).toString
+}
+
+object ShardIndexFormatter extends DefaultJsonProtocol {
+ implicit object ShardIndexJsonFormat extends RootJsonFormat[ShardIndex] {
+ def write(si: ShardIndex) = JsNumber(si.index.getOrElse(-1))
+ def read(value: JsValue) = value match {
+ case JsNumber(i) if i.equals(-1) => ShardIndex(None)
+ case JsNumber(i) if i.isValidInt && i.intValue >= 0 => ShardIndex(Option(i.intValue()))
+ case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a ShardIndex")
+ }
+ }
+}
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/SubmittedWorkflow.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/SubmittedWorkflow.scala
new file mode 100644
index 000000000..bcebcd5c9
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/SubmittedWorkflow.scala
@@ -0,0 +1,8 @@
+package cromwell.api.model
+
+import java.net.URL
+
+/**
+ * Represents information which we need to capture about a workflow sent to Cromwell.
+ */
+case class SubmittedWorkflow(id: WorkflowId, cromwellServer: URL, workflow: WorkflowSubmission)
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala
new file mode 100644
index 000000000..f52495136
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala
@@ -0,0 +1,33 @@
+package cromwell.api.model
+
+import java.util.UUID
+
+import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat}
+
+// ********* !!!!!!!!!! ********
+//
+// WARNING! This is the Cromwell API version of WorkflowId. If you aren't changing the API client, you probably
+// want cromwell.core.WorkflowId instead!
+//
+// ********* !!!!!!!!!! ********
+
+final case class WorkflowId(id: UUID) extends AnyVal {
+ override def toString = id.toString
+ def shortString = id.toString.split("-")(0)
+}
+
+object WorkflowId {
+ def fromString(id: String): WorkflowId = new WorkflowId(UUID.fromString(id))
+ def randomId() = WorkflowId(UUID.randomUUID())
+}
+
+object WorkflowIdJsonFormatter extends DefaultJsonProtocol {
+ implicit object WorkflowIdJsonFormat extends RootJsonFormat[WorkflowId] {
+ def write(id: WorkflowId) = JsString(id.id.toString)
+ def read(value: JsValue) = value match {
+ case JsString(s) => WorkflowId.fromString(s)
+ case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a WorkflowId")
+ }
+ }
+}
+
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowLogs.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowLogs.scala
new file mode 100644
index 000000000..b08d6299a
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowLogs.scala
@@ -0,0 +1,39 @@
+package cromwell.api.model
+
+import spray.json.DefaultJsonProtocol
+import cromwell.api.model.ShardIndexFormatter._
+
+private[api] case class CallLogStruct(stdout: String, stderr: String, backendLogs: Map[String, String], shardIndex: ShardIndex, attempt: Int)
+private[api] case class WorkflowLogsStruct(calls: Map[String, List[CallLogStruct]], id: String)
+
+
+object WorkflowLogsJsonSupport extends DefaultJsonProtocol {
+ implicit val CallLogStructFormat = jsonFormat5(CallLogStruct)
+ implicit val WorkflowLogsStructFormat = jsonFormat2(WorkflowLogsStruct)
+}
+
+/**
+ * @param logs Mapping from shard index and attempt
+ */
+case class CallLogs(logs: Map[JobLogsKey, JobLogs])
+case class JobLogsKey(shardIndex: ShardIndex, attempt: Int)
+case class JobLogs(stdout: String, stderr: String, backendLogs: Map[String, String])
+
+/**
+ * @param logs Mapping from call name to all logs for that call (including all shards and attempts)
+ */
+case class WorkflowLogs(logs: Map[String, CallLogs])
+
+object WorkflowLogs {
+ def callStructsToCallLogs(structs: List[CallLogStruct]): CallLogs = {
+ val callLogs = structs map { struct =>
+ JobLogsKey(struct.shardIndex, struct.attempt) -> JobLogs(struct.stdout, struct.stderr, struct.backendLogs)
+ }
+ CallLogs(callLogs.toMap)
+ }
+
+ def apply(struct: WorkflowLogsStruct): WorkflowLogs = {
+ val workflowLogs = struct.calls map { case (callName, structs) => callName -> callStructsToCallLogs(structs)}
+ WorkflowLogs(workflowLogs)
+ }
+}
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowMetadata.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowMetadata.scala
new file mode 100644
index 000000000..5b7ff88ab
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowMetadata.scala
@@ -0,0 +1,3 @@
+package cromwell.api.model
+
+case class WorkflowMetadata(value: String) extends AnyVal
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowOutputs.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowOutputs.scala
new file mode 100644
index 000000000..4eb4cd0c8
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowOutputs.scala
@@ -0,0 +1,9 @@
+package cromwell.api.model
+
+import spray.json.DefaultJsonProtocol
+
+object WorkflowOutputsJsonSupport extends DefaultJsonProtocol {
+ implicit val OutputResponseFormat = jsonFormat2(WorkflowOutputs)
+}
+
+case class WorkflowOutputs(id: String, outputs: Map[String, String])
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala
new file mode 100644
index 000000000..6da1282d2
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala
@@ -0,0 +1,51 @@
+package cromwell.api.model
+
+import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat}
+
+// ********* !!!!!!!!!! ********
+//
+// WARNING! This is a Cromwell API class. If you aren't changing the API client, you probably
+// want to look elsewhere (maybe in cromwell.core?)
+//
+// ********* !!!!!!!!!! ********
+
+/**
+ * ADT tree to describe Cromwell workflow statuses, both terminal and non-terminal
+ */
+sealed trait WorkflowStatus
+
+sealed trait TerminalStatus extends WorkflowStatus
+case object Aborted extends TerminalStatus
+case object Failed extends TerminalStatus
+case object Succeeded extends TerminalStatus
+
+sealed trait NonTerminalStatus extends WorkflowStatus
+case object Submitted extends NonTerminalStatus
+case object Running extends NonTerminalStatus
+case object Aborting extends NonTerminalStatus
+
+object WorkflowStatus {
+ def apply(status: String): WorkflowStatus = {
+ status match {
+ case "Submitted" => Submitted
+ case "Running" => Running
+ case "Aborting" => Aborting
+ case "Aborted" => Aborted
+ case "Failed" => Failed
+ case "Succeeded" => Succeeded
+ case bad => throw new IllegalArgumentException(s"No such status: $bad")
+ }
+ }
+
+ def apply(workflowStatus: CromwellStatus): WorkflowStatus = apply(workflowStatus.status)
+}
+
+object WorkflowStatusJsonFormatter extends DefaultJsonProtocol {
+ implicit object WorkflowStatusJsonFormat extends RootJsonFormat[WorkflowStatus] {
+ def write(status: WorkflowStatus) = new JsString(status.toString)
+ def read(value: JsValue) = value match {
+ case JsString(string) => WorkflowStatus(string)
+ case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a WorkflowStatus")
+ }
+ }
+}
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala
new file mode 100644
index 000000000..f20a7aa15
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala
@@ -0,0 +1,35 @@
+package cromwell.api.model
+
+import better.files.File
+
+sealed trait WorkflowSubmission {
+ val wdl: String
+ val workflowType: Option[String]
+ val workflowTypeVersion: Option[String]
+ val inputsJson: Option[String]
+ val options: Option[String]
+ val customLabels: Option[List[Label]]
+ val zippedImports: Option[File]
+ val refreshToken: Option[String]
+}
+
+final case class WorkflowSingleSubmission(wdl: String,
+ workflowType: Option[String],
+ workflowTypeVersion: Option[String],
+ inputsJson: Option[String],
+ options: Option[String],
+ customLabels: Option[List[Label]],
+ zippedImports: Option[File],
+ refreshToken: Option[String]) extends WorkflowSubmission
+
+final case class WorkflowBatchSubmission(wdl: String,
+ workflowType: Option[String],
+ workflowTypeVersion: Option[String],
+ inputsBatch: List[String],
+ options: Option[String],
+ customLabels: Option[List[Label]],
+ zippedImports: Option[File],
+ refreshToken: Option[String]) extends WorkflowSubmission {
+
+ override val inputsJson: Option[String] = Option(inputsBatch.mkString(start = "[", sep = ",", end = "]"))
+}
diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala
new file mode 100644
index 000000000..5dd6bffb1
--- /dev/null
+++ b/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala
@@ -0,0 +1,20 @@
+package cromwell.api
+
+import java.time.OffsetDateTime
+
+import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat}
+
+package object model {
+
+ implicit val OffsetDateTimeJsonFormat = OffsetDateTimeJsonFormatter.OffsetDateTimeFormat
+
+ object OffsetDateTimeJsonFormatter extends DefaultJsonProtocol {
+ object OffsetDateTimeFormat extends RootJsonFormat[OffsetDateTime] {
+ def write(odt: OffsetDateTime) = new JsString(odt.toString)
+ def read(value: JsValue) = value match {
+ case JsString(string) => OffsetDateTime.parse(string)
+ case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into an OffsetDateTime")
+ }
+ }
+ }
+}
diff --git a/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala
new file mode 100644
index 000000000..5c0b43628
--- /dev/null
+++ b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala
@@ -0,0 +1,36 @@
+package cromwell.api
+
+import org.scalatest.prop.TableDrivenPropertyChecks
+import org.scalatest.{FlatSpec, Matchers}
+import spray.json.JsonParser.ParsingException
+
+class CromwellClientSpec extends FlatSpec with Matchers with TableDrivenPropertyChecks {
+ behavior of "CromwellClient"
+
+ val table = Table(
+ ("description", "optionsOption", "refreshTokenOption", "expected"),
+ ("ignore bad json when refresh token not provided", Option("{"), None, Option("{")),
+ ("not format json when refresh token key not found", Option("{ }"), Option("myToken"), Option("{ }")),
+ ("replace token when found", Option("""{"refresh_token" : "replace_me"}"""), Option("myToken"),
+ Option("""{"refresh_token":"myToken"}""")),
+ )
+
+ forAll(table) { (description, optionsOption, refreshTokenOption, expected) =>
+ it should description in {
+ val actual = CromwellClient.insertSecrets(optionsOption, refreshTokenOption)
+ actual should be(expected)
+ }
+ }
+
+ it should "throw an exception when inserting a refresh token into bad json" in {
+ val optionsOption = Option("{")
+ val refreshTokenOption = Option("myToken")
+ val actual = intercept[ParsingException](CromwellClient.insertSecrets(optionsOption, refreshTokenOption))
+ actual.summary should be("""Unexpected end-of-input at input index 1 (line 1, position 2), expected '"'""")
+ actual.detail should be(
+ """|
+ |{
+ | ^
+ |""".stripMargin)
+ }
+}
diff --git a/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala
new file mode 100644
index 000000000..d8b2dd917
--- /dev/null
+++ b/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala
@@ -0,0 +1,41 @@
+package cromwell.api
+
+import java.net.URL
+
+import akka.actor.ActorSystem
+import akka.http.scaladsl.model._
+import akka.stream.ActorMaterializer
+import akka.testkit.TestKit
+import cromwell.api.model.CromwellFailedResponseException
+import org.scalatest.{AsyncFlatSpecLike, BeforeAndAfterAll, Matchers}
+
+import scala.concurrent.duration._
+import scala.concurrent.{Await, Future}
+import scala.language.postfixOps
+
+class CromwellResponseFailedSpec extends TestKit(ActorSystem()) with AsyncFlatSpecLike with Matchers with BeforeAndAfterAll {
+ override def afterAll(): Unit = {
+ Await.ready(system.terminate(), 1 second)
+ super.afterAll()
+ }
+
+ implicit val materializer = ActorMaterializer()
+
+ "CromwellAPIClient" should "try to fail the Future with a CromwellFailedResponseException if the HttpResponse is unsuccessful" in {
+ val client = new CromwellClient(new URL("http://fakeurl"), "v1") {
+ override def executeRequest(request: HttpRequest): Future[HttpResponse] = Future.successful(
+ new HttpResponse(StatusCodes.ServiceUnavailable, List.empty[HttpHeader], HttpEntity(ContentTypes.`application/json`,
+ """{
+ | "status": "fail",
+ | "message": "Cromwell service shutting down"
+ |}
+ """.stripMargin), HttpProtocols.`HTTP/1.1`)
+ )
+ }
+
+ recoverToExceptionIf[CromwellFailedResponseException] { client.version(scala.concurrent.ExecutionContext.global) } map { exception =>
+ assert(exception.status == "fail")
+ assert(exception.message == "Cromwell service shutting down")
+ }
+ }
+}
diff --git a/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala
new file mode 100644
index 000000000..f1983e794
--- /dev/null
+++ b/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala
@@ -0,0 +1,45 @@
+package cromwell.api.model
+
+import java.time.OffsetDateTime
+
+import org.scalatest.{FlatSpec, Matchers}
+import spray.json._
+import cromwell.api.model.CromwellQueryResultJsonFormatter._
+
+class CromwellQueryResultJsonFormatterSpec extends FlatSpec with Matchers {
+
+ behavior of "CromwellQueryResultJsonFormat"
+
+ val sampleQueryResult = CromwellQueryResults(results = List(
+ CromwellQueryResult("switcheroo", WorkflowId.fromString("bee51f36-396d-4e22-8a81-33dedff66bf6"), Failed, OffsetDateTime.parse("2017-07-24T14:44:34.010-04:00"), OffsetDateTime.parse("2017-07-24T14:44:33.227-04:00")),
+ CromwellQueryResult("switcheroo", WorkflowId.fromString("0071495e-39eb-478e-bc98-8614b986c91e"), Succeeded, OffsetDateTime.parse("2017-07-24T15:06:45.940-04:00"), OffsetDateTime.parse("2017-07-24T15:04:54.372-04:00"))
+ ))
+
+ val sampleJson = """|{
+ | "results": [
+ | {
+ | "name": "switcheroo",
+ | "id": "bee51f36-396d-4e22-8a81-33dedff66bf6",
+ | "status": "Failed",
+ | "end": "2017-07-24T14:44:34.010-04:00",
+ | "start": "2017-07-24T14:44:33.227-04:00"
+ | },
+ | {
+ | "name": "switcheroo",
+ | "id": "0071495e-39eb-478e-bc98-8614b986c91e",
+ | "status": "Succeeded",
+ | "end": "2017-07-24T15:06:45.940-04:00",
+ | "start": "2017-07-24T15:04:54.372-04:00"
+ | }
+ | ]
+ |}""".stripMargin.parseJson.asJsObject
+
+ it should "write a query result as a structured JsObject" in {
+
+ sampleQueryResult.toJson shouldEqual sampleJson
+ }
+
+ it should "read a query result as a structured JsObject" in {
+ sampleJson.convertTo[CromwellQueryResults] shouldBe sampleQueryResult
+ }
+}
diff --git a/cromwellApiClient/src/test/scala/cromwell/api/model/LabelsJsonFormatterSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/model/LabelsJsonFormatterSpec.scala
new file mode 100644
index 000000000..db38ba6a8
--- /dev/null
+++ b/cromwellApiClient/src/test/scala/cromwell/api/model/LabelsJsonFormatterSpec.scala
@@ -0,0 +1,36 @@
+package cromwell.api.model
+
+import spray.json._
+import org.scalatest.FlatSpec
+import org.scalatest.Matchers
+
+class LabelsJsonFormatterSpec extends FlatSpec with Matchers {
+ import cromwell.api.model.LabelsJsonFormatter._
+
+ behavior of "WdlValueJsonFormat"
+
+ val sampleLabels = List(Label("key-1", "value-1"), Label("key-2", "value-2"), Label("key-3", "value-3"))
+ val sampleJson = """|{
+ | "key-1":"value-1",
+ | "key-2":"value-2",
+ | "key-3":"value-3"
+ |}""".stripMargin.parseJson.asJsObject
+
+ it should "write a Label as a structured JsObject" in {
+ val label = List(Label("test-key", "test-value"))
+ val expectedJson: JsObject =
+ """|{
+ | "test-key": "test-value"
+ |}""".stripMargin.parseJson.asJsObject
+
+ label.toJson shouldEqual expectedJson
+ }
+
+ it should "write an optional list of labels as a structured JsObject" in {
+ Option(sampleLabels).toJson shouldEqual sampleJson
+ }
+
+ it should "read a list of labels as a structured JsObject" in {
+ sampleJson.convertTo[List[Label]] shouldBe sampleLabels
+ }
+}
diff --git a/database/migration/src/main/resources/changelog.xml b/database/migration/src/main/resources/changelog.xml
index b3c05e026..e005c5ef4 100644
--- a/database/migration/src/main/resources/changelog.xml
+++ b/database/migration/src/main/resources/changelog.xml
@@ -61,6 +61,13 @@
+
+
+
+
+
+
+