diff --git a/.pullapprove.yml b/.pullapprove.yml index 67ea4ac58..c26c94ba2 100644 --- a/.pullapprove.yml +++ b/.pullapprove.yml @@ -1,16 +1,24 @@ -approve_by_comment: true -approve_regex: ':\+1:' -reset_on_push: false -author_approval: ignored -reviewers: +# enabling version 2 turns github reviews on by default +version: 2 +group_defaults: + approve_by_comment: + enabled: true + approve_regex: ':\+1:' + reset_on_push: + enabled: false +groups: + reviewers: required: 2 - members: + github_reviews: + enabled: true + author_approval: + ignored: true + users: - Horneth - cjllanwarne - francares - gauravs90 - geoffjentry - - jainh - jsotobroad - katevoss - kcibul diff --git a/.travis.yml b/.travis.yml index 1be54097a..7b8dfaa2f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,8 +1,10 @@ sudo: required dist: trusty +services: + - docker language: scala scala: - - 2.11.8 + - 2.12.2 jdk: - oraclejdk8 cache: @@ -16,10 +18,12 @@ before_cache: - find $HOME/.ivy2 -name "ivydata-*.properties" -delete - find $HOME/.sbt -name "*.lock" -delete before_install: - - openssl aes-256-cbc -K "$encrypted_5ebd3ff04788_key" -iv "$encrypted_5ebd3ff04788_iv" -in src/bin/travis/resources/jesConf.tar.enc -out jesConf.tar -d || true + # https://github.com/travis-ci/travis-ci/issues/7940#issuecomment-310759657 + - sudo rm -f /etc/boto.cfg env: global: - CENTAUR_BRANCH=develop + - INTEGRATION_TESTS_DIR=src/main/resources/integrationTestCases matrix: # Setting this variable twice will cause the 'script' section to run twice with the respective env var invoked - BUILD_TYPE=sbt @@ -36,3 +40,12 @@ deploy: script: src/bin/travis/publishRelease.sh on: tags: true +notifications: + slack: + rooms: + - secure: B5KYcnhk/ujAUWlHsjzP7ROLm6MtYhaGikdYf6JYINovhMbVKnZCTlZEy7rqT3L2T5uJ25iefD500VQGk1Gn7puQ1sNq50wqjzQaj20PWEiBwoWalcV/nKBcQx1TyFT13LJv8fbFnVPxFCkC3YXoHedx8qAhDs8GH/tT5J8XOC8= + template: + - "Build <%{build_url}|#%{build_number}> (<%{compare_url}|%{commit}>) of %{repository}@%{branch} by %{author} 
%{result} in %{duration}" + on_success: change + on_failure: change + on_pull_requests: false diff --git a/CHANGELOG.md b/CHANGELOG.md index d6b4a0938..794a378b3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,134 @@ # Cromwell Change Log +## 29 + +### Breaking Changes + +* Request timeouts for HTTP requests on the REST API now return a 503 status code instead of 500. The response for a request timeout is no longer in JSON format. +* The metadata endpoint no longer returns gzipped responses by default. This now needs to be explicitly requested with an `Accept-Encoding: gzip` header + +* Command line usage has been extensively revised for Cromwell 29. Please see the +[README](https://github.com/broadinstitute/cromwell#command-line-usage) for details. + +* The engine endpoints are now served under `/engine`. Previousely engine endpoints were available under +`/api/engine`. Workflow endpoints are still served under `/api/workflows`. The setting `api.routeUnwrapped` has been +retired at the same time. + +* The response format of the [callcaching/diff](https://github.com/broadinstitute/cromwell#get-apiworkflowsversioncallcachingdiff) endpoint has been updated. + +### Cromwell Server + +* Cromwell now attempts to gracefully shutdown when running in server mode and receiving a `SIGINT` (`Ctrl-C`) or `SIGTERM` (`kill`) signal. This includes waiting for all pending Database writes before exiting. +A detailed explanation and information about how to configure this feature can be found in the [Cromwell Wiki](https://github.com/broadinstitute/cromwell/wiki/DevZone#graceful-server-shutdown). 
+ +## 28 + +### Bug Fixes + +#### WDL write_* functions add a final newline + +The following WDL functions now add a newline after the final line of output (the previous behavior of not adding this +newline was inadvertent): +- `write_lines` +- `write_map` +- `write_object` +- `write_objects` +- `write_tsv` + +For example: + +``` +task writer { + Array[String] a = ["foo", "bar"] + command { + # used to output: "foo\nbar" + # now outputs: "foo\nbar\n" + cat write_lines(a) + } +} +``` + +#### `ContinueWhilePossible` + +A workflow utilizing the WorkflowFailureMode Workflow Option `ContinueWhilePossible` will now successfully reach a terminal state once all runnable jobs have completed. +#### `FailOnStderr` +When `FailOnStderr` is set to false, Cromwell no longer checks for the existence of a stderr file for that task. + +### WDL Functions + +#### New functions: floor, ceil and round: + +Enables the `floor`, `ceil` and `round` functions in WDL to convert floating point numbers to integers. + +For example we can now use the size of an input file to influence the amount of memory the task is given. In the example below a 500MB input file will result in a request for a VM with 2GB of memory: + +``` +task foo { + File in_file + command { ... } + runtime { + docker: "..." + memory: ceil(size(in_file)) * 4 + } +} +``` + +### Call Caching + +* Hash values calculated by Cromwell for a call when call caching is enabled are now published to the metadata. +It is published even if the call failed. However if the call is attempted multiple times (because it has been preempted for example), +since hash values are strictly identical for all attempts, they will only be published in the last attempt section of the metadata for this call. +If the hashes fail to be calculated, the reason is indicated in a `hashFailures` field in the `callCaching` section of the call metadata. +*Important*: Hashes are not retroactively published to the metadata. 
Which means only workflows run on Cromwell 28+ will have hashes in their metadata. + +See the [README](https://github.com/broadinstitute/cromwell#get-apiworkflowsversionidmetadata) for an example metadata response. + +* New endpoint returning the hash differential for 2 calls. + +`GET /api/workflows/:version/callcaching/diff` + +See the [README](https://github.com/broadinstitute/cromwell#get-apiworkflowsversioncallcachingdiff) for more details. + +### Workflow Submission + +* The workflow submission parameters `wdlSource` and `wdlDependencies` have been deprecated in favor of `workflowSource` and +`workflowDependencies` respectively. The older names are still supported in Cromwell 28 with deprecation warnings but will +be removed in a future version of Cromwell. + +### Labels +* A new `/labels` endpoint has been added to update labels for an existing workflow. See the [README](README.md#patch-apiworkflowsversionidlabels) for more information. +* Label formatting requirements have been updated, please check the [README](README.md#label-format) for more detailed documentation. + + +### JES Backend + +The JES backend now supports a `filesystems.gcs.caching.duplication-strategy` configuration entry. +It can be set to specify the desired behavior of Cromwell regarding call outputs when a call finds a hit in the cache. +The default value is `copy` which will copy all output files to the new call directory. +A second value is allowed, `reference`, that will instead point to the original output files, without copying them. + + +```hocon +filesystems { + gcs { + auth = "application-default" + + caching { + duplication-strategy = "reference" + } + } +} +``` + +A placeholder file will be placed in the execution folder of the cached call to explain the absence of output files and point to the location of the original ones. 
+ + +### Metadata Write Batching + +Metadata write batching works the same as in previous versions of Cromwell, but the default batch size has been changed from 1 to 200. It's possible that 200 is too high in some environments, but 200 is more likely to be an appropriate value +than the previous default. + + ## 27 ### Migration diff --git a/NOTICE b/NOTICE deleted file mode 100644 index 15d3c2b54..000000000 --- a/NOTICE +++ /dev/null @@ -1,4 +0,0 @@ -cromwell.webservice/PerRequest.scala (https://github.com/NET-A-PORTER/spray-actor-per-request) -is distributed with this software under the Apache License, Version 2.0 (see the LICENSE-ASL file). In accordance -with that license, that software comes with the following notices: -    Copyright (C) 2011-2012 Ian Forsey diff --git a/README.md b/README.md index 1c228c7af..97040254e 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ A [Workflow Management System](https://en.wikipedia.org/wiki/Workflow_management * [Building](#building) * [Installing](#installing) * [Upgrading from 0.19 to 0.21](#upgrading-from-019-to-021) -* [**NEW** Command Line Usage](http://gatkforums.broadinstitute.org/wdl/discussion/8782/command-line-cromwell) (on the WDL/Cromwell Website) +* [Command Line Usage](#command-line-usage) * [Getting Started with WDL](#getting-started-with-wdl) * [WDL Support](#wdl-support) * [Configuring Cromwell](#configuring-cromwell) @@ -85,6 +85,7 @@ A [Workflow Management System](https://en.wikipedia.org/wiki/Workflow_management * [POST /api/workflows/:version/batch](#post-apiworkflowsversionbatch) * [GET /api/workflows/:version/query](#get-apiworkflowsversionquery) * [POST /api/workflows/:version/query](#post-apiworkflowsversionquery) + * [PATCH /api/workflows/:version/:id/labels](#patch-apiworkflowsversionidlabels) * [GET /api/workflows/:version/:id/status](#get-apiworkflowsversionidstatus) * [GET /api/workflows/:version/:id/outputs](#get-apiworkflowsversionidoutputs) * [GET 
/api/workflows/:version/:id/timing](#get-apiworkflowsversionidtiming) @@ -92,8 +93,9 @@ A [Workflow Management System](https://en.wikipedia.org/wiki/Workflow_management * [GET /api/workflows/:version/:id/metadata](#get-apiworkflowsversionidmetadata) * [POST /api/workflows/:version/:id/abort](#post-apiworkflowsversionidabort) * [GET /api/workflows/:version/backends](#get-apiworkflowsversionbackends) - * [GET /api/engine/:version/stats](#get-apiengineversionstats) - * [GET /api/engine/:version/version](#get-apiengineversionversion) + * [GET /api/workflows/:version/callcaching/diff](#get-apiworkflowsversioncallcachingdiff) + * [GET /engine/:version/stats](#get-engineversionstats) + * [GET /engine/:version/version](#get-engineversionversion) * [Error handling](#error-handling) * [Developer](#developer) * [Generating table of contents on Markdown files](#generating-table-of-contents-on-markdown-files) @@ -118,13 +120,13 @@ There is a [Cromwell gitter channel](https://gitter.im/broadinstitute/cromwell) The following is the toolchain used for development of Cromwell. Other versions may work, but these are recommended. -* [Scala 2.11.8](http://www.scala-lang.org/news/2.11.8/) +* [Scala 2.12.2](http://www.scala-lang.org/news/2.12.1#scala-212-notes) * [SBT 0.13.12](https://github.com/sbt/sbt/releases/tag/v0.13.12) * [Java 8](http://www.oracle.com/technetwork/java/javase/overview/java8-2100321.html) # Building -`sbt assembly` will build a runnable JAR in `target/scala-2.11/` +`sbt assembly` will build a runnable JAR in `target/scala-2.12/` Tests are run via `sbt test`. Note that the tests do require Docker to be running. To test this out while downloading the Ubuntu image that is required for tests, run `docker pull ubuntu:latest` prior to running `sbt test` @@ -136,6 +138,206 @@ OS X users can install Cromwell with Homebrew: `brew install cromwell`. See the [migration document](MIGRATION.md) for more details. 
+# Command Line Usage + +For built-in documentation of Cromwell command line usage, run the Cromwell JAR file with no arguments: + +``` +$ java -jar cromwell-.jar +``` + +For example, `$ java -jar cromwell-29.jar`. You will get a usage message like the following: + +``` +cromwell 29 +Usage: java -jar /path/to/cromwell.jar [server|run] [options] ... + + --help Cromwell - Workflow Execution Engine + --version +Command: server +Starts a web server on port 8000. See the web server documentation for more details about the API endpoints. +Command: run [options] workflow-source +Run the workflow and print out the outputs in JSON format. + workflow-source Workflow source file. + -i, --inputs Workflow inputs file. + -o, --options Workflow options file. + -t, --type Workflow type. + -v, --type-version + Workflow type version. + -l, --labels Workflow labels file. + -p, --imports A directory or zipfile to search for workflow imports. + -m, --metadata-output + An optional directory path to output metadata. +``` + +## --version + +The `--version` option prints the version of Cromwell and exits. + +## --help + +The `--help` option prints the full help text above and exits. + +## server + +The `server` command runs Cromwell as a web server. No arguments are accepted. +See the documentation for Cromwell's REST endpoints [here](#rest-api). + +## run + +The `run` command executes a single workflow in Cromwell. + +### workflow-source +The `run` command requires a single argument for the workflow source file. + +### --inputs +An optional file of workflow inputs. Although optional, it is a best practice to use an inputs file to satisfy workflow +requirements rather than hardcoding inputs directly into a workflow source file. + +### --options +An optional file of workflow options. Some options are global (supported by all backends), while others are backend-specific. +See the [workflow options](#workflow-options) documentation for more details. 
+ +### --type +An optional parameter to specify the language for the workflow source. Any value specified for this parameter is currently +ignored and internally the value `WDL` is used. + +### --type-version +An optional parameter to specify the version of the language for the workflow source. Currently any specified value is ignored. + +### --labels +An optional parameter to specify a file of JSON key-value label pairs to associate with the workflow. + +### --imports +You have the option of importing WDL workflows or tasks to use within your workflow, known as sub-workflows. +If you use sub-workflows within your primary workflow then you must include a zip file with the WDL import files. + +For example, say you have a directory of WDL files: + +``` +wdl_library +└──cgrep.wdl +└──ps.wdl +└──wc.wdl +``` + +If you zip that directory into `wdl_library.zip`, then you can reference and use these WDLs within your primary WDL. + +This could be your primary WDL: + +``` +import "ps.wdl" as ps +import "cgrep.wdl" +import "wc.wdl" as wordCount + +workflow my_wf { + +call ps.ps as getStatus +call cgrep.cgrep { input: str = getStatus.x } +call wordCount { input: str = ... } + +} +``` + +Then to run this WDL without any inputs, workflow options, or metadata files, you would enter: + +`$ java -jar cromwell-.jar run my_wf.wdl --imports /path/to/wdl_library.zip` + +### --metadata-output + +You can include a path where Cromwell will write the workflow metadata JSON, such as start/end timestamps, status, inputs, and outputs. By default, Cromwell does not write workflow metadata. + +This example includes a metadata path called `/path/to/my_wf.metadata`: + +``` +$ java -jar cromwell-.jar run my_wf.wdl --metadata-output /path/to/my_wf.metadata +``` + +Again, Cromwell is very verbose. 
Here is the metadata output in my_wf.metadata: + +``` +{ + "workflowName": "my_wf", + "submittedFiles": { + "inputs": "{\"my_wf.hello.addressee\":\"m'Lord\"}", + "workflow": "\ntask hello {\n String addressee\n command {\n echo \"Hello ${addressee}!\"\n }\n output {\n String salutation = read_string(stdout())\n }\n runtime {\n +\n }\n}\n\nworkflow my_wf {\n call hello\n output {\n hello.salutation\n }\n}\n", + "options": "{\n\n}" + }, + "calls": { + "my_wf.hello": [ + { + "executionStatus": "Done", + "stdout": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068/call-hello/execution/stdout", + "backendStatus": "Done", + "shardIndex": -1, + "outputs": { + "salutation": "Hello m'Lord!" + }, + "runtimeAttributes": { + "continueOnReturnCode": "0", + "failOnStderr": "false" + }, + "callCaching": { + "allowResultReuse": false, + "effectiveCallCachingMode": "CallCachingOff" + }, + "inputs": { + "addressee": "m'Lord" + }, + "returnCode": 0, + "jobId": "28955", + "backend": "Local", + "end": "2017-04-19T10:53:25.045-04:00", + "stderr": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068/call-hello/execution/stderr", + "callRoot": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068/call-hello", + "attempt": 1, + "executionEvents": [ + { + "startTime": "2017-04-19T10:53:23.570-04:00", + "description": "PreparingJob", + "endTime": "2017-04-19T10:53:23.573-04:00" + }, + { + "startTime": "2017-04-19T10:53:23.569-04:00", + "description": "Pending", + "endTime": "2017-04-19T10:53:23.570-04:00" + }, + { + "startTime": "2017-04-19T10:53:25.040-04:00", + "description": "UpdatingJobStore", + "endTime": "2017-04-19T10:53:25.045-04:00" + }, + { + "startTime": "2017-04-19T10:53:23.570-04:00", + "description": "RequestingExecutionToken", + "endTime": "2017-04-19T10:53:23.570-04:00" + }, + { + "startTime": "2017-04-19T10:53:23.573-04:00", + "description": "RunningJob", + "endTime": 
"2017-04-19T10:53:25.040-04:00" + } + ], + "start": "2017-04-19T10:53:23.569-04:00" + } + ] + }, + "outputs": { + "my_wf.hello.salutation": "Hello m'Lord!" + }, + "workflowRoot": "/Users/jdoe/Documents/cromwell-executions/my_wf/cd0fe94a-984e-4a19-ab4c-8f7f07038068", + "id": "cd0fe94a-984e-4a19-ab4c-8f7f07038068", + "inputs": { + "my_wf.hello.addressee": "m'Lord" + }, + "submission": "2017-04-19T10:53:19.565-04:00", + "status": "Succeeded", + "end": "2017-04-19T10:53:25.063-04:00", + "start": "2017-04-19T10:53:23.535-04:00" +} +``` + # Getting Started with WDL For many examples on how to use WDL see [the WDL site](https://github.com/broadinstitute/wdl#getting-started-with-wdl) @@ -361,7 +563,7 @@ system { Or, via `-Dsystem.abort-jobs-on-terminate=true` command line option. -By default, this value is false when running `java -jar cromwell.jar server`, and true when running `java -jar cromwell.jar run `. +By default, this value is false when running `java -jar cromwell.jar server`, and true when running `java -jar cromwell.jar run `. # Security @@ -477,7 +679,7 @@ When Cromwell runs a workflow, it first creates a directory `//call-`. This is the ``. For example, having a `stdout` and `stderr` file is common among both backends and they both write a shell script file to the `` as well. See the descriptions below for details about backend-specific files that are written to these directories. 
-An example of a workflow output directory for a three-step WDL file might look like this: +An example of a workflow output directory for a three-step workflow might look like this: ``` cromwell-executions/ @@ -841,7 +1043,7 @@ backend { TES { actor-factory = "cromwell.backend.impl.tes.TesBackendLifecycleActorFactory" config { - endpoint = "https:///v1/jobs" + endpoint = "https:///v1/tasks" root = "cromwell-executions" dockerRoot = "/cromwell-executions" concurrent-job-limit = 1000 @@ -859,16 +1061,13 @@ This backend supports the following optional runtime attributes / workflow optio * docker: Docker image to use such as "Ubuntu". * dockerWorkingDir: defines the working directory in the container. -Outputs: -It will use `dockerOutputDir` runtime attribute / workflow option to resolve the folder in which the execution results will placed. If there is no `dockerWorkingDir` defined it will use `/cromwell-executions//call-/execution`. - ### CPU, Memory and Disk This backend supports CPU, memory and disk size configuration through the use of the following runtime attributes / workflow options: -* cpu: defines the amount of CPU to use. Default value: 1. Type: Integer. Ex: 4. -* memory: defines the amount of memory to use. Default value: "2 GB". Type: String. Ex: "4 GB" or "4096 MB" -* disk: defines the amount of disk to use. Default value: "2 GB". Type: String. Ex: "1 GB" or "1024 MB" +* cpu: defines the amount of CPU to use. Type: Integer. Ex: 4. +* memory: defines the amount of memory to use. Type: String. Ex: "4 GB" or "4096 MB" +* disk: defines the amount of disk to use. Type: String. Ex: "1 GB" or "1024 MB" -It they are not set, the TES backend will use default values. +If they are not set, the TES backend may use default values. ## Sun GridEngine Backend @@ -1230,7 +1429,7 @@ nativeSpecs attribute needs to be specified as String. ## Spark Backend -This backend adds support for execution of spark jobs in a workflow using the existing wdl format. 
+This backend adds support for execution of spark jobs in a workflow. It supports the following Spark deploy modes: @@ -1304,7 +1503,7 @@ Supported runtime attributes for a Spark Job is as follows: * appMainClass ( Spark app/job entry point) * numberOfExecutors ( Specific to cluster deploy mode) -Sample usage : +Sample usage: ```wdl task sparkjob_with_yarn_cluster { @@ -1330,8 +1529,8 @@ Supported File Systems as follows: * Network File System * Distributed file system -### Sample Wdl -Next, create a Wdl, and it's json input like so: +### Sample WDL +Next, create a WDL, and its json input like so: ```wdl task sparkjob_with_yarn_cluster { @@ -1685,16 +1884,15 @@ Valid keys and their meanings: * **google_project** - (JES backend only) Specifies which google project to execute this workflow. * **refresh_token** - (JES backend only) Only used if `localizeWithRefreshToken` is specified in the [configuration file](#configuring-cromwell). * **auth_bucket** - (JES backend only) defaults to the the value in **jes_gcs_root**. This should represent a GCS URL that only Cromwell can write to. The Cromwell account is determined by the `google.authScheme` (and the corresponding `google.userAuth` and `google.serviceAuth`) - * **monitoring_script** - (JES backend only) Specifies a GCS URL to a script that will be invoked prior to the WDL command being run. For example, if the value for monitoring_script is "gs://bucket/script.sh", it will be invoked as `./script.sh > monitoring.log &`. The value `monitoring.log` file will be automatically de-localized. + * **monitoring_script** - (JES backend only) Specifies a GCS URL to a script that will be invoked prior to the user command being run. For example, if the value for monitoring_script is "gs://bucket/script.sh", it will be invoked as `./script.sh > monitoring.log &`. The value `monitoring.log` file will be automatically de-localized. # Labels -Every call in Cromwell is labelled by Cromwell so that it can be queried about later. 
The current label set automatically applied is: +Every call run on the JES backend is given certain labels by default, so that Google resources can be queried by these labels later. The current default label set automatically applied is: | Key | Value | Example | Notes | |-----|-------|---------|-------| | cromwell-workflow-id | The Cromwell ID given to the root workflow (i.e. the ID returned by Cromwell on submission) | cromwell-d4b412c5-bf3d-4169-91b0-1b635ce47a26 | To fit the required [format](#label-format), we prefix with 'cromwell-' | -| cromwell-workflow-name | The name of the root workflow | my-root-workflow | | | cromwell-sub-workflow-name | The name of this job's sub-workflow | my-sub-workflow | Only present if the task is called in a subworkflow. | | wdl-task-name | The name of the WDL task | my-task | | | wdl-call-alias | The alias of the WDL call that created this job | my-task-1 | Only present if the task was called with an alias. | @@ -1712,10 +1910,15 @@ Custom labels can also be applied to every call in a workflow by specifying a cu ## Label Format -To fit in with the Google schema for labels, label key and value strings must match the regex `[a-z]([-a-z0-9]*[a-z0-9])?` and be between 1 and 63 characters in length. - -For custom labels, Cromwell will reject any request which is made containing invalid label strings. For automatically applied labels, Cromwell will modify workflow/task/call names to fit the schema, according to the following rules: +When labels are supplied to Cromwell, it will fail any request containing invalid label strings. Below are the requirements for a valid label key/value pair in Cromwell: +- Label keys and values can't contain characters other than `[a-z]`, `[0-9]` or `-`. +- Label keys must start with `[a-z]` and end with `[a-z]` or `[0-9]`. +- Label values must start and end with `[a-z]` or `[0-9]`. +- Label keys may not be empty but label values may be empty. +- Label key and values have a max char limit of 63. 
+Google has a different schema for labels, where label key and value strings must match the regex `[a-z]([-a-z0-9]*[a-z0-9])?` and be no more than 63 characters in length. +For automatically applied labels, Cromwell will modify workflow/task/call names to fit the schema, according to the following rules: - Any capital letters are lowercased. - Any character which is not one of `[a-z]`, `[0-9]` or `-` will be replaced with `-`. - If the start character does not match `[a-z]` then prefix with `x--` @@ -1760,12 +1963,12 @@ Cromwell also accepts two [workflow option](#workflow-options) related to call c Docker tags are a convenient way to point to a version of an image (ubuntu:14.04), or even the latest version (ubuntu:latest). For that purpose, tags are mutable, meaning that the image they point to can change, while the tag name stays the same. -While this is very convenient in some cases, using mutable, or "floating" tags in WDL affects the reproducibility of the WDL file: the same WDL using "ubuntu:latest" run now, and a year, or even a month from now will actually run with different docker images. +While this is very convenient in some cases, using mutable, or "floating" tags in tasks affects the reproducibility of a workflow: the same workflow using "ubuntu:latest" run now, and a year, or even a month from now will actually run with different docker images. This has an even bigger impact when Call Caching is turned on in Cromwell, and could lead to unpredictable behaviors if a tag is updated in the middle of a workflow or even a scatter for example. Docker provides another way of identifying an image version, using the specific digest of the image. The digest is guaranteed to be different if 2 images have different byte content. 
For more information see https://docs.docker.com/registry/spec/api/#/content-digests A docker image with digest can be referenced as follows : **ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950** The above image refers to a specific image of ubuntu, that does not depend on a floating tag. -A WDL containing this Docker image run now and a year from now will run in the exact same container. +A workflow containing this Docker image run now and a year from now will run in the exact same container. In order to remove unpredictable behaviors, Cromwell takes the following approach regarding floating docker tags. @@ -1837,7 +2040,7 @@ When running a job on the Config (Shared Filesystem) backend, Cromwell provides ``` # Imports -Import statements inside of a WDL file are supported by Cromwell when running in Server mode as well as Single Workflow Runner Mode. +Import statements inside of a workflow file are supported by Cromwell when running in Server mode as well as Single Workflow Runner Mode. In Single Workflow Runner Mode, you pass in a zip file which includes the WDL files referenced by the import statements. Cromwell requires the zip file to be passed in as a command line argument, as explained by the section [run](#run). @@ -1846,7 +2049,7 @@ For example, given a workflow `wf.wdl` and an imports directory `WdlImports.zip` java -jar cromwell.jar wf.wdl wf.inputs - - WdlImports.zip ``` -In Server Mode, you pass in a zip file using the parameter `wdlDependencies` via the [POST /api/workflows/:version](#post-apiworkflowsversion) endpoint. +In Server Mode, you pass in a zip file using the parameter `workflowDependencies` via the [POST /api/workflows/:version](#post-apiworkflowsversion) endpoint. 
# Sub Workflows @@ -2307,7 +2510,7 @@ It's also possible to set the URL query parameter `expandSubWorkflows` to `true` # REST API -The `server` subcommand on the executable JAR will start an HTTP server which can accept WDL files to run as well as check status and output of existing workflows. +The `server` subcommand on the executable JAR will start an HTTP server which can accept workflow files to run as well as check status and output of existing workflows. The following sub-sections define which HTTP Requests the web server can accept and what they will return. Example HTTP requests are given in [HTTPie](https://github.com/jkbrzt/httpie) and [cURL](https://curl.haxx.se/) @@ -2319,12 +2522,12 @@ All web server requests include an API version in the url. The current version i This endpoint accepts a POST request with a `multipart/form-data` encoded body. The form fields that may be included are: -* `wdlSource` - *Required* Contains the WDL file to submit for execution. -* `workflowInputs` - *Optional* JSON file containing the inputs. A skeleton file can be generated from [wdltool](https://github.com/broadinstitute/wdltool) using the "inputs" subcommand. +* `workflowSource` - *Required* Contains the workflow source file to submit for execution. +* `workflowInputs` - *Optional* JSON file containing the inputs. For WDL workflows a skeleton file can be generated from [wdltool](https://github.com/broadinstitute/wdltool) using the "inputs" subcommand. * `workflowInputs_n` - *Optional* Where `n` is an integer. JSON file containing the 'n'th set of auxiliary inputs. * `workflowOptions` - *Optional* JSON file containing options for this workflow execution. See the [run](#run) CLI sub-command for some more information about this. * `customLabels` - *Optional* JSON file containing a set of custom labels to apply to this workflow. See [Labels](#labels) for the expected format. 
-* `wdlDependencies` - *Optional* ZIP file containing WDL files that are used to resolve import statements. +* `workflowDependencies` - *Optional* ZIP file containing workflow source files that are used to resolve import statements. Regarding the workflowInputs parameter, in case of key conflicts between multiple input JSON files, higher values of x in workflowInputs_x override lower values. For example, an input specified in workflowInputs_3 will override an input with the same name in workflowInputs or workflowInputs_2. Similarly, an input key specified in workflowInputs_5 will override an identical input key in any other input file. @@ -2335,13 +2538,13 @@ Additionally, although Swagger has a limit of 5 JSON input files, the REST endpo cURL: ``` -$ curl -v "localhost:8000/api/workflows/v1" -F wdlSource=@src/main/resources/3step.wdl -F workflowInputs=@test.json +$ curl -v "localhost:8000/api/workflows/v1" -F workflowSource=@src/main/resources/3step.wdl -F workflowInputs=@test.json ``` HTTPie: ``` -$ http --print=hbHB --form POST localhost:8000/api/workflows/v1 wdlSource=@src/main/resources/3step.wdl workflowInputs@inputs.json +$ http --print=hbHB --form POST localhost:8000/api/workflows/v1 workflowSource=@src/main/resources/3step.wdl workflowInputs@inputs.json ``` Request: @@ -2357,7 +2560,7 @@ Host: localhost:8000 User-Agent: HTTPie/0.9.2 --64128d499e9e4616adea7d281f695dca -Content-Disposition: form-data; name="wdlSource" +Content-Disposition: form-data; name="workflowSource" task ps { command { @@ -2427,13 +2630,13 @@ To specify workflow options as well: cURL: ``` -$ curl -v "localhost:8000/api/workflows/v1" -F wdlSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0.json -F workflowOptions=@options.json +$ curl -v "localhost:8000/api/workflows/v1" -F workflowSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0.json -F workflowOptions=@options.json ``` HTTPie: ``` -http --print=HBhb --form POST http://localhost:8000/api/workflows/v1 wdlSource=@wdl/jes0.wdl 
workflowInputs@wdl/jes0.json workflowOptions@options.json +http --print=HBhb --form POST http://localhost:8000/api/workflows/v1 workflowSource=@wdl/jes0.wdl workflowInputs@wdl/jes0.json workflowOptions@options.json ``` Request (some parts truncated for brevity): @@ -2449,7 +2652,7 @@ Host: localhost:8000 User-Agent: HTTPie/0.9.2 --f3fd038395644de596c460257626edd7 -Content-Disposition: form-data; name="wdlSource" +Content-Disposition: form-data; name="workflowSource" task x { ... } task y { ... } @@ -2485,28 +2688,28 @@ Content-Disposition: form-data; name="workflowOptions"; filename="options.json" This endpoint accepts a POST request with a `multipart/form-data` encoded body. The form fields that may be included are: -* `wdlSource` - *Required* Contains the WDL file to submit for +* `workflowSource` - *Required* Contains the workflow source file to submit for execution. * `workflowInputs` - *Required* JSON file containing the inputs in a -JSON array. A skeleton file for a single inputs json element can be +JSON array. For WDL workflows a skeleton file for a single inputs json element can be generated from [wdltool](https://github.com/broadinstitute/wdltool) using the "inputs" subcommand. The orderded endpoint responses will contain one workflow submission response for each input, respectively. * `workflowOptions` - *Optional* JSON file containing options for this workflow execution. See the [run](#run) CLI sub-command for some more information about this. -* `wdlDependencies` - *Optional* ZIP file containing WDL files that are used to resolve import statements. Applied equally to all workflowInput sets. +* `workflowDependencies` - *Optional* ZIP file containing workflow source files that are used to resolve import statements. Applied equally to all workflowInput sets. 
cURL: ``` -$ curl -v "localhost:8000/api/workflows/v1/batch" -F wdlSource=@src/main/resources/3step.wdl -F workflowInputs=@test_array.json +$ curl -v "localhost:8000/api/workflows/v1/batch" -F workflowSource=@src/main/resources/3step.wdl -F workflowInputs=@test_array.json ``` HTTPie: ``` -$ http --print=hbHB --form POST localhost:8000/api/workflows/v1/batch wdlSource=@src/main/resources/3step.wdl workflowInputs@inputs_array.json +$ http --print=hbHB --form POST localhost:8000/api/workflows/v1/batch workflowSource=@src/main/resources/3step.wdl workflowInputs@inputs_array.json ``` Request: @@ -2522,7 +2725,7 @@ Host: localhost:8000 User-Agent: HTTPie/0.9.2 --64128d499e9e4616adea7d281f695dcb -Content-Disposition: form-data; name="wdlSource" +Content-Disposition: form-data; name="workflowSource" task ps { command { @@ -2603,13 +2806,13 @@ To specify workflow options as well: cURL: ``` -$ curl -v "localhost:8000/api/workflows/v1/batch" -F wdlSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0_array.json -F workflowOptions=@options.json +$ curl -v "localhost:8000/api/workflows/v1/batch" -F workflowSource=@wdl/jes0.wdl -F workflowInputs=@wdl/jes0_array.json -F workflowOptions=@options.json ``` HTTPie: ``` -http --print=HBhb --form POST http://localhost:8000/api/workflows/v1/batch wdlSource=@wdl/jes0.wdl workflowInputs@wdl/jes0_array.json workflowOptions@options.json +http --print=HBhb --form POST http://localhost:8000/api/workflows/v1/batch workflowSource=@wdl/jes0.wdl workflowInputs@wdl/jes0_array.json workflowOptions@options.json ``` Request (some parts truncated for brevity): @@ -2625,7 +2828,7 @@ Host: localhost:8000 User-Agent: HTTPie/0.9.2 --f3fd038395644de596c460257626edd8 -Content-Disposition: form-data; name="wdlSource" +Content-Disposition: form-data; name="workflowSource" task x { ... } task y { ... 
} @@ -2855,6 +3058,37 @@ Server: spray-can/1.3.3 } ``` +## PATCH /api/workflows/:version/:id/labels + +This endpoint is used to update multiple labels for an existing workflow. When supplying a label with a key unique to the workflow submission, a new label key/value entry is appended to that workflow's metadata. When supplying a label with a key that is already associated to the workflow submission, the original label value is updated with the new value for that workflow's metadata. + +The [labels](#labels) must be a mapping of key/value pairs in JSON format that are sent via the PATCH body. The request content type must be +`application/json`. + +cURL: + +``` +$ curl -X PATCH --header "Content-Type: application/json" -d "{\"label-key-1\":\"label-value-1\", \"label-key-2\": \"label-value-2\"}" "http://localhost:8000/api/workflows/v1/c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5/labels" +``` + +HTTPie: + +``` +$ echo '{"label-key-1":"label-value-1", "label-key-2": "label-value-2"}' | http PATCH "http://localhost:8000/api/workflows/v1/c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5/labels" +``` + +Response: +``` +{ "id": "c4c6339c-8cc9-47fb-acc5-b5cb8d2809f5", + "labels": + { + "label-key-1": "label-value-1", + "label-key-2": "label-value-2" + } +} +``` + + ## GET /api/workflows/:version/:id/status cURL: @@ -3017,11 +3251,18 @@ Content-Type: application/json; charset=UTF-8 Content-Length: 7286 { "workflowName": "sc_test", + "submittedFiles": { + "inputs": "{}", + "workflow": "task do_prepare {\n File input_file\n command {\n split -l 1 ${input_file} temp_ && ls -1 temp_?? 
> files.list\n }\n output {\n Array[File] split_files = read_lines(\"files.list\")\n }\n}\n# count the number of words in the input file, writing the count to an output file overkill in this case, but simulates a real scatter-gather that would just return an Int (map)\ntask do_scatter {\n File input_file\n command {\n wc -w ${input_file} > output.txt\n }\n output {\n File count_file = \"output.txt\"\n }\n}\n# aggregate the results back together (reduce)\ntask do_gather {\n Array[File] input_files\n command <<<\n cat ${sep = ' ' input_files} | awk '{s+=$$1} END {print s}'\n >>>\n output {\n Int sum = read_int(stdout())\n }\n}\nworkflow sc_test {\n call do_prepare\n scatter(f in do_prepare.split_files) {\n call do_scatter {\n input: input_file = f\n }\n }\n call do_gather {\n input: input_files = do_scatter.count_file\n }\n}", + "options": "{\n\n}", + "workflowType": "WDL" + }, "calls": { "sc_test.do_prepare": [ { "executionStatus": "Done", "stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_prepare/stdout", + "backendStatus": "Done", "shardIndex": -1, "outputs": { "split_files": [ @@ -3036,6 +3277,30 @@ Content-Length: 7286 "failOnStderr": "true", "continueOnReturnCode": "0" }, + "callCaching": { + "allowResultReuse": true, + "hit": false, + "result": "Cache Miss", + "hashes": { + "output count": "C4CA4238A0B923820DCC509A6F75849B", + "runtime attribute": { + "docker": "N/A", + "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA", + "failOnStderr": "68934A3E9455FA72420237EB05902327" + }, + "output expression": { + "Array": "D856082E6599CF6EC9F7F42013A2EC4C" + }, + "input count": "C4CA4238A0B923820DCC509A6F75849B", + "backend name": "509820290D57F333403F490DDE7316F4", + "command template": "9F5F1F24810FACDF917906BA4EBA807D", + "input": { + "File input_file": "11fa6d7ed15b42f2f73a455bf5864b49" + } + }, + "effectiveCallCachingMode": "ReadAndWriteCache" + }, + "jobId": "34479", "returnCode": 0, "backend": "Local", 
"end": "2016-02-04T13:47:56.000-05:00", @@ -3049,15 +3314,40 @@ Content-Length: 7286 { "executionStatus": "Preempted", "stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/stdout", + "backendStatus": "Preempted", "shardIndex": 0, "outputs": {}, "runtimeAttributes": { "failOnStderr": "true", "continueOnReturnCode": "0" }, + "callCaching": { + "allowResultReuse": true, + "hit": false, + "result": "Cache Miss", + "hashes": { + "output count": "C4CA4238A0B923820DCC509A6F75849B", + "runtime attribute": { + "docker": "N/A", + "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA", + "failOnStderr": "68934A3E9455FA72420237EB05902327" + }, + "output expression": { + "File count_file": "EF1B47FFA9990E8D058D177073939DF7" + }, + "input count": "C4CA4238A0B923820DCC509A6F75849B", + "backend name": "509820290D57F333403F490DDE7316F4", + "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7", + "input": { + "File input_file": "a53794d214dc5dedbcecdf827bf683a2" + } + }, + "effectiveCallCachingMode": "ReadAndWriteCache" + }, "inputs": { "input_file": "f" }, + "jobId": "34496", "backend": "Local", "end": "2016-02-04T13:47:56.000-05:00", "stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/stderr", @@ -3068,6 +3358,7 @@ Content-Length: 7286 { "executionStatus": "Done", "stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/stdout", + "backendStatus": "Done", "shardIndex": 0, "outputs": { "count_file": "/home/jdoe/cromwell/cromwell-test-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/output.txt" @@ -3076,10 +3367,34 @@ Content-Length: 7286 "failOnStderr": "true", "continueOnReturnCode": "0" }, + "callCaching": { + "allowResultReuse": true, + "hit": false, + "result": "Cache Miss", + "hashes": { + "output count": 
"C4CA4238A0B923820DCC509A6F75849B", + "runtime attribute": { + "docker": "N/A", + "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA", + "failOnStderr": "68934A3E9455FA72420237EB05902327" + }, + "output expression": { + "File count_file": "EF1B47FFA9990E8D058D177073939DF7" + }, + "input count": "C4CA4238A0B923820DCC509A6F75849B", + "backend name": "509820290D57F333403F490DDE7316F4", + "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7", + "input": { + "File input_file": "a53794d214dc5dedbcecdf827bf683a2" + } + }, + "effectiveCallCachingMode": "ReadAndWriteCache" + }, "inputs": { "input_file": "f" }, "returnCode": 0, + "jobId": "34965", "end": "2016-02-04T13:47:56.000-05:00", "stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/stderr", "attempt": 2, @@ -3089,6 +3404,7 @@ Content-Length: 7286 { "executionStatus": "Done", "stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-1/stdout", + "backendStatus": "Done", "shardIndex": 1, "outputs": { "count_file": "/home/jdoe/cromwell/cromwell-test-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-1/output.txt" @@ -3097,10 +3413,34 @@ Content-Length: 7286 "failOnStderr": "true", "continueOnReturnCode": "0" }, + "callCaching": { + "allowResultReuse": true, + "hit": false, + "result": "Cache Miss", + "hashes": { + "output count": "C4CA4238A0B923820DCC509A6F75849B", + "runtime attribute": { + "docker": "N/A", + "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA", + "failOnStderr": "68934A3E9455FA72420237EB05902327" + }, + "output expression": { + "File count_file": "EF1B47FFA9990E8D058D177073939DF7" + }, + "input count": "C4CA4238A0B923820DCC509A6F75849B", + "backend name": "509820290D57F333403F490DDE7316F4", + "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7", + "input": { + "File input_file": "d3410ade53df34c78488544285cf743c" + } + }, + 
"effectiveCallCachingMode": "ReadAndWriteCache" + }, "inputs": { "input_file": "f" }, "returnCode": 0, + "jobId": "34495", "backend": "Local", "end": "2016-02-04T13:47:56.000-05:00", "stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-1/stderr", @@ -3113,6 +3453,7 @@ Content-Length: 7286 { "executionStatus": "Done", "stdout": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_gather/stdout", + "backendStatus": "Done", "shardIndex": -1, "outputs": { "sum": 12 @@ -3121,6 +3462,29 @@ Content-Length: 7286 "failOnStderr": "true", "continueOnReturnCode": "0" }, + "callCaching": { + "allowResultReuse": true, + "hit": false, + "result": "Cache Miss", + "hashes": { + "output count": "C4CA4238A0B923820DCC509A6F75849B", + "runtime attribute": { + "docker": "N/A", + "continueOnReturnCode": "CFCD208495D565EF66E7DFF9F98764DA", + "failOnStderr": "68934A3E9455FA72420237EB05902327" + }, + "output expression": { + "File count_file": "EF1B47FFA9990E8D058D177073939DF7" + }, + "input count": "C4CA4238A0B923820DCC509A6F75849B", + "backend name": "509820290D57F333403F490DDE7316F4", + "command template": "FD00A1B0AB6A0C97B0737C83F179DDE7", + "input": { + "File input_file": "e0ef752ab4824939d7947f6012b7c141" + } + }, + "effectiveCallCachingMode": "ReadAndWriteCache" + }, "inputs": { "input_files": [ "/home/jdoe/cromwell/cromwell-test-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_scatter/shard-0/attempt-2/output.txt", @@ -3128,6 +3492,7 @@ Content-Length: 7286 ] }, "returnCode": 0, + "jobId": "34494", "backend": "Local", "end": "2016-02-04T13:47:57.000-05:00", "stderr": "/home/jdoe/cromwell/cromwell-executions/sc_test/8e592ed8-ebe5-4be0-8dcb-4073a41fe180/call-do_gather/stderr", @@ -3152,6 +3517,10 @@ Content-Length: 7286 "inputs": { "sc_test.do_prepare.input_file": "/home/jdoe/cromwell/11.txt" }, + "labels": { + "cromwell-workflow-name": "sc_test", + 
"cromwell-workflow-id": "cromwell-17633f21-11a9-414f-a95b-2e21431bd67d" + }, "submission": "2016-02-04T13:47:55.000-05:00", "status": "Succeeded", "end": "2016-02-04T13:47:57.000-05:00", @@ -3270,6 +3639,16 @@ The `call` and `workflow` may optionally contain failures shaped like this: ] ``` +### Compressing the metadata response + +The response from the metadata endpoint can be quite large depending on the workflow. To help with this Cromwell supports gzip encoding the metadata prior to sending it back to the client. In order to enable this, make sure your client is sending the `Accept-Encoding: gzip` header. + +For instance, with cURL: + +``` +$ curl -H "Accept-Encoding: gzip" http://localhost:8000/api/workflows/v1/b3e45584-9450-4e73-9523-fc3ccf749848/metadata +``` + ## POST /api/workflows/:version/:id/abort cURL: @@ -3326,18 +3705,186 @@ Server: spray-can/1.3.3 } ``` -## GET /api/engine/:version/stats +## GET /api/workflows/:version/callcaching/diff + +**Disclaimer**: This endpoint depends on hash values being published to the metadata, which only happens as of Cromwell 28. +Workflows run with prior versions of Cromwell cannot be used with this endpoint. +A `404 NotFound` will be returned when trying to use this endpoint if either workflow has been run on a prior version. + +This endpoint returns the hash differences between 2 *completed* (successfully or not) calls. +The following query parameters are supported: + +| Parameter | Description | Required | +|:---------:|:-----------------------------------------------------------------------------------------:|:--------:| +| workflowA | Workflow ID of the first call | yes | +| callA | Fully qualified name of the first call. **Including workflow name**. (see example below) | yes | +| indexA | Shard index of the first call | depends | +| workflowB | Workflow ID of the second call | yes | +| callB | Fully qualified name of the second call. **Including workflow name**. 
(see example below) | yes | +| indexB | Shard index of the second call | depends | + +About the `indexX` parameters: It is required if the call was in a scatter. Otherwise it should *not* be specified. +If an index parameter is wrongly specified, the call will not be found and the request will result in a 404 response. + +cURL: + +``` +$ curl "http://localhost:8000/api/workflows/v1/callcaching/diff?workflowA=85174842-4a44-4355-a3a9-3a711ce556f1&callA=wf_hello.hello&workflowB=7479f8a8-efa4-46e4-af0d-802addc66e5d&callB=wf_hello.hello" +``` + +HTTPie: + +``` +$ http "http://localhost:8000/api/workflows/v1/callcaching/diff?workflowA=85174842-4a44-4355-a3a9-3a711ce556f1&callA=wf_hello.hello&workflowB=7479f8a8-efa4-46e4-af0d-802addc66e5d&callB=wf_hello.hello" +``` + +Response: +``` +HTTP/1.1 200 OK +Content-Length: 1274 +Content-Type: application/json; charset=UTF-8 +Date: Tue, 06 Jun 2017 16:44:33 GMT +Server: spray-can/1.3.3 + +{ + "callA": { + "executionStatus": "Done", + "workflowId": "85174842-4a44-4355-a3a9-3a711ce556f1", + "callFqn": "wf_hello.hello", + "jobIndex": -1, + "allowResultReuse": true + }, + "callB": { + "executionStatus": "Done", + "workflowId": "7479f8a8-efa4-46e4-af0d-802addc66e5d", + "callFqn": "wf_hello.hello", + "jobIndex": -1, + "allowResultReuse": true + }, + "hashDifferential": [ + { + "hashKey": "command template", + "callA": "4EAADE3CD5D558C5A6CFA4FD101A1486", + "callB": "3C7A0CA3D7A863A486DBF3F7005D4C95" + }, + { + "hashKey": "input count", + "callA": "C4CA4238A0B923820DCC509A6F75849B", + "callB": "C81E728D9D4C2F636F067F89CC14862C" + }, + { + "hashKey": "input: String addressee", + "callA": "D4CC65CB9B5F22D8A762532CED87FE8D", + "callB": "7235E005510D99CB4D5988B21AC97B6D" + }, + { + "hashKey": "input: String addressee2", + "callA": "116C7E36B4AE3EAFD07FA4C536CE092F", + "callB": null + } + ] +} +``` + +The response is a JSON object with 3 fields: + +- `callA` reports information about the first call, including its `allowResultReuse` value that 
will be used to determine whether or not this call can be cached to. +- `callB` reports information about the second call, including its `allowResultReuse` value that will be used to determine whether or not this call can be cached to. +- `hashDifferential` is an array in which each element represents a difference between the hashes of `callA` and `callB`. + +*If this array is empty, `callA` and `callB` have the same hashes*. + +Differences can be of 3 kinds: + +- `callA` and `callB` both have the same hash key but their values are different. +For instance, in the example above, + +```json +{ + "hashKey": "input: String addressee", + "callA": "D4CC65CB9B5F22D8A762532CED87FE8D", + "callB": "7235E005510D99CB4D5988B21AC97B6D" +} +``` + +indicates that both `callA` and `callB` have a `String` input called `addressee`, but different values were used at runtime, resulting in different MD5 hashes. + +- `callA` has a hash key that `callB` doesn't have +For instance, in the example above, + +```json +{ + "hashKey": "input: String addressee2", + "callA": "116C7E36B4AE3EAFD07FA4C536CE092F", + "callB": null +} +``` + +indicates that `callA` has a `String` input called `addressee2` that doesn't exist in `callB`. For that reason the value of the second field is `null`. + +- `callB` has a hash key that `callA` doesn't have. This is the same case as above but reversed. 
+ +If no cache entry for `callA` or `callB` can be found, the response will be in the following format: + +``` +HTTP/1.1 404 NotFound +Content-Length: 178 +Content-Type: application/json; charset=UTF-8 +Date: Tue, 06 Jun 2017 17:02:15 GMT +Server: spray-can/1.3.3 + +{ + "status": "error", + "message": "Cannot find call 479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1" +} +``` + +If neither `callA` nor `callB` can be found, the response will be in the following format: + + +``` +HTTP/1.1 404 NotFound +Content-Length: 178 +Content-Type: application/json; charset=UTF-8 +Date: Tue, 06 Jun 2017 17:02:15 GMT +Server: spray-can/1.3.3 + +{ + "status": "error", + "message": "Cannot find calls 5174842-4a44-4355-a3a9-3a711ce556f1:wf_hello.hello:-1, 479f8a8-efa4-46e4-af0d-802addc66e5d:wf_hello.hello:-1" +} +``` + +If the query is malformed and required parameters are missing, the response will be in the following format: + +``` +HTTP/1.1 400 BadRequest +Content-Length: 178 +Content-Type: application/json; charset=UTF-8 +Date: Tue, 06 Jun 2017 17:02:15 GMT +Server: spray-can/1.3.3 +{ + "status": "fail", + "message": "Wrong parameters for call cache diff query:\nmissing workflowA query parameter\nmissing callB query parameter", + "errors": [ + "missing workflowA query parameter", + "missing callB query parameter" + ] +} +``` + +## GET /engine/:version/stats This endpoint returns some basic statistics on the current state of the engine. At the moment that includes the number of running workflows and the number of active jobs. cURL: ``` -$ curl http://localhost:8000/api/engine/v1/stats +$ curl http://localhost:8000/engine/v1/stats ``` HTTPie: ``` -$ http http://localhost:8000/api/engine/v1/stats +$ http http://localhost:8000/engine/v1/stats ``` Response: @@ -3353,18 +3900,18 @@ Response: } ``` -## GET /api/engine/:version/version +## GET /engine/:version/version This endpoint returns the version of the Cromwell engine. 
cURL: ``` -$ curl http://localhost:8000/api/engine/v1/version +$ curl http://localhost:8000/engine/v1/version ``` HTTPie: ``` -$ http http://localhost:8000/api/engine/v1/version +$ http http://localhost:8000/engine/v1/version ``` Response: @@ -3379,8 +3926,6 @@ Response: } ``` - - ## Error handling Requests that Cromwell can't process return a failure in the form of a JSON response respecting the following JSON schema: @@ -3434,25 +3979,3 @@ e.g. The `message` field contains a short description of the error. The `errors` field is optional and may contain additional information about why the request failed. - -# Developer - -## Generating table of contents on Markdown files - -``` -$ pip install mdtoc -$ mdtoc --check-links README.md -``` - -## Generating and Hosting ScalaDoc - -Essentially run `sbt doc` then commit the generated code into the `gh-pages` branch on this repository - -``` -$ sbt doc -$ git co gh-pages -$ mv target/scala-2.11/api scaladoc -$ git add scaladoc -$ git commit -m "API Docs" -$ git push origin gh-pages -``` diff --git a/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala b/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala index 3fa6e9c19..c6ddcbcc4 100644 --- a/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala +++ b/backend/src/main/scala/cromwell/backend/BackendJobBreadCrumb.scala @@ -3,9 +3,9 @@ package cromwell.backend import cromwell.backend.io.JobPaths import cromwell.core.path.Path import cromwell.core.{JobKey, WorkflowId} -import wdl4s.Workflow +import wdl4s.wdl.WdlWorkflow -case class BackendJobBreadCrumb(workflow: Workflow, id: WorkflowId, jobKey: JobKey) { +case class BackendJobBreadCrumb(workflow: WdlWorkflow, id: WorkflowId, jobKey: JobKey) { def toPath(root: Path): Path = { val workflowPart = root.resolve(workflow.unqualifiedName).resolve(id.toString) JobPaths.callPathBuilder(workflowPart, jobKey) diff --git a/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala 
b/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala index fa93810cb..068859172 100644 --- a/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala @@ -7,8 +7,8 @@ import cromwell.backend.BackendLifecycleActor._ import cromwell.backend.wdl.OutputEvaluator import cromwell.core.path.Path import cromwell.core.{CallOutputs, ExecutionEvent, JobKey} -import wdl4s.expression.WdlStandardLibraryFunctions -import wdl4s.values.WdlValue +import wdl4s.wdl.expression.WdlStandardLibraryFunctions +import wdl4s.wdl.values.WdlValue import scala.concurrent.Future import scala.util.{Success, Try} diff --git a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala index a6a09cff4..5c1031726 100644 --- a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala @@ -3,7 +3,7 @@ package cromwell.backend import akka.actor.{Actor, ActorRef} import cromwell.backend.BackendLifecycleActor._ import cromwell.core.logging.{JobLogging, WorkflowLogging} -import wdl4s.TaskCall +import wdl4s.wdl.WdlTaskCall import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} @@ -65,7 +65,7 @@ trait BackendWorkflowLifecycleActor extends BackendLifecycleActor with WorkflowL /** * The subset of calls which this backend will be expected to run */ - protected def calls: Set[TaskCall] + protected def calls: Set[WdlTaskCall] } trait BackendJobLifecycleActor extends BackendLifecycleActor with JobLogging { diff --git a/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala b/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala index d0da2c2c5..c255fdc12 100644 --- a/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala +++ 
b/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala @@ -6,8 +6,8 @@ import cromwell.backend.io.WorkflowPathsWithDocker import cromwell.core.CallOutputs import cromwell.core.JobExecutionToken.JobExecutionTokenType import cromwell.core.path.Path -import wdl4s.TaskCall -import wdl4s.expression.{PureStandardLibraryFunctions, WdlStandardLibraryFunctions} +import wdl4s.wdl.WdlTaskCall +import wdl4s.wdl.expression.{PureStandardLibraryFunctions, WdlStandardLibraryFunctions} trait BackendLifecycleActorFactory { @@ -18,8 +18,9 @@ trait BackendLifecycleActorFactory { def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, - calls: Set[TaskCall], - serviceRegistryActor: ActorRef): Option[Props] = None + calls: Set[WdlTaskCall], + serviceRegistryActor: ActorRef, + restarting: Boolean): Option[Props] = None /* ****************************** */ /* Job Execution */ @@ -39,7 +40,7 @@ trait BackendLifecycleActorFactory { def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, - calls: Set[TaskCall], + calls: Set[WdlTaskCall], jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, initializationData: Option[BackendInitializationData]): Option[Props] = None diff --git a/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala b/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala index a7ad6f5c7..18e341ad7 100644 --- a/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala @@ -9,10 +9,10 @@ import cromwell.backend.validation.ContinueOnReturnCodeValidation import cromwell.core.{WorkflowMetadataKeys, WorkflowOptions} import cromwell.services.metadata.MetadataService.PutMetadataAction import cromwell.services.metadata.{MetadataEvent, MetadataKey, MetadataValue} -import wdl4s._ -import 
wdl4s.expression.PureStandardLibraryFunctions -import wdl4s.types._ -import wdl4s.values.WdlValue +import wdl4s.wdl._ +import wdl4s.wdl.expression.PureStandardLibraryFunctions +import wdl4s.wdl.types._ +import wdl4s.wdl.values.WdlValue import scala.concurrent.Future import scala.util.{Failure, Success, Try} @@ -38,7 +38,7 @@ object BackendWorkflowInitializationActor { trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor with ActorLogging { def serviceRegistryActor: ActorRef - def calls: Set[TaskCall] + def calls: Set[WdlTaskCall] /** * This method is meant only as a "pre-flight check" validation of runtime attribute expressions during workflow @@ -125,7 +125,7 @@ trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor w defaultRuntimeAttributes.get(name) } - def badRuntimeAttrsForTask(task: Task) = { + def badRuntimeAttrsForTask(task: WdlTask) = { runtimeAttributeValidators map { case (attributeName, validator) => val value = task.runtimeAttributes.attrs.get(attributeName) orElse defaultRuntimeAttribute(attributeName) attributeName -> ((value, validator(value))) diff --git a/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala b/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala index 506bbe4eb..7e548df2f 100644 --- a/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala +++ b/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala @@ -3,9 +3,9 @@ package cromwell.backend import cromwell.core.WorkflowOptions import cromwell.util.JsonFormatting.WdlValueJsonFormatter import lenthall.util.TryUtil -import wdl4s.{WdlExpressionException, _} -import wdl4s.expression.WdlStandardLibraryFunctions -import wdl4s.values.WdlValue +import wdl4s.wdl.{WdlExpressionException, _} +import wdl4s.wdl.expression.WdlStandardLibraryFunctions +import wdl4s.wdl.values.WdlValue import scala.util.{Success, Try} diff --git 
a/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala b/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala index 79503c927..dd6091f42 100644 --- a/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala +++ b/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala @@ -2,7 +2,7 @@ package cromwell.backend.async import cromwell.core.path.Path import lenthall.exception.ThrowableAggregation -import wdl4s.values.WdlValue +import wdl4s.wdl.values.WdlValue abstract class KnownJobFailureException extends Exception { def stderrPath: Option[Path] diff --git a/backend/src/main/scala/cromwell/backend/backend.scala b/backend/src/main/scala/cromwell/backend/backend.scala index aad2b1b55..527a514f9 100644 --- a/backend/src/main/scala/cromwell/backend/backend.scala +++ b/backend/src/main/scala/cromwell/backend/backend.scala @@ -6,15 +6,15 @@ import cromwell.core.callcaching.MaybeCallCachingEligible import cromwell.core.labels.Labels import cromwell.core.{CallKey, WorkflowId, WorkflowOptions} import cromwell.services.keyvalue.KeyValueServiceActor.KvResponse -import wdl4s._ -import wdl4s.values.WdlValue +import wdl4s.wdl._ +import wdl4s.wdl.values.WdlValue import scala.util.Try /** * For uniquely identifying a job which has been or will be sent to the backend. 
*/ -case class BackendJobDescriptorKey(call: TaskCall, index: Option[Int], attempt: Int) extends CallKey { +case class BackendJobDescriptorKey(call: WdlTaskCall, index: Option[Int], attempt: Int) extends CallKey { def scope = call private val indexString = index map { _.toString } getOrElse "NA" val tag = s"${call.fullyQualifiedName}:$indexString:$attempt" @@ -37,7 +37,7 @@ case class BackendJobDescriptor(workflowDescriptor: BackendWorkflowDescriptor, object BackendWorkflowDescriptor { def apply(id: WorkflowId, - workflow: Workflow, + workflow: WdlWorkflow, knownValues: Map[FullyQualifiedName, WdlValue], workflowOptions: WorkflowOptions, customLabels: Labels) = { @@ -49,7 +49,7 @@ object BackendWorkflowDescriptor { * For passing to a BackendActor construction time */ case class BackendWorkflowDescriptor(id: WorkflowId, - workflow: Workflow, + workflow: WdlWorkflow, knownValues: Map[FullyQualifiedName, WdlValue], workflowOptions: WorkflowOptions, customLabels: Labels, diff --git a/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala b/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala index 9ece33cdd..7af985153 100644 --- a/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala @@ -2,15 +2,15 @@ package cromwell.backend.io import cromwell.backend.BackendJobDescriptor import cromwell.core.CallContext -import wdl4s.TaskCall -import wdl4s.expression.{NoFunctions, PureStandardLibraryFunctionsLike} -import wdl4s.values._ +import wdl4s.wdl.WdlTaskCall +import wdl4s.wdl.expression.{NoFunctions, PureStandardLibraryFunctionsLike} +import wdl4s.wdl.values._ trait GlobFunctions extends PureStandardLibraryFunctionsLike { def callContext: CallContext - def findGlobOutputs(call: TaskCall, jobDescriptor: BackendJobDescriptor): Set[WdlGlobFile] = { + def findGlobOutputs(call: WdlTaskCall, jobDescriptor: BackendJobDescriptor): Set[WdlGlobFile] = { val globOutputs = 
call.task.findOutputFiles(jobDescriptor.fullyQualifiedInputs, NoFunctions) collect { case glob: WdlGlobFile => glob } diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala index 1446ce92b..0d572de10 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala @@ -19,8 +19,8 @@ import cromwell.services.keyvalue.KvClient import cromwell.services.metadata.CallMetadataKeys import lenthall.util.TryUtil import net.ceedubs.ficus.Ficus._ -import wdl4s._ -import wdl4s.values.{WdlFile, WdlGlobFile, WdlSingleFile, WdlValue} +import wdl4s.wdl._ +import wdl4s.wdl.values._ import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future, Promise} import scala.util.{Failure, Success, Try} @@ -251,9 +251,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta * * @return the execution handle for the job. */ - def executeAsync()(implicit ec: ExecutionContext): Future[ExecutionHandle] = { - Future.fromTry(Try(execute())) - } + def executeAsync(): Future[ExecutionHandle] = Future.fromTry(Try(execute())) /** * Recovers the specified job id, or starts a new job. The default implementation simply calls execute(). @@ -269,9 +267,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta * @param jobId The previously recorded job id. * @return the execution handle for the job. */ - def recoverAsync(jobId: StandardAsyncJob)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { - Future.fromTry(Try(recover(jobId))) - } + def recoverAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = Future.fromTry(Try(recover(jobId))) /** * Returns the run status for the job. 
@@ -289,10 +285,7 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta * @param handle The handle of the running job. * @return The status of the job. */ - def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle) - (implicit ec: ExecutionContext): Future[StandardAsyncRunStatus] = { - Future.fromTry(Try(pollStatus(handle))) - } + def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[StandardAsyncRunStatus] = Future.fromTry(Try(pollStatus(handle))) /** * Adds custom behavior invoked when polling fails due to some exception. By default adds nothing. @@ -637,7 +630,9 @@ trait StandardAsyncExecutionActor extends AsyncBackendJobExecutionActor with Sta val stderrSizeAndReturnCode = for { returnCodeAsString <- contentAsStringAsync(jobPaths.returnCode) - stderrSize <- sizeAsync(jobPaths.stderr) + // Only check stderr size if we need to, otherwise this results in a lot of unnecessary I/O that + // may fail due to race conditions on quickly-executing jobs. 
+ stderrSize <- if (failOnStdErr) sizeAsync(jobPaths.stderr) else Future.successful(0L) } yield (stderrSize, returnCodeAsString) stderrSizeAndReturnCode flatMap { diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala b/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala index 7b259ccdc..c6c0b5183 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala @@ -8,7 +8,7 @@ import cromwell.backend.validation.{RuntimeAttributesValidation, ValidatedRuntim import cromwell.core.logging.JobLogging import cromwell.core.path.Path import cromwell.services.metadata.CallMetadataKeys -import wdl4s.TaskCall +import wdl4s.wdl.WdlTaskCall import scala.util.Try @@ -46,7 +46,7 @@ trait StandardCachingActorHelper extends JobCachingActorHelper { */ lazy val workflowDescriptor: BackendWorkflowDescriptor = jobDescriptor.workflowDescriptor - lazy val call: TaskCall = jobDescriptor.key.call + lazy val call: WdlTaskCall = jobDescriptor.key.call lazy val standardInitializationData: StandardInitializationData = BackendInitializationData. as[StandardInitializationData](backendInitializationDataOption) @@ -79,7 +79,7 @@ trait StandardCachingActorHelper extends JobCachingActorHelper { } /** - * Returns any custom medatata for the backend. + * Returns any custom metadata for the backend. * * @return any custom metadata for the backend. 
*/ diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala b/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala index 3b29b796d..010240319 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala @@ -4,8 +4,8 @@ import cromwell.backend.io.GlobFunctions import cromwell.backend.wdl.{ReadLikeFunctions, WriteFunctions} import cromwell.core.CallContext import cromwell.core.path.{Path, PathBuilder} -import wdl4s.expression.PureStandardLibraryFunctionsLike -import wdl4s.values.{WdlFile, WdlValue} +import wdl4s.wdl.expression.PureStandardLibraryFunctionsLike +import wdl4s.wdl.values.{WdlFile, WdlValue} import scala.util.{Success, Try} diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala index 6acbe6cb7..09d80a77e 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala @@ -6,14 +6,14 @@ import cromwell.backend.io.WorkflowPaths import cromwell.core.CallOutputs import cromwell.core.Dispatcher.IoDispatcher import cromwell.core.path.{Path, PathCopier} -import wdl4s.TaskCall +import wdl4s.wdl.WdlTaskCall import scala.concurrent.Future trait StandardFinalizationActorParams { def workflowDescriptor: BackendWorkflowDescriptor - def calls: Set[TaskCall] + def calls: Set[WdlTaskCall] def jobExecutionMap: JobExecutionMap @@ -27,7 +27,7 @@ trait StandardFinalizationActorParams { case class DefaultStandardFinalizationActorParams ( workflowDescriptor: BackendWorkflowDescriptor, - calls: Set[TaskCall], + calls: Set[WdlTaskCall], jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, initializationDataOption: 
Option[BackendInitializationData], @@ -45,7 +45,7 @@ class StandardFinalizationActor(val standardParams: StandardFinalizationActorPar extends BackendWorkflowFinalizationActor { override lazy val workflowDescriptor: BackendWorkflowDescriptor = standardParams.workflowDescriptor - override lazy val calls: Set[TaskCall] = standardParams.calls + override lazy val calls: Set[WdlTaskCall] = standardParams.calls lazy val initializationDataOption: Option[BackendInitializationData] = standardParams.initializationDataOption lazy val jobExecutionMap: JobExecutionMap = standardParams.jobExecutionMap lazy val workflowOutputs: CallOutputs = standardParams.workflowOutputs diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala index 89e3c48b2..c5f2c5ed8 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala @@ -7,8 +7,8 @@ import cromwell.backend.wfs.WorkflowPathBuilder import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor, BackendWorkflowInitializationActor} import cromwell.core.WorkflowOptions import cromwell.core.path.{DefaultPathBuilder, PathBuilder} -import wdl4s.TaskCall -import wdl4s.values.WdlValue +import wdl4s.wdl.WdlTaskCall +import wdl4s.wdl.values.WdlValue import scala.concurrent.Future import scala.util.Try @@ -16,7 +16,7 @@ import scala.util.Try trait StandardInitializationActorParams { def workflowDescriptor: BackendWorkflowDescriptor - def calls: Set[TaskCall] + def calls: Set[WdlTaskCall] def serviceRegistryActor: ActorRef @@ -27,9 +27,10 @@ case class DefaultInitializationActorParams ( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, - calls: Set[TaskCall], + calls: Set[WdlTaskCall], serviceRegistryActor: ActorRef, - 
configurationDescriptor: BackendConfigurationDescriptor + configurationDescriptor: BackendConfigurationDescriptor, + restarting: Boolean ) extends StandardInitializationActorParams /** @@ -44,21 +45,21 @@ class StandardInitializationActor(val standardParams: StandardInitializationActo override lazy val serviceRegistryActor: ActorRef = standardParams.serviceRegistryActor - override lazy val calls: Set[TaskCall] = standardParams.calls + override lazy val calls: Set[WdlTaskCall] = standardParams.calls override def beforeAll(): Future[Option[BackendInitializationData]] = { - Future.fromTry(Try(Option(initializationData))) + initializationData map Option.apply } - lazy val initializationData: StandardInitializationData = - new StandardInitializationData(workflowPaths, runtimeAttributesBuilder, classOf[StandardExpressionFunctions]) + lazy val initializationData: Future[StandardInitializationData] = + workflowPaths map { new StandardInitializationData(_, runtimeAttributesBuilder, classOf[StandardExpressionFunctions]) } lazy val expressionFunctions: Class[_ <: StandardExpressionFunctions] = classOf[StandardExpressionFunctions] - lazy val pathBuilders: List[PathBuilder] = List(DefaultPathBuilder) + lazy val pathBuilders: Future[List[PathBuilder]] = Future.successful(List(DefaultPathBuilder)) - lazy val workflowPaths: WorkflowPaths = - WorkflowPathBuilder.workflowPaths(configurationDescriptor, workflowDescriptor, pathBuilders) + lazy val workflowPaths: Future[WorkflowPaths] = + pathBuilders map { WorkflowPathBuilder.workflowPaths(configurationDescriptor, workflowDescriptor, _) } /** * Returns the runtime attribute builder for this backend. 
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala b/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala index 6b3b8eb63..9c692c9a2 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala @@ -7,8 +7,8 @@ import cromwell.backend.standard.callcaching._ import cromwell.core.Dispatcher.BackendDispatcher import cromwell.core.path.Path import cromwell.core.{CallOutputs, Dispatcher} -import wdl4s.TaskCall -import wdl4s.expression.WdlStandardLibraryFunctions +import wdl4s.wdl.WdlTaskCall +import wdl4s.wdl.expression.WdlStandardLibraryFunctions /** * May be extended for using the standard sync/async backend pattern. @@ -73,16 +73,16 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { */ lazy val finalizationActorClassOption: Option[Class[_ <: StandardFinalizationActor]] = Option(classOf[StandardFinalizationActor]) - override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall], - serviceRegistryActor: ActorRef): Option[Props] = { - val params = workflowInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor) + override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall], + serviceRegistryActor: ActorRef, restart: Boolean): Option[Props] = { + val params = workflowInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, restart) val props = Props(initializationActorClass, params).withDispatcher(Dispatcher.BackendDispatcher) Option(props) } - def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall], - serviceRegistryActor: ActorRef): StandardInitializationActorParams = { - 
DefaultInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, configurationDescriptor) + def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall], + serviceRegistryActor: ActorRef, restarting: Boolean): StandardInitializationActorParams = { + DefaultInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, configurationDescriptor, restarting) } override def jobExecutionActorProps(jobDescriptor: BackendJobDescriptor, @@ -152,7 +152,7 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { jobDescriptor, initializationDataOption, serviceRegistryActor, ioActor, configurationDescriptor) } - override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall], + override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall], jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, initializationData: Option[BackendInitializationData]): Option[Props] = { finalizationActorClassOption map { finalizationActorClass => @@ -162,7 +162,7 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { } } - def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[TaskCall], + def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[WdlTaskCall], jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, initializationDataOption: Option[BackendInitializationData]): StandardFinalizationActorParams = { diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala index c3cb05143..180f4d345 100644 --- 
a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala @@ -10,7 +10,6 @@ import cromwell.core.Dispatcher import cromwell.services.keyvalue.KeyValueServiceActor._ import scala.concurrent.{Future, Promise} -import scala.language.existentials trait StandardSyncExecutionActorParams extends StandardJobExecutionActorParams { /** The class for creating an async backend. */ diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala index 700c710e0..e79f2e863 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala @@ -19,9 +19,8 @@ import cromwell.core.io._ import cromwell.core.logging.JobLogging import cromwell.core.path.{Path, PathCopier} import cromwell.core.simpleton.{WdlValueBuilder, WdlValueSimpleton} -import wdl4s.values.WdlFile +import wdl4s.wdl.values.WdlFile -import scala.language.postfixOps import scala.util.{Failure, Success, Try} /** @@ -55,15 +54,52 @@ object StandardCacheHitCopyingActor { sealed trait StandardCacheHitCopyingActorState case object Idle extends StandardCacheHitCopyingActorState - case object WaitingForCopyResponses extends StandardCacheHitCopyingActorState + case object WaitingForIoResponses extends StandardCacheHitCopyingActorState + case object FailedState extends StandardCacheHitCopyingActorState + case object WaitingForOnSuccessResponse extends StandardCacheHitCopyingActorState - case class StandardCacheHitCopyingActorData(copyCommandsToWaitFor: Set[IoCopyCommand], - copiedJobOutputs: CallOutputs, - copiedDetritus: DetritusMap, + // TODO: this mechanism here is very close to the one in CallCacheHashingJobActorData + // 
Abstracting it might be valuable + /** + * The head subset of commandsToWaitFor is sent to the IoActor as a bulk. + * When a response comes back, the corresponding command is removed from the head set. + * When the head set is empty, it is removed and the next subset is sent, until there is no subset left. + * If at any point a response comes back as a failure, other responses for the current set will be awaited for + * but subsequent sets will not be sent and the actor will send back a failure message. + */ + case class StandardCacheHitCopyingActorData(commandsToWaitFor: List[Set[IoCommand[_]]], + newJobOutputs: CallOutputs, + newDetritus: DetritusMap, returnCode: Option[Int] ) { - def remove(copyCommand: IoCopyCommand) = copy(copyCommandsToWaitFor = copyCommandsToWaitFor filterNot { _ == copyCommand }) + + /** + * Removes the command from commandsToWaitFor + * returns a pair of the new state data and CommandSetState giving information about what to do next + */ + def commandComplete(command: IoCommand[_]): (StandardCacheHitCopyingActorData, CommandSetState) = commandsToWaitFor match { + // If everything was already done send back current data and AllCommandsDone + case Nil => (this, AllCommandsDone) + case lastSubset :: Nil => + val updatedSubset = lastSubset - command + // If the last subset is now empty, we're done + if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = List.empty), AllCommandsDone) + // otherwise update commandsToWaitFor and keep waiting + else (this.copy(commandsToWaitFor = List(updatedSubset)), StillWaiting) + case currentSubset :: otherSubsets => + val updatedSubset = currentSubset - command + // This subset is done but there are other ones, remove it from commandsToWaitFor and return the next round of commands + if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = otherSubsets), NextSubSet(otherSubsets.head)) + // otherwise update the head subset and keep waiting + else (this.copy(commandsToWaitFor = List(updatedSubset) ++ 
otherSubsets), StillWaiting) + } } + + // Internal ADT to keep track of command set states + private[callcaching] sealed trait CommandSetState + private[callcaching] case object StillWaiting extends CommandSetState + private[callcaching] case object AllCommandsDone extends CommandSetState + private[callcaching] case class NextSubSet(commands: Set[IoCommand[_]]) extends CommandSetState } class DefaultStandardCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) extends StandardCacheHitCopyingActor(standardParams) with DefaultIoCommandBuilder @@ -92,37 +128,83 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit when(Idle) { case Event(CopyOutputsCommand(simpletons, jobDetritus, returnCode), None) => - val sourceCallRootPath = lookupSourceCallRootPath(jobDetritus) - - val processed = for { - (callOutputs, simpletonCopyPairs) <- processSimpletons(simpletons, sourceCallRootPath) - (destinationDetritus, detritusCopyPairs) <- processDetritus(jobDetritus) - } yield (callOutputs, destinationDetritus, simpletonCopyPairs ++ detritusCopyPairs) - - processed match { - case Success((callOutputs, destinationDetritus, allCopyPairs)) => - duplicate(allCopyPairs) match { - case Some(Success(_)) => succeedAndStop(returnCode, callOutputs, destinationDetritus) - case Some(Failure(failure)) => failAndStop(failure) - case None => - val allCopyCommands = allCopyPairs map { case (source, destination) => copyCommand(source, destination, overwrite = true) } - - allCopyCommands foreach { sendIoCommand(_) } - goto(WaitingForCopyResponses) using Option(StandardCacheHitCopyingActorData(allCopyCommands, callOutputs, destinationDetritus, returnCode)) + // Try to make a Path of the callRootPath from the detritus + lookupSourceCallRootPath(jobDetritus) match { + case Success(sourceCallRootPath) => + + // process simpletons and detritus to get updated paths and corresponding IoCommands + val processed = for { + (destinationCallOutputs, 
simpletonIoCommands) <- processSimpletons(simpletons, sourceCallRootPath) + (destinationDetritus, detritusIoCommands) <- processDetritus(jobDetritus) + } yield (destinationCallOutputs, destinationDetritus, simpletonIoCommands ++ detritusIoCommands) + + processed match { + case Success((destinationCallOutputs, destinationDetritus, detritusAndOutputsIoCommands)) => + duplicate(ioCommandsToCopyPairs(detritusAndOutputsIoCommands)) match { + // Use the duplicate override if exists + case Some(Success(_)) => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus) + case Some(Failure(failure)) => failAndStop(failure) + // Otherwise send the first round of IoCommands (file outputs and detritus) if any + case None if detritusAndOutputsIoCommands.nonEmpty => + detritusAndOutputsIoCommands foreach sendIoCommand + + // Add potential additional commands to the list + val additionalCommands = additionalIoCommands(sourceCallRootPath, simpletons, destinationCallOutputs, jobDetritus, destinationDetritus) + val allCommands = List(detritusAndOutputsIoCommands) ++ additionalCommands + + goto(WaitingForIoResponses) using Option(StandardCacheHitCopyingActorData(allCommands, destinationCallOutputs, destinationDetritus, returnCode)) + case _ => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus) + } + + case Failure(failure) => failAndStop(failure) } case Failure(failure) => failAndStop(failure) } } - when(WaitingForCopyResponses) { - case Event(IoSuccess(copyCommand: IoCopyCommand, _), Some(data)) => - val newData = data.remove(copyCommand) - if (newData.copyCommandsToWaitFor.isEmpty) succeedAndStop(data.returnCode, data.copiedJobOutputs, data.copiedDetritus) - else stay() using Option(newData) - case Event(IoFailure(copyCommand: IoCopyCommand, failure), _) => - failAndStop(failure) + when(WaitingForIoResponses) { + case Event(IoSuccess(command: IoCommand[_], _), Some(data)) => + val (newData, commandState) = data.commandComplete(command) + + 
commandState match { + case StillWaiting => stay() using Option(newData) + case AllCommandsDone => succeedAndStop(newData.returnCode, newData.newJobOutputs, newData.newDetritus) + case NextSubSet(commands) => + commands foreach sendIoCommand + stay() using Option(newData) + } + case Event(IoFailure(command: IoCommand[_], failure), Some(data)) => + // any failure is fatal + context.parent ! JobFailedNonRetryableResponse(jobDescriptor.key, failure, None) + + val (newData, commandState) = data.commandComplete(command) + + commandState match { + // If we're still waiting for some responses, go to failed state + case StillWaiting => goto(FailedState) using Option(newData) + // Otherwise we're done + case _ => + context stop self + stay() + } + // Should not be possible + case Event(IoFailure(_: IoCommand[_], failure), None) => failAndStop(failure) + } + + when(FailedState) { + // At this point success or failure doesn't matter, we've already failed this hit + case Event(response: IoAck[_], Some(data)) => + val (newData, commandState) = data.commandComplete(response.command) + commandState match { + // If we're still waiting for some responses, stay + case StillWaiting => stay() using Option(newData) + // Otherwise we're done + case _ => + context stop self + stay() + } } whenUnhandled { @@ -154,57 +236,79 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit stay() } - private def lookupSourceCallRootPath(sourceJobDetritusFiles: Map[String, String]): Path = { - sourceJobDetritusFiles.get(JobPaths.CallRootPathKey).map(getPath).get recover { - case failure => - throw new RuntimeException(s"${JobPaths.CallRootPathKey} wasn't found for call ${jobDescriptor.call.fullyQualifiedName}", failure) - } get + protected def lookupSourceCallRootPath(sourceJobDetritusFiles: Map[String, String]): Try[Path] = { + sourceJobDetritusFiles.get(JobPaths.CallRootPathKey) match { + case Some(source) => getPath(source) + case None => Failure(new 
RuntimeException(s"${JobPaths.CallRootPathKey} wasn't found for call ${jobDescriptor.call.fullyQualifiedName}")) + } + } + + private def ioCommandsToCopyPairs(commands: Set[IoCommand[_]]): Set[PathPair] = commands collect { + case copyCommand: IoCopyCommand => copyCommand.source -> copyCommand.destination } /** * Returns a pair of the list of simpletons with copied paths, and copy commands necessary to perform those copies. */ - private def processSimpletons(wdlValueSimpletons: Seq[WdlValueSimpleton], sourceCallRootPath: Path): Try[(CallOutputs, Set[PathPair])] = Try { - val (destinationSimpletons, ioCommands): (List[WdlValueSimpleton], Set[PathPair]) = wdlValueSimpletons.toList.foldMap({ + protected def processSimpletons(wdlValueSimpletons: Seq[WdlValueSimpleton], sourceCallRootPath: Path): Try[(CallOutputs, Set[IoCommand[_]])] = Try { + val (destinationSimpletons, ioCommands): (List[WdlValueSimpleton], Set[IoCommand[_]]) = wdlValueSimpletons.toList.foldMap({ case WdlValueSimpleton(key, wdlFile: WdlFile) => val sourcePath = getPath(wdlFile.value).get val destinationPath = PathCopier.getDestinationFilePath(sourceCallRootPath, sourcePath, destinationCallRootPath) val destinationSimpleton = WdlValueSimpleton(key, WdlFile(destinationPath.pathAsString)) - List(destinationSimpleton) -> Set(sourcePath -> destinationPath) - case nonFileSimpleton => (List(nonFileSimpleton), Set.empty[PathPair]) + List(destinationSimpleton) -> Set(copyCommand(sourcePath, destinationPath, overwrite = true)) + case nonFileSimpleton => (List(nonFileSimpleton), Set.empty[IoCommand[_]]) }) (WdlValueBuilder.toJobOutputs(jobDescriptor.call.task.outputs, destinationSimpletons), ioCommands) } /** - * Returns a pair of the detritus with copied paths, and copy commands necessary to perform those copies. + * Returns the file (and ONLY the file detritus) intersection between the cache hit and this call. 
*/ - private def processDetritus(sourceJobDetritusFiles: Map[String, String]): Try[(Map[String, Path], Set[PathPair])] = Try { + protected final def detritusFileKeys(sourceJobDetritusFiles: Map[String, String]) = { val sourceKeys = sourceJobDetritusFiles.keySet val destinationKeys = destinationJobDetritusPaths.keySet - val fileKeys = sourceKeys.intersect(destinationKeys).filterNot(_ == JobPaths.CallRootPathKey) + sourceKeys.intersect(destinationKeys).filterNot(_ == JobPaths.CallRootPathKey) + } + + /** + * Returns a pair of the detritus with copied paths, and copy commands necessary to perform those copies. + */ + protected def processDetritus(sourceJobDetritusFiles: Map[String, String]): Try[(Map[String, Path], Set[IoCommand[_]])] = Try { + val fileKeys = detritusFileKeys(sourceJobDetritusFiles) - val zero = (Map.empty[String, Path], Set.empty[PathPair]) + val zero = (Map.empty[String, Path], Set.empty[IoCommand[_]]) val (destinationDetritus, ioCommands) = fileKeys.foldLeft(zero)({ case ((detrituses, commands), detritus) => val sourcePath = getPath(sourceJobDetritusFiles(detritus)).get val destinationPath = destinationJobDetritusPaths(detritus) - + val newDetrituses = detrituses + (detritus -> destinationPath) - - (newDetrituses, commands + ((sourcePath, destinationPath))) + + (newDetrituses, commands + copyCommand(sourcePath, destinationPath, overwrite = true)) }) - + (destinationDetritus + (JobPaths.CallRootPathKey -> destinationCallRootPath), ioCommands) } + /** + * Additional IoCommands that will be sent after (and only after) output and detritus commands complete successfully. 
+ * See StandardCacheHitCopyingActorData + */ + protected def additionalIoCommands(sourceCallRootPath: Path, + originalSimpletons: Seq[WdlValueSimpleton], + newOutputs: CallOutputs, + originalDetritus: Map[String, String], + newDetritus: Map[String, Path]): List[Set[IoCommand[_]]] = List.empty + override protected def onTimeout(message: Any, to: ActorRef): Unit = { val exceptionMessage = message match { case copyCommand: IoCopyCommand => s"The Cache hit copying actor timed out waiting for a response to copy ${copyCommand.source.pathAsString} to ${copyCommand.destination.pathAsString}" + case touchCommand: IoTouchCommand => s"The Cache hit copying actor timed out waiting for a response to touch ${touchCommand.file.pathAsString}" case other => s"The Cache hit copying actor timed out waiting for an unknown I/O operation: $other" } diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala index 53a05ae8d..bcb08d333 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala @@ -9,7 +9,7 @@ import cromwell.core.JobKey import cromwell.core.callcaching._ import cromwell.core.io._ import cromwell.core.logging.JobLogging -import wdl4s.values.WdlFile +import wdl4s.wdl.values.WdlFile import scala.util.{Failure, Success, Try} @@ -72,11 +72,11 @@ abstract class StandardFileHashingActor(standardParams: StandardFileHashingActor } // Hash Success - case (fileHashRequest: SingleFileHashRequest, response @ IoSuccess(_, result: String)) => + case (fileHashRequest: SingleFileHashRequest, IoSuccess(_, result: String)) => context.parent ! 
FileHashResponse(HashResult(fileHashRequest.hashKey, HashValue(result))) // Hash Failure - case (fileHashRequest: SingleFileHashRequest, response @ IoFailure(_, failure: Throwable)) => + case (fileHashRequest: SingleFileHashRequest, IoFailure(_, failure: Throwable)) => context.parent ! HashingFailedMessage(fileHashRequest.file.value, failure) case other => diff --git a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala index 0639b4f42..ed2bcfad0 100644 --- a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala +++ b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala @@ -1,6 +1,6 @@ package cromwell.backend.validation -import wdl4s.types.{WdlArrayType, WdlBooleanType, WdlIntegerType, WdlType} +import wdl4s.wdl.types._ object ContinueOnReturnCode { val validWdlTypes = Set[WdlType](WdlArrayType(WdlIntegerType), WdlBooleanType, WdlIntegerType) diff --git a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala index 03e9d5b1a..27ae8170e 100644 --- a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala @@ -7,8 +7,8 @@ import cats.syntax.validated._ import com.typesafe.config.Config import cromwell.backend.validation.RuntimeAttributesValidation._ import lenthall.validation.ErrorOr._ -import wdl4s.types.{WdlArrayType, WdlIntegerType, WdlStringType, WdlType} -import wdl4s.values.{WdlArray, WdlBoolean, WdlInteger, WdlString, WdlValue} +import wdl4s.wdl.types._ +import wdl4s.wdl.values._ import scala.util.Try diff --git a/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala b/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala index 
81cce8d0d..60950ed00 100644 --- a/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/CpuValidation.scala @@ -3,8 +3,8 @@ package cromwell.backend.validation import cats.syntax.validated._ import com.typesafe.config.Config import lenthall.validation.ErrorOr.ErrorOr -import wdl4s.types.WdlIntegerType -import wdl4s.values.{WdlInteger, WdlValue} +import wdl4s.wdl.types.WdlIntegerType +import wdl4s.wdl.values.{WdlInteger, WdlValue} /** * Validates the "cpu" runtime attribute an Integer greater than 0, returning the value as an `Int`. @@ -18,6 +18,7 @@ import wdl4s.values.{WdlInteger, WdlValue} */ object CpuValidation { lazy val instance: RuntimeAttributesValidation[Int] = new CpuValidation + lazy val optional: OptionalRuntimeAttributesValidation[Int] = instance.optional lazy val default: WdlValue = WdlInteger(1) def configDefaultWdlValue(config: Option[Config]): Option[WdlValue] = instance.configDefaultWdlValue(config) } diff --git a/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala b/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala index f793d1397..788f7a7e6 100644 --- a/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala @@ -2,7 +2,7 @@ package cromwell.backend.validation import cats.syntax.validated._ import lenthall.validation.ErrorOr.ErrorOr -import wdl4s.values.{WdlString, WdlValue} +import wdl4s.wdl.values.{WdlString, WdlValue} /** * Validates the "docker" runtime attribute as a String, returning it as `String`. 
diff --git a/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala b/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala index c57aaa246..bf4f024c6 100644 --- a/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala @@ -1,7 +1,7 @@ package cromwell.backend.validation import com.typesafe.config.Config -import wdl4s.values.{WdlBoolean, WdlValue} +import wdl4s.wdl.values.{WdlBoolean, WdlValue} /** * Validates the "failOnStderr" runtime attribute as a Boolean or a String 'true' or 'false', returning the value as a diff --git a/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala b/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala index bc0fb32da..c2c97343a 100644 --- a/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala @@ -5,8 +5,8 @@ import com.typesafe.config.Config import cromwell.backend.MemorySize import lenthall.validation.ErrorOr._ import wdl4s.parser.MemoryUnit -import wdl4s.types.{WdlIntegerType, WdlStringType} -import wdl4s.values.{WdlInteger, WdlString, WdlValue} +import wdl4s.wdl.types.{WdlIntegerType, WdlStringType} +import wdl4s.wdl.values.{WdlInteger, WdlString, WdlValue} import scala.util.{Failure, Success} @@ -39,31 +39,31 @@ object MemoryValidation { } private[validation] val wrongAmountFormat = - s"Expecting ${RuntimeAttributesKeys.MemoryKey} runtime attribute value greater than 0 but got %s" + "Expecting %s runtime attribute value greater than 0 but got %s" private[validation] val wrongTypeFormat = - s"Expecting ${RuntimeAttributesKeys.MemoryKey} runtime attribute to be an Integer or String with format '8 GB'." + - s" Exception: %s" + "Expecting %s runtime attribute to be an Integer or String with format '8 GB'." 
+ + " Exception: %s" - private[validation] def validateMemoryString(wdlString: WdlString): ErrorOr[MemorySize] = - validateMemoryString(wdlString.value) + private[validation] def validateMemoryString(attributeName: String, wdlString: WdlString): ErrorOr[MemorySize] = + validateMemoryString(attributeName, wdlString.value) - private[validation] def validateMemoryString(value: String): ErrorOr[MemorySize] = { + private[validation] def validateMemoryString(attributeName: String, value: String): ErrorOr[MemorySize] = { MemorySize.parse(value) match { case scala.util.Success(memorySize: MemorySize) if memorySize.amount > 0 => memorySize.to(MemoryUnit.GB).validNel case scala.util.Success(memorySize: MemorySize) => - wrongAmountFormat.format(memorySize.amount).invalidNel + wrongAmountFormat.format(attributeName, memorySize.amount).invalidNel case scala.util.Failure(throwable) => - wrongTypeFormat.format(throwable.getMessage).invalidNel + wrongTypeFormat.format(attributeName, throwable.getMessage).invalidNel } } - private[validation] def validateMemoryInteger(wdlInteger: WdlInteger): ErrorOr[MemorySize] = - validateMemoryInteger(wdlInteger.value) + private[validation] def validateMemoryInteger(attributeName: String, wdlInteger: WdlInteger): ErrorOr[MemorySize] = + validateMemoryInteger(attributeName, wdlInteger.value) - private[validation] def validateMemoryInteger(value: Int): ErrorOr[MemorySize] = { + private[validation] def validateMemoryInteger(attributeName: String, value: Int): ErrorOr[MemorySize] = { if (value <= 0) - wrongAmountFormat.format(value).invalidNel + wrongAmountFormat.format(attributeName, value).invalidNel else MemorySize(value.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB).validNel } @@ -78,9 +78,9 @@ class MemoryValidation(attributeName: String = RuntimeAttributesKeys.MemoryKey) override def coercion = Seq(WdlIntegerType, WdlStringType) override protected def validateValue: PartialFunction[WdlValue, ErrorOr[MemorySize]] = { - case WdlInteger(value) => 
MemoryValidation.validateMemoryInteger(value) - case WdlString(value) => MemoryValidation.validateMemoryString(value) + case WdlInteger(value) => MemoryValidation.validateMemoryInteger(key, value) + case WdlString(value) => MemoryValidation.validateMemoryString(key, value) } - override def missingValueMessage: String = wrongTypeFormat.format("Not supported WDL type value") + override def missingValueMessage: String = wrongTypeFormat.format(key, "Not supported WDL type value") } diff --git a/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala index aa07afcdc..477f9f9c7 100644 --- a/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala @@ -2,8 +2,8 @@ package cromwell.backend.validation import cats.syntax.validated._ import lenthall.validation.ErrorOr.ErrorOr -import wdl4s.types._ -import wdl4s.values.{WdlBoolean, WdlFloat, WdlInteger, WdlPrimitive, WdlString, WdlValue} +import wdl4s.wdl.types._ +import wdl4s.wdl.values._ /** * Validates one of the wdl primitive types: Boolean, Float, Integer, or String. WdlFile is not supported. 
diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala index 30ffe1043..a111fe554 100644 --- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala +++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala @@ -4,8 +4,8 @@ import cats.data.ValidatedNel import cats.syntax.validated._ import cromwell.core.{EvaluatedRuntimeAttributes, OptionNotFoundException, WorkflowOptions} import lenthall.util.TryUtil -import wdl4s.types.WdlType -import wdl4s.values.WdlValue +import wdl4s.wdl.types.WdlType +import wdl4s.wdl.values.WdlValue import scala.util.{Failure, Try} diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala index 6635747a1..ffba5bbd8 100644 --- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala @@ -6,10 +6,10 @@ import com.typesafe.config.Config import cromwell.backend.{MemorySize, RuntimeAttributeDefinition} import lenthall.validation.ErrorOr._ import org.slf4j.Logger -import wdl4s.expression.PureStandardLibraryFunctions -import wdl4s.types.{WdlBooleanType, WdlIntegerType, WdlStringType, WdlType} -import wdl4s.values._ -import wdl4s.{NoLookup, WdlExpression} +import wdl4s.wdl.expression.PureStandardLibraryFunctions +import wdl4s.wdl.types.{WdlBooleanType, WdlIntegerType, WdlStringType, WdlType} +import wdl4s.wdl.values._ +import wdl4s.wdl.{NoLookup, WdlExpression} import scala.util.{Failure, Success} @@ -65,12 +65,12 @@ object RuntimeAttributesValidation { } } - def parseMemoryString(s: WdlString): ErrorOr[MemorySize] = { - MemoryValidation.validateMemoryString(s) + def parseMemoryString(k: String, s: WdlString): 
ErrorOr[MemorySize] = { + MemoryValidation.validateMemoryString(k, s) } - def parseMemoryInteger(i: WdlInteger): ErrorOr[MemorySize] = { - MemoryValidation.validateMemoryInteger(i) + def parseMemoryInteger(k: String, i: WdlInteger): ErrorOr[MemorySize] = { + MemoryValidation.validateMemoryInteger(k, i) } def withDefault[ValidatedType](validation: RuntimeAttributesValidation[ValidatedType], diff --git a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala index ed3981817..c125e24b8 100644 --- a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala +++ b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala @@ -6,8 +6,8 @@ import cromwell.backend.RuntimeAttributeDefinition import lenthall.exception.MessageAggregation import lenthall.validation.ErrorOr._ import org.slf4j.Logger -import wdl4s.types.WdlType -import wdl4s.values.WdlValue +import wdl4s.wdl.types.WdlType +import wdl4s.wdl.values.WdlValue final case class ValidatedRuntimeAttributes(attributes: Map[String, Any]) diff --git a/backend/src/main/scala/cromwell/backend/wdl/Command.scala b/backend/src/main/scala/cromwell/backend/wdl/Command.scala index b0e3c9294..5e0d1d424 100644 --- a/backend/src/main/scala/cromwell/backend/wdl/Command.scala +++ b/backend/src/main/scala/cromwell/backend/wdl/Command.scala @@ -1,9 +1,9 @@ package cromwell.backend.wdl import cromwell.backend.BackendJobDescriptor -import wdl4s.EvaluatedTaskInputs -import wdl4s.expression.WdlFunctions -import wdl4s.values.WdlValue +import wdl4s.wdl.EvaluatedTaskInputs +import wdl4s.wdl.expression.WdlFunctions +import wdl4s.wdl.values.WdlValue import scala.util.{Success, Try} diff --git a/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala b/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala index d41d25b19..93d3563b6 
100644 --- a/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala +++ b/backend/src/main/scala/cromwell/backend/wdl/OutputEvaluator.scala @@ -2,9 +2,9 @@ package cromwell.backend.wdl import cromwell.backend.BackendJobDescriptor import cromwell.core.JobOutput -import wdl4s.LocallyQualifiedName -import wdl4s.expression.WdlStandardLibraryFunctions -import wdl4s.values.WdlValue +import wdl4s.wdl.LocallyQualifiedName +import wdl4s.wdl.expression.WdlStandardLibraryFunctions +import wdl4s.wdl.values.WdlValue import scala.util.{Success, Try} diff --git a/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala b/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala index 073bb6a91..349cefbe6 100644 --- a/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/wdl/ReadLikeFunctions.scala @@ -2,10 +2,10 @@ package cromwell.backend.wdl import cromwell.backend.MemorySize import cromwell.core.path.PathFactory -import wdl4s.expression.WdlStandardLibraryFunctions +import wdl4s.wdl.expression.WdlStandardLibraryFunctions import wdl4s.parser.MemoryUnit -import wdl4s.types.{WdlArrayType, WdlFileType, WdlObjectType, WdlStringType} -import wdl4s.values._ +import wdl4s.wdl.types._ +import wdl4s.wdl.values._ import scala.util.{Failure, Success, Try} @@ -54,7 +54,7 @@ trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions fileSize <- fileSize(fileName) _ = if (fileSize > limit) { val errorMsg = s"Use of $fileName failed because the file was too big ($fileSize bytes when only files of up to $limit bytes are permissible" - throw new FileSizeTooBig(errorMsg) + throw FileSizeTooBig(errorMsg) } } yield () @@ -119,14 +119,39 @@ trait ReadLikeFunctions extends PathFactory { this: WdlStandardLibraryFunctions override def read_boolean(params: Seq[Try[WdlValue]]): Try[WdlBoolean] = read_string(params) map { s => WdlBoolean(java.lang.Boolean.parseBoolean(s.value.trim.toLowerCase)) 
} + protected def size(file: WdlValue): Try[Double] = Try(buildPath(file.valueString).size.toDouble) + + /** + * Gets the size of a file. + * + * @param params First parameter must be a File or File? or coerceable to one. The second is an optional string containing the size unit (eg "MB", "GiB") + */ override def size(params: Seq[Try[WdlValue]]): Try[WdlFloat] = { + // Inner function: get the memory unit from the second (optional) parameter def toUnit(wdlValue: Try[WdlValue]) = wdlValue flatMap { unit => Try(MemoryUnit.fromSuffix(unit.valueString)) } + // Inner function: is this a file type, or an optional containing a file type? + def isOptionalOfFileType(wdlType: WdlType): Boolean = wdlType match { + case f if WdlFileType.isCoerceableFrom(f) => true + case WdlOptionalType(inner) => isOptionalOfFileType(inner) + case _ => false + } + + // Inner function: Get the file size, allowing for unpacking of optionals + def optionalSafeFileSize(value: WdlValue): Try[Double] = value match { + case f if f.isInstanceOf[WdlFile] || WdlFileType.isCoerceableFrom(f.wdlType) => size(f) + case WdlOptionalValue(_, Some(o)) => optionalSafeFileSize(o) + case WdlOptionalValue(f, None) if isOptionalOfFileType(f) => Success(0d) + case _ => Failure(new Exception(s"The 'size' method expects a 'File' or 'File?' 
argument but instead got ${value.wdlType.toWdlString}.")) + } + + // Inner function: get the file size and convert into the requested memory unit def fileSize(wdlValue: Try[WdlValue], convertTo: Try[MemoryUnit] = Success(MemoryUnit.Bytes)) = { for { value <- wdlValue unit <- convertTo - } yield MemorySize(buildPath(value.valueString).size.toDouble, MemoryUnit.Bytes).to(unit).amount + fileSize <- optionalSafeFileSize(value) + } yield MemorySize(fileSize, MemoryUnit.Bytes).to(unit).amount } params match { diff --git a/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala b/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala index 1c39c4d89..746b71ecd 100644 --- a/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala +++ b/backend/src/main/scala/cromwell/backend/wdl/WdlFileMapper.scala @@ -1,7 +1,7 @@ package cromwell.backend.wdl import lenthall.util.TryUtil -import wdl4s.values.{WdlArray, WdlFile, WdlMap, WdlOptionalValue, WdlPair, WdlValue} +import wdl4s.wdl.values._ import scala.util.{Success, Try} diff --git a/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala b/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala index b8568f840..28ff91145 100644 --- a/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/wdl/WriteFunctions.scala @@ -1,10 +1,10 @@ package cromwell.backend.wdl import cromwell.core.path.Path -import wdl4s.TsvSerializable -import wdl4s.expression.WdlStandardLibraryFunctions -import wdl4s.types._ -import wdl4s.values._ +import wdl4s.wdl.TsvSerializable +import wdl4s.wdl.expression.WdlStandardLibraryFunctions +import wdl4s.wdl.types._ +import wdl4s.wdl.values._ import scala.util.{Failure, Try} diff --git a/backend/src/test/scala/cromwell/backend/BackendSpec.scala b/backend/src/test/scala/cromwell/backend/BackendSpec.scala index bc0f36a75..ec7d92949 100644 --- a/backend/src/test/scala/cromwell/backend/BackendSpec.scala +++ 
b/backend/src/test/scala/cromwell/backend/BackendSpec.scala @@ -11,9 +11,9 @@ import org.scalatest.concurrent.ScalaFutures import org.scalatest.time.{Millis, Seconds, Span} import org.specs2.mock.Mockito import spray.json.{JsObject, JsValue} -import wdl4s._ -import wdl4s.expression.NoFunctions -import wdl4s.values.WdlValue +import wdl4s.wdl._ +import wdl4s.wdl.expression.NoFunctions +import wdl4s.wdl.values.WdlValue trait BackendSpec extends ScalaFutures with Matchers with Mockito { @@ -23,13 +23,13 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito { executeJobAndAssertOutputs(backend, workflow.expectedResponse) } - def buildWorkflowDescriptor(wdl: WdlSource, + def buildWorkflowDescriptor(workflowSource: WorkflowSource, inputs: Map[String, WdlValue] = Map.empty, options: WorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])), runtime: String = "") = { BackendWorkflowDescriptor( WorkflowId.randomId(), - WdlNamespaceWithWorkflow.load(wdl.replaceAll("RUNTIME", runtime), Seq.empty[ImportResolver]).get.workflow, // Get ok, this is a test! + WdlNamespaceWithWorkflow.load(workflowSource.replaceAll("RUNTIME", runtime), Seq.empty[ImportResolver]).get.workflow, // Get ok, this is a test! 
inputs, options, Labels.empty @@ -58,7 +58,7 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito { BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, Map.empty) } - def jobDescriptorFromSingleCallWorkflow(wdl: WdlSource, + def jobDescriptorFromSingleCallWorkflow(wdl: WorkflowSource, options: WorkflowOptions, runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition]): BackendJobDescriptor = { val workflowDescriptor = buildWorkflowDescriptor(wdl) @@ -70,7 +70,7 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito { BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, Map.empty) } - def jobDescriptorFromSingleCallWorkflow(wdl: WdlSource, + def jobDescriptorFromSingleCallWorkflow(wdl: WorkflowSource, runtime: String, attempt: Int, options: WorkflowOptions, diff --git a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala index d26df12f6..7438282ba 100644 --- a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala @@ -7,9 +7,9 @@ import cromwell.backend.validation.{ContinueOnReturnCodeFlag, ContinueOnReturnCo import cromwell.core.{TestKitSuite, WorkflowOptions} import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.{FlatSpecLike, Matchers} -import wdl4s.types._ -import wdl4s.values.{WdlArray, WdlBoolean, WdlFloat, WdlInteger, WdlString, WdlValue} -import wdl4s.{TaskCall, WdlExpression} +import wdl4s.wdl.types._ +import wdl4s.wdl.values.{WdlArray, WdlBoolean, WdlFloat, WdlInteger, WdlString, WdlValue} +import wdl4s.wdl.{WdlTaskCall, WdlExpression} import scala.concurrent.Future import scala.util.Try @@ -183,7 +183,7 @@ class TestPredicateBackendWorkflowInitializationActor extends 
BackendWorkflowIni override val serviceRegistryActor: ActorRef = context.system.deadLetters - override def calls: Set[TaskCall] = throw new NotImplementedError("calls") + override def calls: Set[WdlTaskCall] = throw new NotImplementedError("calls") override protected def runtimeAttributeValidators: Map[String, (Option[WdlValue]) => Boolean] = throw new NotImplementedError("runtimeAttributeValidators") diff --git a/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala b/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala index 793680b6c..eeccc39e1 100644 --- a/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala +++ b/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala @@ -4,7 +4,7 @@ import com.typesafe.config.ConfigFactory import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptorKey, BackendSpec, TestConfig} import cromwell.core.path.DefaultPathBuilder import org.scalatest.{FlatSpec, Matchers} -import wdl4s.TaskCall +import wdl4s.wdl.WdlTaskCall class JobPathsSpec extends FlatSpec with Matchers with BackendSpec { @@ -29,7 +29,7 @@ class JobPathsSpec extends FlatSpec with Matchers with BackendSpec { "JobPaths" should "provide correct paths for a job" in { val wd = buildWorkflowDescriptor(TestWorkflows.HelloWorld) - val call: TaskCall = wd.workflow.taskCalls.head + val call: WdlTaskCall = wd.workflow.taskCalls.head val jobKey = BackendJobDescriptorKey(call, None, 1) val workflowPaths = new WorkflowPathsWithDocker(wd, backendConfig) val jobPaths = new JobPathsWithDocker(workflowPaths, jobKey) diff --git a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala index c16348da9..66b48467b 100644 --- a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala +++ b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala @@ -6,7 +6,7 @@ import cromwell.core.path.DefaultPathBuilder import cromwell.core.{JobKey, 
WorkflowId} import org.mockito.Mockito._ import org.scalatest.{FlatSpec, Matchers} -import wdl4s.{Call, Workflow} +import wdl4s.wdl.{WdlCall, WdlWorkflow} class WorkflowPathsSpec extends FlatSpec with Matchers with BackendSpec { @@ -29,22 +29,22 @@ class WorkflowPathsSpec extends FlatSpec with Matchers with BackendSpec { when(backendConfig.getString(any[String])).thenReturn("local-cromwell-executions") // This is the folder defined in the config as the execution root dir val rootWd = mock[BackendWorkflowDescriptor] - val rootWorkflow = mock[Workflow] + val rootWorkflow = mock[WdlWorkflow] val rootWorkflowId = WorkflowId.randomId() rootWorkflow.unqualifiedName returns "rootWorkflow" rootWd.workflow returns rootWorkflow rootWd.id returns rootWorkflowId val subWd = mock[BackendWorkflowDescriptor] - val subWorkflow = mock[Workflow] + val subWorkflow = mock[WdlWorkflow] val subWorkflowId = WorkflowId.randomId() subWorkflow.unqualifiedName returns "subWorkflow" subWd.workflow returns subWorkflow subWd.id returns subWorkflowId - val call1 = mock[Call] + val call1 = mock[WdlCall] call1.unqualifiedName returns "call1" - val call2 = mock[Call] + val call2 = mock[WdlCall] call2.unqualifiedName returns "call2" val jobKey = new JobKey { diff --git a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala index e4e18bbfb..7c78177b3 100644 --- a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala @@ -8,7 +8,7 @@ import org.scalatest.{Matchers, WordSpecLike} import org.slf4j.{Logger, LoggerFactory} import org.specs2.mock.Mockito import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsValue} -import wdl4s.values.{WdlBoolean, WdlInteger, WdlString, WdlValue} +import 
wdl4s.wdl.values.{WdlBoolean, WdlInteger, WdlString, WdlValue} class StandardValidatedRuntimeAttributesBuilderSpec extends WordSpecLike with Matchers with Mockito { diff --git a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala index 7420c2d89..05edd089e 100644 --- a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala +++ b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala @@ -4,8 +4,8 @@ import cromwell.backend.validation.RuntimeAttributesDefault._ import cromwell.core.WorkflowOptions import org.scalatest.{FlatSpec, Matchers} import spray.json._ -import wdl4s.types._ -import wdl4s.values.{WdlArray, WdlBoolean, WdlInteger, WdlString} +import wdl4s.wdl.types._ +import wdl4s.wdl.values.{WdlArray, WdlBoolean, WdlInteger, WdlString} class RuntimeAttributesDefaultSpec extends FlatSpec with Matchers { diff --git a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala index 4cd60eef7..76e47ce2a 100644 --- a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala +++ b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala @@ -5,8 +5,8 @@ import cats.syntax.validated._ import com.typesafe.config.{Config, ConfigFactory} import cromwell.backend.TestConfig import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} -import wdl4s.types.{WdlArrayType, WdlIntegerType, WdlStringType} -import wdl4s.values.{WdlArray, WdlBoolean, WdlInteger, WdlString} +import wdl4s.wdl.types.{WdlArrayType, WdlIntegerType, WdlStringType} +import wdl4s.wdl.values.{WdlArray, WdlBoolean, WdlInteger, WdlString} class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with BeforeAndAfterAll { @@ -37,7 
+37,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateDocker(dockerValue, "Failed to get Docker mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Failed to get Docker mandatory key from runtime attributes") } } @@ -47,7 +47,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateDocker(dockerValue, "Failed to get Docker mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting docker runtime attribute to be a String") } } @@ -87,7 +87,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateFailOnStderr(failOnStderrValue, "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'") } } @@ -146,7 +146,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") } @@ -167,7 
+167,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") } } @@ -197,7 +197,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateMemory(memoryValue, "Failed to get memory mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting memory runtime attribute value greater than 0 but got -1") } } @@ -218,7 +218,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateMemory(memoryValue, "Failed to get memory mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting memory runtime attribute value greater than 0 but got 0.0") } } @@ -228,7 +228,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateMemory(memoryValue, "Failed to get memory mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. 
Exception: value should be of the form 'X Unit' where X is a number, e.g. 8 GB") } } @@ -238,7 +238,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateMemory(memoryValue, "Failed to get memory mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. Exception: Not supported WDL type value") } } @@ -248,7 +248,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateMemory(memoryValue, "Failed to get memory mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Failed to get memory mandatory key from runtime attributes") } } @@ -268,7 +268,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateCpu(cpuValue, "Failed to get cpu mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting cpu runtime attribute value greater than 0") } } @@ -278,7 +278,7 @@ class RuntimeAttributesValidationSpec extends WordSpecLike with Matchers with Be val result = RuntimeAttributesValidation.validateMemory(cpuValue, "Failed to get cpu mandatory key from runtime attributes".invalidNel) result match { - case Valid(x) => fail("A failure was expected.") + case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Failed to get cpu mandatory key from runtime attributes") } } diff --git 
a/backend/src/test/scala/cromwell/backend/wdl/FileSizeSpec.scala b/backend/src/test/scala/cromwell/backend/wdl/FileSizeSpec.scala index b0cc1166c..bc265b9aa 100644 --- a/backend/src/test/scala/cromwell/backend/wdl/FileSizeSpec.scala +++ b/backend/src/test/scala/cromwell/backend/wdl/FileSizeSpec.scala @@ -8,7 +8,7 @@ import cromwell.backend.standard.{DefaultStandardExpressionFunctionsParams, Stan import cromwell.core.CallContext import cromwell.core.path.DefaultPathBuilder import org.scalatest.{FlatSpec, Matchers} -import wdl4s.values._ +import wdl4s.wdl.values._ import com.google.common.io.Files import fs2.{Task, Stream} @@ -73,7 +73,7 @@ class FileSizeSpec extends FlatSpec with Matchers { def testOver() = { testInner(n + 1, { - case Failure(s: FileSizeTooBig) => //success + case Failure(_: FileSizeTooBig) => //success case t => throw new RuntimeException(s"should not have eaten this file that is too big! msg: $t") }) } @@ -81,8 +81,8 @@ class FileSizeSpec extends FlatSpec with Matchers { def testUnder() = { testInner(n - 1, { case Success(_) => - case Failure(nfe: NumberFormatException) => //we're not testing parsing - case Failure(uoe: UnsupportedOperationException) => //we're not testing tsv compatibility + case Failure(_: NumberFormatException) => //we're not testing parsing + case Failure(_: UnsupportedOperationException) => //we're not testing tsv compatibility case Failure(t) => throw t }) } diff --git a/backend/src/test/scala/cromwell/backend/wdl/ReadLikeFunctionsSpec.scala b/backend/src/test/scala/cromwell/backend/wdl/ReadLikeFunctionsSpec.scala new file mode 100644 index 000000000..ad4716b95 --- /dev/null +++ b/backend/src/test/scala/cromwell/backend/wdl/ReadLikeFunctionsSpec.scala @@ -0,0 +1,99 @@ +package cromwell.backend.wdl + +import cromwell.core.path.PathBuilder +import org.apache.commons.lang3.NotImplementedException +import org.scalatest.{FlatSpec, Matchers} +import wdl4s.wdl.expression.PureStandardLibraryFunctionsLike +import 
wdl4s.wdl.types.{WdlFileType, WdlIntegerType, WdlOptionalType} +import wdl4s.wdl.values.{WdlFloat, WdlInteger, WdlOptionalValue, WdlSingleFile, WdlString, WdlValue} + +import scala.util.{Failure, Success, Try} + +class ReadLikeFunctionsSpec extends FlatSpec with Matchers { + + behavior of "ReadLikeFunctions.size" + + it should "correctly report a 2048 byte file, in bytes by default" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlSingleFile("blah")))) should be(Success(WdlFloat(2048d))) + } + + it should "correctly report a 2048 byte file, in bytes" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlSingleFile("blah")), Success(WdlString("B")))) should be(Success(WdlFloat(2048d))) + } + + it should "correctly report a 2048 byte file, in KB" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlSingleFile("blah")), Success(WdlString("KB")))) should be(Success(WdlFloat(2.048d))) + } + + it should "correctly report a 2048 byte file, in KiB" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlSingleFile("blah")), Success(WdlString("Ki")))) should be(Success(WdlFloat(2d))) + } + + it should "correctly report the size of a supplied, optional, 2048 byte file" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, Some(WdlSingleFile("blah")))))) should be(Success(WdlFloat(2048d))) + } + + it should "correctly report the size of a supplied, optional optional, 2048 byte file" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlOptionalValue(WdlOptionalType(WdlFileType), Some(WdlOptionalValue(WdlFileType, Some(WdlSingleFile("blah")))))))) should be(Success(WdlFloat(2048d))) + } + + it should "correctly report the size of a supplied, optional, 2048 byte file, in MB" in { + val readLike = new 
TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, Some(WdlSingleFile("blah")))), Success(WdlString("MB")))) should be(Success(WdlFloat(0.002048d))) + } + + it should "correctly report that an unsupplied optional file is empty" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, None)))) should be(Success(WdlFloat(0d))) + } + + it should "correctly report that an unsupplied File?? is empty" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlOptionalValue(WdlOptionalType(WdlFileType), None)))) should be(Success(WdlFloat(0d))) + } + + it should "correctly report that an unsupplied optional file is empty, even in MB" in { + val readLike = new TestReadLikeFunctions(Success(2048d)) + readLike.size(Seq(Success(WdlOptionalValue(WdlFileType, None)), Success(WdlString("MB")))) should be(Success(WdlFloat(0d))) + } + + it should "refuse to report file sizes for Ints" in { + val readLike = new TestReadLikeFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size"))) + val oops = readLike.size(Seq(Success(WdlInteger(7)))) + oops match { + case Success(x) => fail(s"Expected a string to not have a file length but instead got $x") + case Failure(e) => e.getMessage should be("The 'size' method expects a 'File' or 'File?' argument but instead got Int.") + } + } + + it should "refuse to report file sizes for Int?s" in { + val readLike = new TestReadLikeFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size"))) + val oops = readLike.size(Seq(Success(WdlOptionalValue(WdlIntegerType, None)))) + oops match { + case Success(x) => fail(s"Expected a string to not have a file length but instead got $x") + case Failure(e) => e.getMessage should be("The 'size' method expects a 'File' or 'File?' 
argument but instead got Int?.") + } + } + + it should "pass on underlying size reading errors" in { + val readLike = new TestReadLikeFunctions(Failure(new Exception("'size' inner exception, expect me to be passed on"))) + val oops = readLike.size(Seq(Success(WdlSingleFile("blah")))) + oops match { + case Success(_) => fail(s"The 'size' engine function didn't return the error generated in the inner 'size' method") + case Failure(e) => e.getMessage should be("'size' inner exception, expect me to be passed on") + } + } +} + + +class TestReadLikeFunctions(sizeResult: Try[Double]) extends PureStandardLibraryFunctionsLike with ReadLikeFunctions { + override protected def size(file: WdlValue): Try[Double] = sizeResult + override def pathBuilders: List[PathBuilder] = throw new NotImplementedException("Didn't expect ReadLikefunctionsSpec to need pathBuilders") +} + diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 9ab8cf2c2..865559f87 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -59,11 +59,33 @@ akka { # Note that without further configuration, all other actors run on the default dispatcher } + + coordinated-shutdown.phases { + abort-all-workflows { + # This phase is used to give time to Cromwell to abort all workflows upon shutdown. 
+ # It's only used if system.abort-jobs-on-terminate = true + # This timeout can be adusted to give more or less time to Cromwell to abort workflows + timeout = 1 hour + depends-on = [service-unbind] + } + + stop-io-activity{ + # Adjust this timeout according to the maximum amount of time Cromwell + # should be allowed to spend flushing its database queues + timeout = 30 minutes + depends-on = [service-stop] + } + } } system { # If 'true', a SIGINT will trigger Cromwell to attempt to abort all currently running jobs before exiting - #abort-jobs-on-terminate = false + abort-jobs-on-terminate = false + + # If 'true', a SIGTERM or SIGINT will trigger Cromwell to attempt to gracefully shutdown in server mode, + # in particular clearing up all queued database writes before letting the JVM shut down. + # The shutdown is a multi-phase process, each phase having its own configurable timeout. See the Dev Wiki for more details. + graceful-server-shutdown = true # If 'true' then when Cromwell starts up, it tries to restart incomplete workflows workflow-restart = true @@ -82,13 +104,13 @@ system { # Default number of cache read workers number-of-cache-read-workers = 25 - + io { # Global Throttling - This is mostly useful for GCS and can be adjusted to match # the quota availble on the GCS API number-of-requests = 100000 per = 100 seconds - + # Number of times an I/O operation should be attempted before giving up and failing it. number-of-attempts = 5 } @@ -200,11 +222,11 @@ engine { # You will need to provide the engine with a gcs filesystem # Note that the default filesystem (local) is always available. filesystems { - # gcs { - # auth = "application-default" - # } + # gcs { + # auth = "application-default" + # } local { - enabled: true + enabled: true } } } @@ -242,7 +264,7 @@ backend { ${docker} ${script} """ - + # Root directory where Cromwell writes job results. 
This directory must be # visible and writeable by the Cromwell process as well as the jobs that Cromwell @@ -342,7 +364,7 @@ backend { # job-id-regex = "Job <(\\d+)>.*" # } #} - + #SLURM { # actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory" # config { @@ -498,12 +520,26 @@ backend { # # # Endpoint for APIs, no reason to change this unless directed by Google. # endpoint-url = "https://genomics.googleapis.com/" + # + # # Restrict access to VM metadata. Useful in cases when untrusted containers are running under a service + # # account not owned by the submitting user + # restrict-metadata-access = false # } # # filesystems { # gcs { # # A reference to a potentially different auth for manipulating files via engine functions. # auth = "application-default" + # + # caching { + # # When a cache hit is found, the following duplication strategy will be followed to use the cached outputs + # # Possible values: "copy", "reference". Defaults to "copy" + # # "copy": Copy the output files + # # "reference": DO NOT copy the output files but point to the original output files instead. + # # Will still make sure than all the original output files exist and are accessible before + # # going forward with the cache hit. + # duplication-strategy = "copy" + # } # } # } # @@ -548,10 +584,10 @@ services { # performance but will both lead to a higher memory usage as well as increase the risk that metadata events # might not have been persisted in the event of a Cromwell crash. # - # For normal usage the default value of 1 (effectively no batching) should be fine but for larger/production - # environments we recommend a value of at least 500. There'll be no one size fits all number here so we recommend - # benchmarking performance and tuning the value to match your environment - # db-batch-size = 1 + # For normal usage the default value of 200 should be fine but for larger/production environments we recommend a + # value of at least 500. 
There'll be no one size fits all number here so we recommend benchmarking performance and + # tuning the value to match your environment. + # db-batch-size = 200 # # Periodically the stored metadata events will be forcibly written to the DB regardless of if the batch size # has been reached. This is to prevent situations where events wind up never being written to an incomplete batch diff --git a/core/src/main/scala/cromwell/core/CallKey.scala b/core/src/main/scala/cromwell/core/CallKey.scala index 547eadabd..02a987431 100644 --- a/core/src/main/scala/cromwell/core/CallKey.scala +++ b/core/src/main/scala/cromwell/core/CallKey.scala @@ -1,7 +1,7 @@ package cromwell.core -import wdl4s.Call +import wdl4s.wdl.WdlCall trait CallKey extends JobKey { - def scope: Call + def scope: WdlCall } diff --git a/core/src/main/scala/cromwell/core/ConfigUtil.scala b/core/src/main/scala/cromwell/core/ConfigUtil.scala index 881fec686..40dae4784 100644 --- a/core/src/main/scala/cromwell/core/ConfigUtil.scala +++ b/core/src/main/scala/cromwell/core/ConfigUtil.scala @@ -7,7 +7,7 @@ import cats.syntax.validated._ import com.typesafe.config.{Config, ConfigException, ConfigValue} import org.slf4j.LoggerFactory -import scala.collection.JavaConversions._ +import scala.collection.JavaConverters._ import scala.reflect.{ClassTag, classTag} object ConfigUtil { @@ -15,7 +15,7 @@ object ConfigUtil { val validationLogger = LoggerFactory.getLogger("ConfigurationValidation") implicit class EnhancedConfig(val config: Config) extends AnyVal { - def keys = config.entrySet().toSet map { v: java.util.Map.Entry[String, ConfigValue] => v.getKey } + def keys = config.entrySet().asScala.toSet map { v: java.util.Map.Entry[String, ConfigValue] => v.getKey } /** * For keys that are in the configuration but not in the reference keySet, log a warning. 
@@ -37,14 +37,14 @@ object ConfigUtil { def validateString(key: String): ValidatedNel[String, String] = try { config.getString(key).validNel } catch { - case e: ConfigException.Missing => s"Could not find key: $key".invalidNel + case _: ConfigException.Missing => s"Could not find key: $key".invalidNel } def validateConfig(key: String): ValidatedNel[String, Config] = try { config.getConfig(key).validNel } catch { - case e: ConfigException.Missing => s"Could not find key: $key".invalidNel - case e: ConfigException.WrongType => s"key $key cannot be parsed to a Config".invalidNel + case _: ConfigException.Missing => s"Could not find key: $key".invalidNel + case _: ConfigException.WrongType => s"key $key cannot be parsed to a Config".invalidNel } } diff --git a/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala b/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala index 52af92e5c..79e1621ca 100644 --- a/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala +++ b/core/src/main/scala/cromwell/core/CromwellUserGuardianStrategy.scala @@ -5,7 +5,7 @@ import akka.actor.{ActorInitializationException, OneForOneStrategy, SupervisorSt class CromwellUserGuardianStrategy extends SupervisorStrategyConfigurator { override def create(): SupervisorStrategy = OneForOneStrategy() { - case aie: ActorInitializationException => Escalate + case _: ActorInitializationException => Escalate case t => akka.actor.SupervisorStrategy.defaultDecider.applyOrElse(t, (_: Any) => Escalate) } } diff --git a/core/src/main/scala/cromwell/core/ExecutionStatus.scala b/core/src/main/scala/cromwell/core/ExecutionStatus.scala index 25319404c..b4ee84f9e 100644 --- a/core/src/main/scala/cromwell/core/ExecutionStatus.scala +++ b/core/src/main/scala/cromwell/core/ExecutionStatus.scala @@ -3,7 +3,8 @@ package cromwell.core object ExecutionStatus extends Enumeration { type ExecutionStatus = Value val NotStarted, QueuedInCromwell, Starting, Running, Failed, 
RetryableFailure, Done, Bypassed, Aborted = Value - val TerminalStatuses = Set(Failed, Done, Aborted, RetryableFailure, Bypassed) + val TerminalStatuses = Set(Failed, Done, Aborted, Bypassed) + val TerminalOrRetryableStatuses = TerminalStatuses + RetryableFailure implicit val ExecutionStatusOrdering = Ordering.by { status: ExecutionStatus => status match { @@ -20,9 +21,9 @@ object ExecutionStatus extends Enumeration { } implicit class EnhancedExecutionStatus(val status: ExecutionStatus) extends AnyVal { - def isTerminal: Boolean = { - TerminalStatuses contains status - } + def isTerminal: Boolean = TerminalStatuses contains status + + def isTerminalOrRetryable: Boolean = TerminalOrRetryableStatuses contains status def isDoneOrBypassed: Boolean = status == Done || status == Bypassed } diff --git a/core/src/main/scala/cromwell/core/JobKey.scala b/core/src/main/scala/cromwell/core/JobKey.scala index 9fd22b31e..f230134ce 100644 --- a/core/src/main/scala/cromwell/core/JobKey.scala +++ b/core/src/main/scala/cromwell/core/JobKey.scala @@ -1,9 +1,9 @@ package cromwell.core -import wdl4s.{GraphNode, Scope} +import wdl4s.wdl.{Scope, WdlGraphNode} trait JobKey { - def scope: Scope with GraphNode + def scope: Scope with WdlGraphNode def index: Option[Int] def attempt: Int def tag: String diff --git a/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala b/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala new file mode 100644 index 000000000..53f0e4de6 --- /dev/null +++ b/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala @@ -0,0 +1,46 @@ +package cromwell.core + +import akka.actor.{Actor, ActorLogging, ActorRef, Props} +import cromwell.core.MonitoringCompanionActor._ +import cromwell.util.GracefulShutdownHelper.ShutdownCommand + +import scala.concurrent.duration._ +import scala.language.postfixOps + +object MonitoringCompanionActor { + sealed trait MonitoringCompanionCommand + private [core] case object AddWork extends 
MonitoringCompanionCommand + private [core] case object RemoveWork extends MonitoringCompanionCommand + private [core] def props(actorToMonitor: ActorRef) = Props(new MonitoringCompanionActor(actorToMonitor)) +} + +private [core] class MonitoringCompanionActor(actorToMonitor: ActorRef) extends Actor with ActorLogging { + private var workCount: Int = 0 + + override def receive = { + case AddWork => workCount += 1 + case RemoveWork => workCount -= 1 + case ShutdownCommand if workCount <= 0 => + context stop actorToMonitor + context stop self + case ShutdownCommand => + log.info(s"{} is still processing {} messages", actorToMonitor.path.name, workCount) + context.system.scheduler.scheduleOnce(1 second, self, ShutdownCommand)(context.dispatcher) + () + } +} + +trait MonitoringCompanionHelper { this: Actor => + private val monitoringActor = context.actorOf(MonitoringCompanionActor.props(self)) + private var shuttingDown: Boolean = false + + def addWork() = monitoringActor ! AddWork + def removeWork() = monitoringActor ! RemoveWork + + val monitoringReceive: Receive = { + case ShutdownCommand if !shuttingDown => + shuttingDown = true + monitoringActor ! 
ShutdownCommand + case ShutdownCommand => // Ignore if we're already shutting down + } +} diff --git a/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala b/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala index 1a7843e6d..00afb2b61 100644 --- a/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala +++ b/core/src/main/scala/cromwell/core/WorkflowMetadataKeys.scala @@ -20,5 +20,9 @@ object WorkflowMetadataKeys { val SubmissionSection_Inputs = "inputs" val SubmissionSection_Options = "options" val SubmissionSection_Imports = "imports" + val SubmissionSection_WorkflowType = "workflowType" + val SubmissionSection_Labels = "labels" + val SubmissionSection_WorkflowTypeVersion = "workflowTypeVersion" + val Labels = "labels" } diff --git a/core/src/main/scala/cromwell/core/WorkflowOptions.scala b/core/src/main/scala/cromwell/core/WorkflowOptions.scala index 709e667e1..701ea5428 100644 --- a/core/src/main/scala/cromwell/core/WorkflowOptions.scala +++ b/core/src/main/scala/cromwell/core/WorkflowOptions.scala @@ -152,7 +152,7 @@ case class WorkflowOptions(jsObject: JsObject) { } lazy val defaultRuntimeOptions = jsObject.fields.get(defaultRuntimeOptionKey) match { - case Some(jsObj: JsObject) => TryUtil.sequenceMap(jsObj.fields map { case (k, v) => k -> WorkflowOptions.getAsJson(k, jsObj) }) + case Some(jsObj: JsObject) => TryUtil.sequenceMap(jsObj.fields map { case (k, _) => k -> WorkflowOptions.getAsJson(k, jsObj) }) case Some(jsVal) => Failure(new IllegalArgumentException(s"Unsupported JsValue for $defaultRuntimeOptionKey: $jsVal. 
Expected a JSON object.")) case None => Failure(OptionNotFoundException(s"Cannot find definition for default runtime attributes")) } diff --git a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala index 55fa68b47..281cdf653 100644 --- a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala +++ b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala @@ -1,59 +1,58 @@ package cromwell.core -import wdl4s.{WdlJson, WdlSource} +import wdl4s.wdl.{WorkflowJson, WorkflowSource} /** * Represents the collection of source files that a user submits to run a workflow */ sealed trait WorkflowSourceFilesCollection { - def wdlSource: WdlSource - def inputsJson: WdlJson + def workflowSource: WorkflowSource + def inputsJson: WorkflowJson def workflowOptionsJson: WorkflowOptionsJson - def labelsJson: WdlJson - + def labelsJson: WorkflowJson + def workflowType: Option[WorkflowType] + def workflowTypeVersion: Option[WorkflowTypeVersion] def importsZipFileOption: Option[Array[Byte]] = this match { case _: WorkflowSourceFilesWithoutImports => None - case WorkflowSourceFilesWithDependenciesZip(_, _, _, _, importsZip) => Option(importsZip) // i.e. Some(importsZip) if our wiring is correct + case w: WorkflowSourceFilesWithDependenciesZip => Option(w.importsZip) // i.e. 
Some(importsZip) if our wiring is correct } def copyOptions(workflowOptions: WorkflowOptionsJson) = this match { - case w: WorkflowSourceFilesWithoutImports => WorkflowSourceFilesWithoutImports( - wdlSource = w.wdlSource, - inputsJson = w.inputsJson, - workflowOptionsJson = workflowOptions, - labelsJson = w.labelsJson) - - case w: WorkflowSourceFilesWithDependenciesZip => WorkflowSourceFilesWithDependenciesZip( - wdlSource = w.wdlSource, - inputsJson = w.inputsJson, - workflowOptionsJson = workflowOptions, - labelsJson = w.labelsJson, - importsZip = w.importsZip) + case w: WorkflowSourceFilesWithoutImports => w.copy(workflowOptionsJson = workflowOptions) + case w: WorkflowSourceFilesWithDependenciesZip => w.copy(workflowOptionsJson = workflowOptions) } } object WorkflowSourceFilesCollection { - def apply(wdlSource: WdlSource, - inputsJson: WdlJson, + def apply(workflowSource: WorkflowSource, + workflowType: Option[WorkflowType], + workflowTypeVersion: Option[WorkflowTypeVersion], + inputsJson: WorkflowJson, workflowOptionsJson: WorkflowOptionsJson, - labelsJson: WdlJson, + labelsJson: WorkflowJson, importsFile: Option[Array[Byte]]): WorkflowSourceFilesCollection = importsFile match { - case Some(imports) => WorkflowSourceFilesWithDependenciesZip(wdlSource, inputsJson, workflowOptionsJson, labelsJson, imports) - case None => WorkflowSourceFilesWithoutImports(wdlSource, inputsJson, workflowOptionsJson, labelsJson) + case Some(imports) => + WorkflowSourceFilesWithDependenciesZip(workflowSource, workflowType, workflowTypeVersion, inputsJson, workflowOptionsJson, labelsJson, imports) + case None => + WorkflowSourceFilesWithoutImports(workflowSource, workflowType, workflowTypeVersion, inputsJson, workflowOptionsJson, labelsJson) } } -final case class WorkflowSourceFilesWithoutImports(wdlSource: WdlSource, - inputsJson: WdlJson, +final case class WorkflowSourceFilesWithoutImports(workflowSource: WorkflowSource, + workflowType: Option[WorkflowType], + workflowTypeVersion: 
Option[WorkflowTypeVersion], + inputsJson: WorkflowJson, workflowOptionsJson: WorkflowOptionsJson, - labelsJson: WdlJson) extends WorkflowSourceFilesCollection + labelsJson: WorkflowJson) extends WorkflowSourceFilesCollection -final case class WorkflowSourceFilesWithDependenciesZip(wdlSource: WdlSource, - inputsJson: WdlJson, +final case class WorkflowSourceFilesWithDependenciesZip(workflowSource: WorkflowSource, + workflowType: Option[WorkflowType], + workflowTypeVersion: Option[WorkflowTypeVersion], + inputsJson: WorkflowJson, workflowOptionsJson: WorkflowOptionsJson, - labelsJson: WdlJson, + labelsJson: WorkflowJson, importsZip: Array[Byte]) extends WorkflowSourceFilesCollection { - override def toString = s"WorkflowSourceFilesWithDependenciesZip($wdlSource, $inputsJson, $workflowOptionsJson, $labelsJson, <>)" + override def toString = s"WorkflowSourceFilesWithDependenciesZip($workflowSource, $inputsJson, $workflowOptionsJson, $labelsJson, <>)" } diff --git a/core/src/main/scala/cromwell/core/actor/BatchingDbWriter.scala b/core/src/main/scala/cromwell/core/actor/BatchingDbWriter.scala index 75be80654..db5be437b 100644 --- a/core/src/main/scala/cromwell/core/actor/BatchingDbWriter.scala +++ b/core/src/main/scala/cromwell/core/actor/BatchingDbWriter.scala @@ -1,7 +1,9 @@ package cromwell.core.actor -import akka.actor.ActorRef +import akka.actor.{ActorRef, Cancellable, FSM} import cats.data.NonEmptyVector +import cromwell.core.actor.BatchingDbWriter._ +import cromwell.util.GracefulShutdownHelper.ShutdownCommand import org.slf4j.LoggerFactory import scala.util.{Failure, Success, Try} @@ -52,3 +54,53 @@ object BatchingDbWriter { case class CommandAndReplyTo[C](command: C, replyTo: ActorRef) } + +/** + * Trait that contains some common batch-related and graceful shutdown logic. + * Be careful NOT to add a custom whenUnhandled state function when mixing in this trait as it will override the + * graceful shutdown handling logic. 
+ * + * Note that there is more common logic that could be abstracted here. + */ +trait BatchingDbWriterActor { this: FSM[BatchingDbWriterState, BatchingDbWriterData] => + import scala.concurrent.duration._ + + private var shuttingDown: Boolean = false + + def isShuttingDown: Boolean = shuttingDown + def dbFlushRate: FiniteDuration + var periodicFlush: Option[Cancellable] = None + + override def preStart(): Unit = { + periodicFlush = Option(context.system.scheduler.schedule(0.seconds, dbFlushRate, self, ScheduledFlushToDb)(context.dispatcher)) + } + + /** + * WhenUnhandled state function that handles reception of ShutdownCommand and acts appropriately + */ + private val whenUnhandledFunction: StateFunction = { + case Event(ShutdownCommand, NoData) if stateName == WaitingToWrite => + periodicFlush foreach { _.cancel() } + context stop self + stay() + case Event(ShutdownCommand, _) if stateName == WaitingToWrite => + logger.info("{} flushing database writes...", self.path.name) + shuttingDown = true + // transitioning to WritingToDb triggers a FlushBatchToDb to be sent to self + goto(WritingToDb) + case Event(ShutdownCommand, _) if stateName == WritingToDb => + logger.info("{} waiting for database writes to be flushed...", self.path.name) + shuttingDown = true + stay() + } + + whenUnhandled(whenUnhandledFunction) + + onTransition { + case WaitingToWrite -> WritingToDb => self ! 
FlushBatchToDb + // When transitioning back to WaitingToWrite, if there's no data left to process, and we're trying to shutdown, then stop + case _ -> WaitingToWrite if shuttingDown && nextStateData == NoData => + periodicFlush foreach { _.cancel() } + context stop self + } +} diff --git a/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala b/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala index 1e87ea59b..cb4dd6e67 100644 --- a/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala +++ b/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala @@ -7,6 +7,7 @@ import akka.stream.QueueOfferResult.{Dropped, Enqueued, QueueClosed} import akka.stream.scaladsl.{Sink, Source, SourceQueueWithComplete} import cromwell.core.actor.StreamActorHelper.{ActorRestartException, StreamCompleted, StreamFailed} import cromwell.core.actor.StreamIntegration._ +import cromwell.util.GracefulShutdownHelper.ShutdownCommand import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} @@ -46,10 +47,8 @@ trait StreamActorHelper[T <: StreamContext] { this: Actor with ActorLogging => override def preStart(): Unit = { stream.watchCompletion() onComplete { - case Success(_) => - self ! StreamCompleted - case Failure(failure) => - self ! StreamFailed(failure) + case Success(_) => self ! StreamCompleted + case Failure(failure) => self ! StreamFailed(failure) } } @@ -71,17 +70,17 @@ trait StreamActorHelper[T <: StreamContext] { this: Actor with ActorLogging => } private def streamReceive: Receive = { - case EnqueueResponse(Enqueued, commandContext: T @unchecked) => // Good ! + case ShutdownCommand => stream.complete() + case EnqueueResponse(Enqueued, _: T @unchecked) => // Good ! 
case EnqueueResponse(Dropped, commandContext) => backpressure(commandContext) // In any of the cases below, the stream is in a failed state, which will be caught by the watchCompletion hook and the // actor will be restarted case EnqueueResponse(QueueClosed, commandContext) => backpressure(commandContext) - case EnqueueResponse(QueueOfferResult.Failure(failure), commandContext) => backpressure(commandContext) - case FailedToEnqueue(throwable, commandContext) => backpressure(commandContext) + case EnqueueResponse(QueueOfferResult.Failure(_), commandContext) => backpressure(commandContext) + case FailedToEnqueue(_, commandContext) => backpressure(commandContext) - // Those 2 cases should never happen, as long as the strategy is Resume, but in case it does... - case StreamCompleted => restart(new IllegalStateException("Stream was completed unexpectedly")) + case StreamCompleted => context stop self case StreamFailed(failure) => restart(failure) } diff --git a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala index 0867d916f..9233061cd 100644 --- a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala +++ b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala @@ -1,6 +1,14 @@ package cromwell.core.callcaching -case class HashKey(key: String, checkForHitOrMiss: Boolean = true) + +object HashKey { + def apply(keyComponents: String*) = new HashKey(true, keyComponents.toList) + def apply(checkForHitOrMiss: Boolean, keyComponents: String*) = new HashKey(checkForHitOrMiss, keyComponents.toList) +} + +case class HashKey(checkForHitOrMiss: Boolean, keyComponents: List[String]) { + val key = keyComponents.mkString(": ") +} case class HashValue(value: String) case class HashResult(hashKey: HashKey, hashValue: HashValue) diff --git a/core/src/main/scala/cromwell/core/core.scala b/core/src/main/scala/cromwell/core/core.scala index 8cfe7c2f1..7a5831bbe 100644 
--- a/core/src/main/scala/cromwell/core/core.scala +++ b/core/src/main/scala/cromwell/core/core.scala @@ -2,7 +2,7 @@ package cromwell.core import cromwell.core.path.Path import lenthall.exception.ThrowableAggregation -import wdl4s.values.WdlValue +import wdl4s.wdl.values.WdlValue case class CallContext(root: Path, stdout: String, stderr: String) diff --git a/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala b/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala new file mode 100644 index 000000000..a05961b4a --- /dev/null +++ b/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala @@ -0,0 +1,25 @@ +package cromwell.core.io + +import better.files.File.OpenOptions +import cromwell.core.path.Path + +object DefaultIoCommand { + case class DefaultIoCopyCommand(override val source: Path, + override val destination: Path, + override val overwrite: Boolean) extends IoCopyCommand( + source, destination, overwrite + ) + case class DefaultIoContentAsStringCommand(override val file: Path) extends IoContentAsStringCommand(file) + case class DefaultIoSizeCommand(override val file: Path) extends IoSizeCommand(file) + case class DefaultIoWriteCommand(override val file: Path, + override val content: String, + override val openOptions: OpenOptions) extends IoWriteCommand( + file, content, openOptions + ) + case class DefaultIoDeleteCommand(override val file: Path, + override val swallowIOExceptions: Boolean) extends IoDeleteCommand( + file, swallowIOExceptions + ) + case class DefaultIoHashCommand(override val file: Path) extends IoHashCommand(file) + case class DefaultIoTouchCommand(override val file: Path) extends IoTouchCommand(file) +} diff --git a/core/src/main/scala/cromwell/core/io/IoClientHelper.scala b/core/src/main/scala/cromwell/core/io/IoClientHelper.scala index 01cdabf2a..b6f13b50e 100644 --- a/core/src/main/scala/cromwell/core/io/IoClientHelper.scala +++ b/core/src/main/scala/cromwell/core/io/IoClientHelper.scala @@ -19,7 +19,11 @@ trait 
IoClientHelper extends RobustClientHelper { this: Actor with ActorLogging def ioReceive = robustReceive orElse ioResponseReceive - def sendIoCommand(ioCommand: IoCommand[_], timeout: FiniteDuration = RobustClientHelper.DefaultRequestLostTimeout) = { + def sendIoCommand(ioCommand: IoCommand[_]) = { + sendIoCommandWithCustomTimeout(ioCommand, RobustClientHelper.DefaultRequestLostTimeout) + } + + def sendIoCommandWithCustomTimeout(ioCommand: IoCommand[_], timeout: FiniteDuration) = { robustSend(ioCommand, ioActor, timeout) } diff --git a/core/src/main/scala/cromwell/core/io/IoCommand.scala b/core/src/main/scala/cromwell/core/io/IoCommand.scala index 6244bb79a..fcc2d72a3 100644 --- a/core/src/main/scala/cromwell/core/io/IoCommand.scala +++ b/core/src/main/scala/cromwell/core/io/IoCommand.scala @@ -39,21 +39,21 @@ trait IoCommand[+T] { * Copy source -> destination * Will create the destination directory if it doesn't exist. */ -class IoCopyCommand(val source: Path, val destination: Path, val overwrite: Boolean) extends IoCommand[Unit] { +abstract class IoCopyCommand(val source: Path, val destination: Path, val overwrite: Boolean) extends IoCommand[Unit] { override def toString = s"copy ${source.pathAsString} to ${destination.pathAsString} with overwrite = $overwrite" } /** * Read file as a string (load the entire content in memory) */ -class IoContentAsStringCommand(val file: Path) extends IoCommand[String] { +abstract class IoContentAsStringCommand(val file: Path) extends IoCommand[String] { override def toString = s"read content of ${file.pathAsString}" } /** * Return the size of file */ -class IoSizeCommand(val file: Path) extends IoCommand[Long] { +abstract class IoSizeCommand(val file: Path) extends IoCommand[Long] { override def toString = s"get size of ${file.pathAsString}" } @@ -61,20 +61,27 @@ class IoSizeCommand(val file: Path) extends IoCommand[Long] { * Write content in file * Will create the destination directory if it doesn't exist. 
*/ -class IoWriteCommand(val file: Path, val content: String, val openOptions: OpenOptions) extends IoCommand[Unit] { +abstract class IoWriteCommand(val file: Path, val content: String, val openOptions: OpenOptions) extends IoCommand[Unit] { override def toString = s"write to ${file.pathAsString}" } /** * Delete file */ -class IoDeleteCommand(val file: Path, val swallowIOExceptions: Boolean) extends IoCommand[Unit] { +abstract class IoDeleteCommand(val file: Path, val swallowIOExceptions: Boolean) extends IoCommand[Unit] { override def toString = s"delete ${file.pathAsString}" } /** * Get Hash value for file */ -class IoHashCommand(val file: Path) extends IoCommand[String] { +abstract class IoHashCommand(val file: Path) extends IoCommand[String] { override def toString = s"get hash of ${file.pathAsString}" } + +/** + * Touch a file + */ +abstract class IoTouchCommand(val file: Path) extends IoCommand[Unit] { + override def toString = s"touch ${file.pathAsString}" +} diff --git a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala index d79e67438..ad26449b4 100644 --- a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala +++ b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala @@ -1,5 +1,6 @@ package cromwell.core.io +import cromwell.core.io.DefaultIoCommand._ import cromwell.core.path.BetterFileMethods.OpenOptions import cromwell.core.path.Path @@ -10,13 +11,15 @@ trait IoCommandBuilder { def deleteCommand(path: Path, swallowIoExceptions: Boolean): IoDeleteCommand def copyCommand(src: Path, dest: Path, overwrite: Boolean): IoCopyCommand def hashCommand(file: Path): IoHashCommand + def touchCommand(file: Path): IoTouchCommand } trait DefaultIoCommandBuilder extends IoCommandBuilder { - def contentAsStringCommand(path: Path) = new IoContentAsStringCommand(path) - def writeCommand(path: Path, content: String, options: OpenOptions) = new IoWriteCommand(path, content, options) - def 
sizeCommand(path: Path) = new IoSizeCommand(path) - def deleteCommand(path: Path, swallowIoExceptions: Boolean) = new IoDeleteCommand(path, swallowIoExceptions) - def copyCommand(src: Path, dest: Path, overwrite: Boolean) = new IoCopyCommand(src, dest, overwrite) - def hashCommand(file: Path) = new IoHashCommand(file) + def contentAsStringCommand(path: Path): IoContentAsStringCommand = DefaultIoContentAsStringCommand(path) + def writeCommand(path: Path, content: String, options: OpenOptions): IoWriteCommand = DefaultIoWriteCommand(path, content, options) + def sizeCommand(path: Path): IoSizeCommand = DefaultIoSizeCommand(path) + def deleteCommand(path: Path, swallowIoExceptions: Boolean): IoDeleteCommand = DefaultIoDeleteCommand(path, swallowIoExceptions) + def copyCommand(src: Path, dest: Path, overwrite: Boolean): IoCopyCommand = DefaultIoCopyCommand(src, dest, overwrite) + def hashCommand(file: Path): IoHashCommand = DefaultIoHashCommand(file) + def touchCommand(file: Path): IoTouchCommand = DefaultIoTouchCommand(file) } diff --git a/core/src/main/scala/cromwell/core/labels/Label.scala b/core/src/main/scala/cromwell/core/labels/Label.scala index 4fc1e2959..cee9ac91a 100644 --- a/core/src/main/scala/cromwell/core/labels/Label.scala +++ b/core/src/main/scala/cromwell/core/labels/Label.scala @@ -5,69 +5,40 @@ import cats.data.Validated._ import cats.syntax.cartesian._ import cats.syntax.validated._ +import scala.util.matching.Regex + sealed abstract case class Label(key: String, value: String) object Label { - // Yes, 63. Not a typo for 64. - // See 'labels' in https://cloud.google.com/genomics/reference/rpc/google.genomics.v1alpha2#google.genomics.v1alpha2.RunPipelineArgs - private val MaxLabelLength = 63 - val LabelRegexPattern = "[a-z]([-a-z0-9]*[a-z0-9])?" + val MaxLabelLength = 63 + val LabelKeyRegex = "[a-z]([-a-z0-9]*[a-z0-9])?" + val LabelValueRegex = "([a-z0-9]*[-a-z0-9]*[a-z0-9])?" 
- def validateName(s: String): ErrorOr[String] = { - if (LabelRegexPattern.r.pattern.matcher(s).matches) { - if (s.length <= MaxLabelLength) s.validNel else s"Invalid label: $s was ${s.length} characters. The maximum is $MaxLabelLength".invalidNel - } else { - s"Invalid label: $s did not match the regex $LabelRegexPattern".invalidNel - } - } + val LabelExpectationsMessage = + s"A Label key must match the pattern `$LabelKeyRegex` and a label value must match the pattern `$LabelValueRegex`." - def validateLabel(key: String, value: String): ErrorOr[Label] = { - val validatedKey = validateName(key) - val validatedValue = validateName(value) - - (validatedKey |@| validatedValue) map { case (k, v) => new Label(k, v) {} } + def validateLabelRegex(s: String, regexAllowed: Regex): ErrorOr[String] = { + (regexAllowed.pattern.matcher(s).matches, s.length <= MaxLabelLength) match { + case (true, true) => s.validNel + case (false, false) => s"Invalid label: `$s` did not match regex $regexAllowed and it is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel + case (false, _) => s"Invalid label: `$s` did not match the regex $regexAllowed.".invalidNel + case (_, false) => s"Invalid label: `$s` is ${s.length} characters. 
The maximum is $MaxLabelLength.".invalidNel + } } - /** - * Change to meet the constraint: - * - To match the regex LabelRegexPattern - * - To be between 1 and MaxLabelLength characters total - */ - def safeName(mainText: String): String = { + def validateLabelKey(key: String): ErrorOr[String] = validateLabelRegex(key, LabelKeyRegex.r) - validateName(mainText) match { - case Valid(labelText) => labelText - case _ => - def appendSafe(current: String, nextChar: Char): String = { - nextChar match { - case c if c.isLetterOrDigit || c == '-' => current + c.toLower - case _ => current + '-' - } - } + def validateLabelValue(key: String): ErrorOr[String] = validateLabelRegex(key, LabelValueRegex.r) - val foldResult = mainText.toCharArray.foldLeft("")(appendSafe) - - val startsValid = foldResult.headOption.exists(_.isLetter) - val endsValid = foldResult.lastOption.exists(_.isLetterOrDigit) - - val validStart = if (startsValid) foldResult else "x--" + foldResult - val validStartAndEnd = if (endsValid) validStart else validStart + "--x" - - val length = validStartAndEnd.length - val tooLong = length > MaxLabelLength + def validateLabel(key: String, value: String): ErrorOr[Label] = { + val validatedKey = validateLabelKey(key) + val validatedValue = validateLabelValue(value) - if (tooLong) { - val middleSeparator = "---" - val subSectionLength = (MaxLabelLength - middleSeparator.length) / 2 - validStartAndEnd.substring(0, subSectionLength) + middleSeparator + validStartAndEnd.substring(length - subSectionLength, length) - } else { - validStartAndEnd - } - } + (validatedKey |@| validatedValue) map { case (k, v) => new Label(k, v) {} } } - def safeLabel(key: String, value: String): Label = { - new Label(safeName(key), safeName(value)) {} + def apply(key: String, value: String) = { + new Label(key, value) {} } } diff --git a/core/src/main/scala/cromwell/core/labels/Labels.scala b/core/src/main/scala/cromwell/core/labels/Labels.scala index 8e891fb51..e5fe3f575 100644 --- 
a/core/src/main/scala/cromwell/core/labels/Labels.scala +++ b/core/src/main/scala/cromwell/core/labels/Labels.scala @@ -1,18 +1,31 @@ package cromwell.core.labels +import cats.data.Validated._ +import cats.instances.vector._ +import cats.syntax.traverse._ +import lenthall.validation.ErrorOr +import lenthall.validation.ErrorOr.ErrorOr + import scala.collection.JavaConverters._ case class Labels(value: Vector[Label]) { - def asJesLabels = (value map { label => label.key -> label.value }).toMap.asJava + def asTuple: Vector[(String, String)] = value.map(label => label.key -> label.value) + + def asMap: Map[String, String] = asTuple.toMap + + def asJavaMap = asMap.asJava def ++(that: Labels) = Labels(value ++ that.value) } object Labels { def apply(values: (String, String)*): Labels = { - val kvps: Seq[(String, String)] = values.toSeq - Labels((kvps map { case (k, v) => Label.safeLabel(k, v) }).to[Vector]) + Labels(values.toVector map (Label.apply _).tupled) + } + + def validateMapOfLabels(labels: Map[String, String]): ErrorOr[Labels] = { + labels.toVector traverse { Label.validateLabel _ }.tupled map Labels.apply } def empty = Labels(Vector.empty) diff --git a/core/src/main/scala/cromwell/core/package.scala b/core/src/main/scala/cromwell/core/package.scala index def878003..e94dbc809 100644 --- a/core/src/main/scala/cromwell/core/package.scala +++ b/core/src/main/scala/cromwell/core/package.scala @@ -1,13 +1,34 @@ package cromwell -import wdl4s.values.WdlValue +import cats.data.Validated._ +import cats.syntax.validated._ +import lenthall.validation.ErrorOr.ErrorOr +import wdl4s.wdl.values.WdlValue + +import scala.util.{Failure, Success, Try} package object core { type LocallyQualifiedName = String type FullyQualifiedName = String type WorkflowOutputs = Map[FullyQualifiedName, JobOutput] type WorkflowOptionsJson = String + type WorkflowType = String + type WorkflowTypeVersion = String type CallOutputs = Map[LocallyQualifiedName, JobOutput] type HostInputs = Map[String, 
WdlValue] type EvaluatedRuntimeAttributes = Map[String, WdlValue] + + implicit class toErrorOr[A](val trySomething: Try[A]) { + def tryToErrorOr: ErrorOr[A] = trySomething match { + case Success(options) => options.validNel + case Failure(err) => err.getMessage.invalidNel + } + } + + implicit class toTry[A](val validatedSomething: ErrorOr[A]) { + def errorOrToTry: Try[A] = validatedSomething match { + case Valid(options) => Success(options) + case Invalid(err) => Failure(new RuntimeException(s"Error(s): ${err.toList.mkString(",")}")) + } + } } diff --git a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala index 5346ec70c..94c780a84 100644 --- a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala +++ b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala @@ -214,11 +214,11 @@ trait BetterFileMethods { betterFile.bufferedReader(codec) final def newBufferedWriter(implicit codec: Codec, openOptions: OpenOptions = OpenOptions.default): BufferedWriter = - betterFile.newBufferedWriter(codec) + betterFile.newBufferedWriter(codec, openOptions) final def bufferedWriter(implicit codec: Codec, openOptions: OpenOptions = OpenOptions.default): ManagedResource[BufferedWriter] = - betterFile.bufferedWriter(codec) + betterFile.bufferedWriter(codec, openOptions) final def newFileReader: FileReader = betterFile.newFileReader diff --git a/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala b/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala index 5339fae3c..234a19d79 100644 --- a/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala +++ b/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala @@ -3,6 +3,8 @@ package cromwell.core.path import akka.actor.ActorSystem import cromwell.core.WorkflowOptions +import scala.concurrent.{ExecutionContext, Future} + case object DefaultPathBuilderFactory extends 
PathBuilderFactory { - override def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem) = DefaultPathBuilder + override def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem, ec: ExecutionContext) = Future.successful(DefaultPathBuilder) } diff --git a/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala b/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala index 7ee20eb2d..63a91e02b 100644 --- a/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala +++ b/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala @@ -3,9 +3,11 @@ package cromwell.core.path import akka.actor.ActorSystem import cromwell.core.WorkflowOptions +import scala.concurrent.{ExecutionContext, Future} + /** * Provide a method that can instantiate a path builder with the specified workflow options. */ trait PathBuilderFactory { - def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem): PathBuilder + def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] } diff --git a/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala b/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala index 774c1b5ce..238553f2a 100644 --- a/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala +++ b/core/src/main/scala/cromwell/core/simpleton/WdlValueBuilder.scala @@ -1,12 +1,12 @@ package cromwell.core.simpleton -import wdl4s.TaskOutput -import wdl4s.types._ -import wdl4s.values.{WdlArray, WdlMap, WdlOptionalValue, WdlPair, WdlValue} +import cromwell.core.simpleton.WdlValueSimpleton._ +import cromwell.core.{CallOutputs, JobOutput} +import wdl4s.wdl.TaskOutput +import wdl4s.wdl.types._ +import wdl4s.wdl.values.{WdlArray, WdlMap, WdlOptionalValue, WdlPair, WdlValue} import scala.language.postfixOps -import cromwell.core.{CallOutputs, JobOutput} -import cromwell.core.simpleton.WdlValueSimpleton._ /** @@ -88,7 +88,7 @@ object 
WdlValueBuilder { // Group tuples by key using a Map with key type `K`. def group[K](tuples: Traversable[(K, SimpletonComponent)]): Map[K, Traversable[SimpletonComponent]] = { - tuples groupBy { case (i, _) => i } mapValues { _ map { case (i, s) => s} } + tuples groupBy { case (i, _) => i } mapValues { _ map { case (_, s) => s} } } outputType match { diff --git a/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala b/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala index 1f5e04375..aa5397d55 100644 --- a/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala +++ b/core/src/main/scala/cromwell/core/simpleton/WdlValueSimpleton.scala @@ -1,6 +1,6 @@ package cromwell.core.simpleton -import wdl4s.values._ +import wdl4s.wdl.values._ case class WdlValueSimpleton(simpletonKey: String, simpletonValue: WdlPrimitive) diff --git a/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala b/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala new file mode 100644 index 000000000..5ed66fb5e --- /dev/null +++ b/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala @@ -0,0 +1,34 @@ +package cromwell.util + +import akka.actor.{Actor, ActorLogging, ActorRef, Terminated} +import akka.pattern.GracefulStopSupport +import cats.data.NonEmptyList +import cromwell.util.GracefulShutdownHelper.ShutdownCommand + +object GracefulShutdownHelper { + case object ShutdownCommand +} + +trait GracefulShutdownHelper extends GracefulStopSupport { this: Actor with ActorLogging => + private var shuttingDown: Boolean = false + private var shutdownList: Set[ActorRef] = Set.empty + + def isShuttingDown: Boolean = shuttingDown + + def waitForActorsAndShutdown(actorsLists: NonEmptyList[ActorRef]): Unit = { + if (shuttingDown) { + log.error("Programmer error, this actor has already initiated its shutdown. 
Only call this once per actor !") + } else { + shuttingDown = true + shutdownList = actorsLists.toList.toSet + shutdownList foreach context.watch + shutdownList foreach { _ ! ShutdownCommand } + + context become { + case Terminated(actor) if shuttingDown && shutdownList.contains(actor) => + shutdownList = shutdownList - actor + if (shutdownList.isEmpty) context stop self + } + } + } +} diff --git a/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala b/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala index dc7f55fe5..53bdd4293 100644 --- a/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala +++ b/core/src/main/scala/cromwell/util/JsonFormatting/WdlValueJsonFormatter.scala @@ -1,9 +1,10 @@ package cromwell.util.JsonFormatting import spray.json._ -import wdl4s.WdlExpression -import wdl4s.types.{WdlArrayType, WdlMapType, WdlStringType} -import wdl4s.values._ +import wdl4s.wdl.WdlExpression +import wdl4s.wdl.types._ +import wdl4s.wdl.values._ +import wdl4s.wdl.values.{WdlBoolean, WdlFloat, WdlInteger, WdlString, WdlValue} object WdlValueJsonFormatter extends DefaultJsonProtocol { implicit object WdlValueJsonFormat extends RootJsonFormat[WdlValue] { diff --git a/core/src/main/scala/cromwell/util/PromiseActor.scala b/core/src/main/scala/cromwell/util/PromiseActor.scala index 58aea267a..bd5efa5b0 100644 --- a/core/src/main/scala/cromwell/util/PromiseActor.scala +++ b/core/src/main/scala/cromwell/util/PromiseActor.scala @@ -1,7 +1,7 @@ package cromwell.util import akka.actor._ - +import cromwell.core.Dispatcher.EngineDispatcher import scala.concurrent.{Future, Promise} private class PromiseActor(promise: Promise[Any], sendTo: ActorRef, msg: Any) extends Actor with ActorLogging { @@ -42,7 +42,7 @@ object PromiseActor { promise.future } - def props(promise: Promise[Any], sendTo: ActorRef, msg: Any): Props = Props(new PromiseActor(promise, sendTo, msg)) + def props(promise: Promise[Any], sendTo: 
ActorRef, msg: Any): Props = Props(new PromiseActor(promise, sendTo, msg)).withDispatcher(EngineDispatcher) implicit class EnhancedActorRef(val actorRef: ActorRef) extends AnyVal { def askNoTimeout(message: Any)(implicit actorRefFactory: ActorRefFactory): Future[Any] = { diff --git a/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala b/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala new file mode 100644 index 000000000..a25b311a6 --- /dev/null +++ b/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala @@ -0,0 +1,35 @@ +package cromwell.core.callcaching + +import org.scalatest.{FlatSpec, Matchers} + +class HashKeySpec extends FlatSpec with Matchers { + + "HashKey" should "produce consistent key value" in { + val keys = Set( + HashKey("command template"), + HashKey("backend name"), + HashKey("input count"), + HashKey("output count"), + HashKey("runtime attribute", "failOnStderr"), + HashKey(checkForHitOrMiss = false, "runtime attribute", "cpu"), + HashKey("runtime attribute", "continueOnReturnCode"), + HashKey("input", "String stringInput"), + HashKey("output", "String myOutput"), + HashKey("runtime attribute", "docker") + ) + + keys map { _.key } should contain theSameElementsAs Set( + "command template", + "backend name", + "input count", + "output count", + "runtime attribute: failOnStderr", + "runtime attribute: cpu", + "runtime attribute: continueOnReturnCode", + "input: String stringInput", + "output: String myOutput", + "runtime attribute: docker" + ) + } + +} diff --git a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala index 6f92f3003..1411a47c6 100644 --- a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala +++ b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala @@ -3,6 +3,7 @@ package cromwell.core.io import akka.actor.{Actor, ActorLogging, ActorRef} import akka.testkit.{TestActorRef, TestProbe} import 
cromwell.core.TestKitSuite +import cromwell.core.io.DefaultIoCommand.DefaultIoSizeCommand import cromwell.core.path.Path import org.scalatest.mockito.MockitoSugar import org.scalatest.{FlatSpecLike, Matchers} @@ -22,7 +23,7 @@ class IoClientHelperSpec extends TestKitSuite with FlatSpecLike with Matchers wi val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backpressureTimeout, noResponseTimeout)) - val command = new IoSizeCommand(mock[Path]) + val command = DefaultIoSizeCommand(mock[Path]) val response = IoSuccess(command, 5) // Send the command @@ -53,7 +54,7 @@ class IoClientHelperSpec extends TestKitSuite with FlatSpecLike with Matchers wi val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backpressureTimeout, noResponseTimeout)) val commandContext = "context" - val command = new IoSizeCommand(mock[Path]) + val command = DefaultIoSizeCommand(mock[Path]) val response = IoSuccess(command, 5) // Send the command @@ -91,7 +92,7 @@ class IoClientHelperSpec extends TestKitSuite with FlatSpecLike with Matchers wi } def sendMessage(command: IoCommand[_]) = { - sendIoCommand(command, noResponseTimeout) + sendIoCommandWithCustomTimeout(command, noResponseTimeout) } def sendMessageWithContext(context: Any, command: IoCommand[_]) = { diff --git a/core/src/test/scala/cromwell/core/labels/LabelSpec.scala b/core/src/test/scala/cromwell/core/labels/LabelSpec.scala index 47060b6d6..52c331bec 100644 --- a/core/src/test/scala/cromwell/core/labels/LabelSpec.scala +++ b/core/src/test/scala/cromwell/core/labels/LabelSpec.scala @@ -10,40 +10,44 @@ class LabelSpec extends FlatSpec with Matchers { /** * In the format 'to validate', 'expected result' */ - val goodLabelStrings = List( + val goodLabelKeys = List( "cromwell-root-workflow-id", "cromwell-11f2468c-39d6-4be3-85c8-32735c01e66b", "just-the-right-length-just-the-right-length-just-the-right-leng" ) - val badLabelConversions = List( - 
"11f2468c-39d6-4be3-85c8-32735c01e66b" -> "x--11f2468c-39d6-4be3-85c8-32735c01e66b", - "0-cromwell-root-workflow-id" -> "x--0-cromwell-root-workflow-id", - "" -> "x----x", - "cromwell-root-workflow-id-" -> "cromwell-root-workflow-id---x", - "0-cromwell-root-workflow-id-" -> "x--0-cromwell-root-workflow-id---x", - "Cromwell-root-workflow-id" -> "cromwell-root-workflow-id", - "cromwell_root_workflow_id" -> "cromwell-root-workflow-id", - "too-long-too-long-too-long-too-long-too-long-too-long-too-long-t" -> "too-long-too-long-too-long-too---g-too-long-too-long-too-long-t", - "0-too-long-and-invalid-too-long-and-invalid-too-long-and-invali+" -> "x--0-too-long-and-invalid-too----nvalid-too-long-and-invali---x" + val goodLabelValues = List( + "11f2468c-39d6-4be3-85c8-32735c01e66b", + "" ) - goodLabelStrings foreach { label => - it should s"validate the good label string '$label'" in { - Label.validateName(label) should be(Valid(label)) + val badLabelKeys = List( + "11f2468c-39d6-4be3-85c8-32735c01e66b", + "0-cromwell-root-workflow-id", + "", + "cromwell-root-workflow-id-", + "0-cromwell-root-workflow-id-", + "Cromwell-root-workflow-id" + ) + + goodLabelKeys foreach { key => + it should s"validate a good label key '$key'" in { + Label.validateLabelKey(key) should be(Valid(key)) } } - badLabelConversions foreach { case (label: String, conversion: String) => - it should s"not validate the bad label string '$label'" in { - Label.validateName(label) match { - case Invalid(_) => // Good! 
- case Valid(_) => fail(s"Label validation succeeded but should have failed.") - } + goodLabelValues foreach { value => + it should s"validate a good label value '$value'" in { + Label.validateLabelValue(value) should be(Valid(value)) } + } - it should s"convert the bad label string '$label' into the safe label string '$conversion'" in { - Label.safeName(label) should be(conversion) + badLabelKeys foreach { key => + it should s"not validate a bad label key $key" in { + Label.validateLabelKey(key) match { + case Invalid(_) => // Good! + case Valid(_) => fail(s"Label key validation succeeded but should have failed.") + } } } } diff --git a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala index 27f24076c..f62b49d47 100644 --- a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala +++ b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala @@ -34,7 +34,7 @@ class RetrySpec extends TestKitSuite("retry-spec") with FlatSpecLike with Matche isFatal: Throwable => Boolean = Retry.throwableToFalse): Future[Int] = { withRetry( - f = work.doIt, + f = () => work.doIt(), maxRetries = Option(retries), isTransient = isTransient, isFatal = isFatal diff --git a/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala b/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala index 1e558ceb1..514051e22 100644 --- a/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala +++ b/core/src/test/scala/cromwell/core/simpleton/WdlValueBuilderSpec.scala @@ -4,9 +4,9 @@ import cromwell.core.simpleton.WdlValueBuilderSpec._ import org.scalatest.{FlatSpec, Matchers} import org.specs2.mock.Mockito import wdl4s.parser.WdlParser.Ast -import wdl4s.types.{WdlArrayType, WdlIntegerType, WdlMapType, WdlStringType} -import wdl4s.values.{WdlArray, WdlInteger, WdlMap, WdlPair, WdlString, WdlValue} -import wdl4s.{TaskOutput, WdlExpression} +import wdl4s.wdl.types.{WdlArrayType, WdlIntegerType, 
WdlMapType, WdlStringType} +import wdl4s.wdl.values.{WdlArray, WdlInteger, WdlMap, WdlPair, WdlString, WdlValue} +import wdl4s.wdl.{TaskOutput, WdlExpression} object WdlValueBuilderSpec { // WdlValueBuilder doesn't care about this expression, but something needs to be passed to the TaskOutput constructor. @@ -115,9 +115,9 @@ class WdlValueBuilderSpec extends FlatSpec with Matchers with Mockito { it should "round trip everything together with no losses" in { - val wdlValues = (simpletonConversions map { case SimpletonConversion(name, wdlValue, simpletons) => name -> wdlValue }).toMap + val wdlValues = (simpletonConversions map { case SimpletonConversion(name, wdlValue, _) => name -> wdlValue }).toMap val taskOutputs = wdlValues map { case (k, wv) => TaskOutput(k, wv.wdlType, IgnoredExpression, mock[Ast], None) } - val allSimpletons = simpletonConversions flatMap { case SimpletonConversion(name, wdlValue, simpletons) => simpletons } + val allSimpletons = simpletonConversions flatMap { case SimpletonConversion(_, _, simpletons) => simpletons } import WdlValueSimpleton._ diff --git a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala index 10b05dc2b..1633a7d1d 100644 --- a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala +++ b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala @@ -8,7 +8,7 @@ object AkkaTestUtil { implicit class EnhancedTestProbe(probe: TestProbe) { def props = Props(new Actor with ActorLogging { def receive = { - case outbound if sender == probe.ref => + case outbound @ _ if sender == probe.ref => val msg = "Unexpected outbound message from Probe. You're doing something wrong!" 
log.error(msg) throw new RuntimeException(msg) diff --git a/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala b/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala new file mode 100644 index 000000000..4d93073dc --- /dev/null +++ b/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala @@ -0,0 +1,42 @@ +package cromwell.util + +import akka.actor.{Actor, ActorLogging, Props} +import akka.testkit.TestProbe +import cats.data.NonEmptyList +import cromwell.core.TestKitSuite +import cromwell.util.GracefulShutdownHelper.ShutdownCommand +import org.scalatest.{FlatSpecLike, Matchers} + +class GracefulShutdownHelperSpec extends TestKitSuite with FlatSpecLike with Matchers { + behavior of "GracefulShutdownHelper" + + it should "send ShutdownCommand to actors, wait for them to shutdown, then shut itself down" in { + val testProbeA = TestProbe() + val testProbeB = TestProbe() + + val testActor = system.actorOf(Props(new Actor with GracefulShutdownHelper with ActorLogging { + override def receive: Receive = { + case ShutdownCommand => waitForActorsAndShutdown(NonEmptyList.of(testProbeA.ref, testProbeB.ref)) + } + })) + + watch(testActor) + + testActor ! 
ShutdownCommand + + testProbeA.expectMsg(ShutdownCommand) + testProbeB.expectMsg(ShutdownCommand) + + // Make sure it's still alive + expectNoMsg() + + system stop testProbeA.ref + + // Make sure it's still alive + expectNoMsg() + + system stop testProbeB.ref + + expectTerminated(testActor) + } +} diff --git a/core/src/test/scala/cromwell/util/SampleWdl.scala b/core/src/test/scala/cromwell/util/SampleWdl.scala index 5a3940b81..b7d07fee9 100644 --- a/core/src/test/scala/cromwell/util/SampleWdl.scala +++ b/core/src/test/scala/cromwell/util/SampleWdl.scala @@ -4,18 +4,29 @@ import java.util.UUID import cromwell.core.WorkflowSourceFilesWithoutImports import cromwell.core.path.{DefaultPathBuilder, Path} -import cromwell.core.WorkflowSourceFilesWithoutImports import spray.json._ -import wdl4s._ -import wdl4s.types.{WdlArrayType, WdlStringType} -import wdl4s.values._ +import wdl4s.wdl.types.{WdlArrayType, WdlStringType} +import wdl4s.wdl.values._ +import wdl4s.wdl.{WorkflowJson, WorkflowRawInputs, WorkflowSource} import scala.language.postfixOps trait SampleWdl extends TestFileUtil { - def wdlSource(runtime: String = ""): WdlSource - def asWorkflowSources(runtime: String = "", workflowOptions: String = "{}", labels: String = "{}") = - WorkflowSourceFilesWithoutImports(wdlSource = wdlSource(runtime), inputsJson = wdlJson, workflowOptionsJson = workflowOptions, labelsJson = labels) + def workflowSource(runtime: String = ""): WorkflowSource + def asWorkflowSources(runtime: String = "", + workflowOptions: String = "{}", + labels: String = "{}", + workflowType: Option[String] = Option("WDL"), + workflowTypeVersion: Option[String] = None) = { + WorkflowSourceFilesWithoutImports( + workflowSource = workflowSource(runtime), + inputsJson = workflowJson, + workflowOptionsJson = workflowOptions, + labelsJson = labels, + workflowType = workflowType, + workflowTypeVersion = workflowTypeVersion) + } + val rawInputs: WorkflowRawInputs def name = getClass.getSimpleName.stripSuffix("$") 
@@ -54,7 +65,7 @@ trait SampleWdl extends TestFileUtil { def read(value: JsValue) = throw new NotImplementedError(s"Reading JSON not implemented: $value") } - def wdlJson: WdlJson = rawInputs.toJson.prettyPrint + def workflowJson: WorkflowJson = rawInputs.toJson.prettyPrint def deleteFile(path: Path) = path.delete() } @@ -62,7 +73,7 @@ trait SampleWdl extends TestFileUtil { object SampleWdl { object HelloWorld extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s""" |task hello { | String addressee @@ -87,7 +98,7 @@ object SampleWdl { } object HelloWorldWithoutWorkflow extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s""" |task hello { | String addressee @@ -107,7 +118,7 @@ object SampleWdl { } object GoodbyeWorld extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = """ |task goodbye { | command { @@ -128,7 +139,7 @@ object SampleWdl { } object EmptyString extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s""" |task hello { | command { @@ -170,13 +181,13 @@ object SampleWdl { object EmptyWorkflow extends SampleWdl { - override def wdlSource(runtime: String = "") = "workflow empty_workflow {}" + override def workflowSource(runtime: String = "") = "workflow empty_workflow {}" val rawInputs = Map.empty[String, Any] } object CoercionNotDefined extends SampleWdl { - override def wdlSource(runtime: String = "") = { + override def workflowSource(runtime: String = "") = { s""" |task summary { | String bfile @@ -207,7 +218,7 @@ object SampleWdl { } trait ThreeStepTemplate extends SampleWdl { - override def wdlSource(runtime: String = "") = sourceString().replaceAll("RUNTIME", runtime) + override def workflowSource(runtime: String = "") = sourceString().replaceAll("RUNTIME", runtime) private 
val outputSectionPlaceholder = "OUTPUTSECTIONPLACEHOLDER" def sourceString(outputsSection: String = "") = { val withPlaceholders = @@ -268,7 +279,7 @@ object SampleWdl { object ThreeStep extends ThreeStepTemplate object ThreeStepWithOutputsSection extends ThreeStepTemplate { - override def wdlSource(runtime: String = "") = sourceString(outputsSection = + override def workflowSource(runtime: String = "") = sourceString(outputsSection = """ |output { | cgrep.count @@ -278,7 +289,7 @@ object SampleWdl { } object ThreeStepWithInputsInTheOutputsSection extends ThreeStepTemplate { - override def wdlSource(runtime: String = "") = sourceString(outputsSection = + override def workflowSource(runtime: String = "") = sourceString(outputsSection = """ |output { | cgrep.pattern @@ -293,7 +304,7 @@ object SampleWdl { object WorkflowOutputsWithFiles extends SampleWdl { // ASCII art from http://www.chris.com/ascii/joan/www.geocities.com/SoHo/7373/flag.html with pipes // replaced by exclamation points to keep stripMargin from removing the flagpole. 
- override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = """ task A { command { @@ -360,7 +371,7 @@ object SampleWdl { } object WorkflowScatterOutputsWithFileArrays extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = """ |task A { | command { @@ -387,7 +398,7 @@ object SampleWdl { object DeclarationsWorkflow extends SampleWdl { - override def wdlSource(runtime: String): WdlSource = + override def workflowSource(runtime: String): WorkflowSource = s""" |task cat { | File file @@ -441,7 +452,7 @@ object SampleWdl { } trait ZeroOrMorePostfixQuantifier extends SampleWdl { - override def wdlSource(runtime: String): WdlSource = + override def workflowSource(runtime: String): WorkflowSource = s""" |task hello { | Array[String] person @@ -472,7 +483,7 @@ object SampleWdl { } trait OneOrMorePostfixQuantifier extends SampleWdl { - override def wdlSource(runtime: String): WdlSource = + override def workflowSource(runtime: String): WorkflowSource = s""" |task hello { | Array[String]+ person @@ -499,7 +510,7 @@ object SampleWdl { } object CurrentDirectory extends SampleWdl { - override def wdlSource(runtime: String): String = + override def workflowSource(runtime: String): String = """ |task whereami { | command { @@ -520,7 +531,7 @@ object SampleWdl { } object ArrayIO extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s""" |task serialize { | Array[String] strs @@ -547,7 +558,7 @@ object SampleWdl { createFileArray(catRootDir) def cleanup() = cleanupFileArray(catRootDir) - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s""" |task cat { | Array[File]+ files @@ -572,7 +583,7 @@ object SampleWdl { createFileArray(catRootDir) def cleanup() = cleanupFileArray(catRootDir) - override def wdlSource(runtime: String = "") = + override def 
workflowSource(runtime: String = "") = s""" |task write_map { | Map[File, String] file_to_name @@ -661,7 +672,7 @@ object SampleWdl { |} """.stripMargin - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s"""$tasks | |workflow w { @@ -679,7 +690,7 @@ object SampleWdl { } object SiblingsScatterWdl extends ScatterWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s"""$tasks | |workflow w { @@ -700,7 +711,7 @@ object SampleWdl { } object SimpleScatterWdl extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s"""task echo_int { | Int int | command {echo $${int}} @@ -723,7 +734,7 @@ object SampleWdl { } object SimpleScatterWdlWithOutputs extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s"""task echo_int { | Int int | command {echo $${int}} @@ -748,7 +759,7 @@ object SampleWdl { } case class PrepareScatterGatherWdl(salt: String = UUID.randomUUID().toString) extends SampleWdl { - override def wdlSource(runtime: String = "") = { + override def workflowSource(runtime: String = "") = { s""" |# |# Goal here is to split up the input file into files of 1 line each (in the prepare) then in parallel call wc -w on each newly created file and count the words into another file then in the gather, sum the results of each parallel call to come up with @@ -816,7 +827,7 @@ object SampleWdl { } object FileClobber extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s"""task read_line { | File in | command { cat $${in} } @@ -841,7 +852,7 @@ object SampleWdl { } object FilePassingWorkflow extends SampleWdl { - override def wdlSource(runtime: String): WdlSource = + override def workflowSource(runtime: String): WorkflowSource = s"""task a { | File in | String out_name = 
"out" @@ -881,7 +892,7 @@ object SampleWdl { * different */ case class CallCachingWorkflow(salt: String) extends SampleWdl { - override def wdlSource(runtime: String): WdlSource = + override def workflowSource(runtime: String): WorkflowSource = s"""task a { | File in | String out_name = "out" @@ -933,7 +944,7 @@ object SampleWdl { |k3\tv3 """.stripMargin.trim - override def wdlSource(runtime: String): WdlSource = + override def workflowSource(runtime: String): WorkflowSource = s""" |task a { | Array[String] array @@ -969,7 +980,7 @@ object SampleWdl { } object ArrayOfArrays extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s"""task subtask { | Array[File] a | command { @@ -1009,7 +1020,7 @@ object SampleWdl { } object CallCachingHashingWdl extends SampleWdl { - override def wdlSource(runtime: String): WdlSource = + override def workflowSource(runtime: String): WorkflowSource = s"""task t { | Int a | Float b @@ -1047,7 +1058,7 @@ object SampleWdl { } object ExpressionsInInputs extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s"""task echo { | String inString | command { @@ -1077,7 +1088,7 @@ object SampleWdl { } object WorkflowFailSlow extends SampleWdl { - override def wdlSource(runtime: String = "") = + override def workflowSource(runtime: String = "") = s""" task shouldCompleteFast { | Int a diff --git a/core/src/test/scala/cromwell/util/TestFileUtil.scala b/core/src/test/scala/cromwell/util/TestFileUtil.scala index a6bedd490..6f0d08a1f 100644 --- a/core/src/test/scala/cromwell/util/TestFileUtil.scala +++ b/core/src/test/scala/cromwell/util/TestFileUtil.scala @@ -3,7 +3,7 @@ package cromwell.util import java.nio.file.attribute.PosixFilePermission import cromwell.core.path.{DefaultPathBuilder, Path} -import wdl4s.values._ +import wdl4s.wdl.values._ trait TestFileUtil { def createCannedFile(prefix: String, contents: 
String, dir: Option[Path] = None): Path = { diff --git a/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala b/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala index 91d678c01..7924f30ab 100644 --- a/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala +++ b/core/src/test/scala/cromwell/util/WdlValueJsonFormatterSpec.scala @@ -1,14 +1,10 @@ package cromwell.util -import scala.Vector - -import org.scalatest.FlatSpec -import org.scalatest.Matchers - -import JsonFormatting.WdlValueJsonFormatter.WdlValueJsonFormat -import spray.json.{ JsObject, pimpString } -import wdl4s.types.{ WdlArrayType, WdlStringType } -import wdl4s.values.{ WdlArray, WdlPair, WdlString } +import cromwell.util.JsonFormatting.WdlValueJsonFormatter.WdlValueJsonFormat +import org.scalatest.{FlatSpec, Matchers} +import spray.json.{JsObject, pimpString} +import wdl4s.wdl.types.{WdlArrayType, WdlStringType} +import wdl4s.wdl.values.{WdlArray, WdlPair, WdlString} class WdlValueJsonFormatterSpec extends FlatSpec with Matchers { diff --git a/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala index c2a0c536b..71c0d813f 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala @@ -21,6 +21,7 @@ import scala.util.{Failure, Success, Try} class CromwellClient(val cromwellUrl: URL, val apiVersion: String)(implicit actorSystem: ActorSystem, materializer: ActorMaterializer) { + lazy val engineEndpoint = s"$cromwellUrl/engine/$apiVersion" lazy val submitEndpoint = s"$cromwellUrl/api/workflows/$apiVersion" // Everything else is a suffix off the submit endpoint: lazy val batchSubmitEndpoint = s"$submitEndpoint/batch" @@ -28,22 +29,35 @@ class CromwellClient(val cromwellUrl: URL, val apiVersion: String)(implicit acto def abortEndpoint(workflowId: WorkflowId): String = 
workflowSpecificEndpoint(workflowId, "abort") def statusEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "status") def metadataEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "metadata") + def outputsEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "outputs") + def logsEndpoint(workflowId: WorkflowId): String = workflowSpecificEndpoint(workflowId, "logs") + def diffEndpoint(workflowA: WorkflowId, callA: String, indexA: ShardIndex, workflowB: WorkflowId, callB: String, indexB: ShardIndex): String = { + def shardParam(aOrB: String, s: ShardIndex) = s.index.map(i => s"&index$aOrB=$i.toString").getOrElse("") + s"$submitEndpoint/callcaching/diff?workflowA=$workflowA&callA=$callA&workflowB=$workflowB&callB=$callB${shardParam("A", indexA)}${shardParam("B", indexB)}" + } lazy val backendsEndpoint = s"$submitEndpoint/backends" + lazy val versionEndpoint = s"$engineEndpoint/version" import model.CromwellStatusJsonSupport._ + import model.WorkflowOutputsJsonSupport._ + import model.WorkflowLogsJsonSupport._ import model.CromwellBackendsJsonSupport._ + import model.CromwellVersionJsonSupport._ + import model.CallCacheDiffJsonSupport._ private def requestEntityForSubmit(workflowSubmission: WorkflowSubmission) = { import cromwell.api.model.LabelsJsonFormatter._ val sourceBodyParts = Map( - "wdlSource" -> Option(workflowSubmission.wdl), + "workflowSource" -> Option(workflowSubmission.wdl), + "workflowType" -> workflowSubmission.workflowType, + "workflowTypeVersion" -> workflowSubmission.workflowTypeVersion, "workflowInputs" -> workflowSubmission.inputsJson, "workflowOptions" -> insertSecrets(workflowSubmission.options, workflowSubmission.refreshToken), "customLabels" -> Option(workflowSubmission.customLabels.toJson.toString) ) collect { case (name, Some(source: String)) => Multipart.FormData.BodyPart(name, HttpEntity(MediaTypes.`application/json`, ByteString(source))) } val zipBodyParts 
= Map( - "wdlDependencies" -> workflowSubmission.zippedImports + "workflowDependencies" -> workflowSubmission.zippedImports ) collect { case (name, Some(file)) => Multipart.FormData.BodyPart.fromPath(name, MediaTypes.`application/zip`, file.path) } val multipartFormData = Multipart.FormData((sourceBodyParts ++ zipBodyParts).toSeq : _*) @@ -64,7 +78,15 @@ class CromwellClient(val cromwellUrl: URL, val apiVersion: String)(implicit acto val requestEntity = requestEntityForSubmit(workflow) // Make a set of submissions that represent the batch (so we can zip with the results later): - val submissionSet = workflow.inputsBatch.map(inputs => WorkflowSingleSubmission(workflow.wdl, Option(inputs), workflow.options, workflow.customLabels, workflow.zippedImports, workflow.refreshToken)) + val submissionSet = workflow.inputsBatch.map(inputs => WorkflowSingleSubmission( + wdl = workflow.wdl, + workflowType = workflow.workflowType, + workflowTypeVersion = workflow.workflowTypeVersion, + inputsJson = Option(inputs), + options = workflow.options, + customLabels = workflow.customLabels, + zippedImports = workflow.zippedImports, + refreshToken = workflow.refreshToken)) makeRequest[List[CromwellStatus]](HttpRequest(HttpMethods.POST, batchSubmitEndpoint, List.empty[HttpHeader], requestEntity)) map { statuses => val zipped = submissionSet.zip(statuses) @@ -74,38 +96,38 @@ class CromwellClient(val cromwellUrl: URL, val apiVersion: String)(implicit acto } } - def abort(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStatus] = getRequest[CromwellStatus](abortEndpoint(workflowId)) map WorkflowStatus.apply - def status(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStatus] = getRequest[CromwellStatus](statusEndpoint(workflowId)) map WorkflowStatus.apply - def metadata(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowMetadata] = getRequest[String](metadataEndpoint(workflowId)) map WorkflowMetadata - def backends(implicit ec: 
ExecutionContext): Future[CromwellBackends] = getRequest[CromwellBackends](backendsEndpoint) + def abort(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStatus] = simpleRequest[CromwellStatus](uri = abortEndpoint(workflowId), method = HttpMethods.POST) map WorkflowStatus.apply + def status(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStatus] = simpleRequest[CromwellStatus](statusEndpoint(workflowId)) map WorkflowStatus.apply + def metadata(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowMetadata] = simpleRequest[String](metadataEndpoint(workflowId)) map WorkflowMetadata + def outputs(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowOutputs] = simpleRequest[WorkflowOutputs](outputsEndpoint(workflowId)) + def logs(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowLogs] = simpleRequest[WorkflowLogsStruct](outputsEndpoint(workflowId)) map WorkflowLogs.apply + def callCacheDiff(workflowA: WorkflowId, callA: String, shardIndexA: ShardIndex, workflowB: WorkflowId, callB: String, shardIndexB: ShardIndex)(implicit ec: ExecutionContext): Future[CallCacheDiff] = + simpleRequest[CallCacheDiff](diffEndpoint(workflowA, callA, shardIndexA, workflowB, callB, shardIndexB)) + def backends(implicit ec: ExecutionContext): Future[CromwellBackends] = simpleRequest[CromwellBackends](backendsEndpoint) + def version(implicit ec: ExecutionContext): Future[CromwellVersion] = simpleRequest[CromwellVersion](versionEndpoint) + + private [api] def executeRequest(request: HttpRequest) = Http().singleRequest(request) /** * * @tparam A The type of response expected. Must be supported by an implicit unmarshaller from ResponseEntity. 
*/ private def makeRequest[A](request: HttpRequest)(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): Future[A] = for { - response <- Http().singleRequest(request) + response <- executeRequest(request) decoded <- Future.fromTry(decodeResponse(response)) entity <- Future.fromTry(decoded.toEntity) - unmarshalled <- entity.to[A] + unmarshalled <- unmarshall(response, entity)(um, ec) } yield unmarshalled - private def getRequest[A](uri: String)(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): Future[A] = makeRequest[A](HttpRequest(uri = uri)) - - private def insertSecrets(options: Option[String], refreshToken: Option[String]): Option[String] = { - import DefaultJsonProtocol._ - val tokenKey = "refresh_token" - - def addToken(optionsMap: Map[String, JsValue]): Map[String, JsValue] = { - refreshToken match { - case Some(token) if optionsMap.get(tokenKey).isDefined => optionsMap + (tokenKey -> JsString(token)) - case _ => optionsMap - } - } + private def unmarshall[A](response: HttpResponse, entity: Unmarshal[ResponseEntity])(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): Future[A] = { + import CromwellFailedResponseExceptionJsonSupport._ - options map (o => addToken(o.parseJson.asJsObject.convertTo[Map[String, JsValue]]).toJson.toString) + if (response.status.isSuccess()) entity.to[A] + else entity.to[CromwellFailedResponseException] flatMap Future.failed } + private def simpleRequest[A](uri: String, method: HttpMethod = HttpMethods.GET)(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): Future[A] = makeRequest[A](HttpRequest(uri = uri, method = method)) + private val decoders = Map( HttpEncodings.gzip -> Gzip, HttpEncodings.deflate -> Deflate, @@ -114,7 +136,7 @@ class CromwellClient(val cromwellUrl: URL, val apiVersion: String)(implicit acto private def decodeResponse(response: HttpResponse): Try[HttpResponse] = { decoders.get(response.encoding) map { decoder => - 
Try(decoder.decode(response)) + Try(decoder.decodeMessage(response)) } getOrElse Failure(UnsuccessfulRequestException(s"No decoder for ${response.encoding}", response)) } } @@ -124,6 +146,7 @@ object CromwellClient { def toEntity: Try[Unmarshal[ResponseEntity]] = response match { case HttpResponse(_: StatusCodes.Success, _, entity, _) => Success(Unmarshal(entity)) + case HttpResponse(_: StatusCodes.ServerError, _, entity, _) => Success(Unmarshal(entity)) case other => Failure(UnsuccessfulRequestException("Unmarshalling error", other)) } } @@ -131,4 +154,20 @@ object CromwellClient { final case class UnsuccessfulRequestException(message: String, httpResponse: HttpResponse) extends Exception { override def getMessage: String = message + ": " + httpResponse.toString } + + private[api] def insertSecrets(options: Option[String], refreshToken: Option[String]): Option[String] = { + import DefaultJsonProtocol._ + val tokenKey = "refresh_token" + + val secretOptions = for { + refreshTokenValue <- refreshToken + optionsValue <- options + optionsMap = optionsValue.parseJson.asJsObject.convertTo[Map[String, JsValue]] + if optionsMap.contains(tokenKey) + secretMap = optionsMap.updated(tokenKey, JsString(refreshTokenValue)) + secretValue = secretMap.toJson.toString + } yield secretValue + + secretOptions orElse options + } } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala new file mode 100644 index 000000000..fa4e7fb91 --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala @@ -0,0 +1,15 @@ +package cromwell.api.model + +import ShardIndexFormatter._ +import WorkflowIdJsonFormatter._ +import spray.json.DefaultJsonProtocol + +case class CallCacheDiffCallDescription(executionStatus: String, allowResultReuse: Boolean, callFqn: String, jobIndex: ShardIndex, workflowId: WorkflowId) +case class HashDifference(hashKey: String, callA: 
Option[String], callB: Option[String]) +case class CallCacheDiff(callA: CallCacheDiffCallDescription, callB: CallCacheDiffCallDescription, hashDifferential: List[HashDifference]) + +object CallCacheDiffJsonSupport extends DefaultJsonProtocol { + implicit val CallCacheDiffCallDescriptionFormat = jsonFormat5(CallCacheDiffCallDescription) + implicit val HashDifferenceFormat = jsonFormat3(HashDifference) + implicit val CallCacheDiffFormat = jsonFormat3(CallCacheDiff) +} diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellFailedResponseException.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellFailedResponseException.scala new file mode 100644 index 000000000..6f58fe3bd --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellFailedResponseException.scala @@ -0,0 +1,9 @@ +package cromwell.api.model + +import spray.json.DefaultJsonProtocol + +object CromwellFailedResponseExceptionJsonSupport extends DefaultJsonProtocol { + implicit val CromwellFailedResponseExceptionFormat = jsonFormat2(CromwellFailedResponseException) +} + +case class CromwellFailedResponseException(status: String, message: String) extends Exception(message) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala new file mode 100644 index 000000000..7fcb0dcba --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala @@ -0,0 +1,15 @@ +package cromwell.api.model + +import java.time.OffsetDateTime +import spray.json.DefaultJsonProtocol +import cromwell.api.model.WorkflowIdJsonFormatter._ +import cromwell.api.model.WorkflowStatusJsonFormatter._ + +case class CromwellQueryResults(results: Seq[CromwellQueryResult]) + +case class CromwellQueryResult(name: String, id: WorkflowId, status: WorkflowStatus, end: OffsetDateTime, start: OffsetDateTime) + +object CromwellQueryResultJsonFormatter extends 
DefaultJsonProtocol { + implicit val CromwellQueryResultJsonFormat = jsonFormat5(CromwellQueryResult) + implicit val CromwellQueryResultsJsonFormat = jsonFormat1(CromwellQueryResults) +} diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellVersion.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellVersion.scala new file mode 100644 index 000000000..d6c71a065 --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellVersion.scala @@ -0,0 +1,9 @@ +package cromwell.api.model + +import spray.json.DefaultJsonProtocol + +object CromwellVersionJsonSupport extends DefaultJsonProtocol { + implicit val CromwellVersionFormat = jsonFormat1(CromwellVersion) +} + +case class CromwellVersion(cromwell: String) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala index 7cb72dd94..fd9d88d21 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala @@ -1,13 +1,15 @@ package cromwell.api.model import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, RootJsonFormat} +import scala.language.postfixOps object LabelsJsonFormatter extends DefaultJsonProtocol { implicit object LabelJsonFormat extends RootJsonFormat[List[Label]] { def write(l: List[Label]) = JsObject(l map { label => label.key -> JsString(label.value)} :_* ) - def read(value: JsValue) = value match { - case JsObject(x) => x map { case (k, JsString(v)) => Label(k, v) } toList - } + def read(value: JsValue) = value.asJsObject.fields map { + case (k, JsString(v)) => Label(k, v) + case other => throw new UnsupportedOperationException(s"Cannot deserialize $other to a Label") + } toList } } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/OutputResponse.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/OutputResponse.scala deleted file mode 100644 index 
657aca668..000000000 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/OutputResponse.scala +++ /dev/null @@ -1,9 +0,0 @@ -package cromwell.api.model - -import spray.json.DefaultJsonProtocol - -object OutputResponseJsonSupport extends DefaultJsonProtocol { - implicit val OutputResponseFormat = jsonFormat2(OutputResponse) -} - -case class OutputResponse(id: String, outputs: Map[String, String]) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala new file mode 100644 index 000000000..405305b8a --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala @@ -0,0 +1,18 @@ +package cromwell.api.model + +import spray.json.{DefaultJsonProtocol, JsNumber, JsValue, RootJsonFormat} + +case class ShardIndex(index: Option[Int]) extends AnyVal { + override def toString: String = index.getOrElse(-1).toString +} + +object ShardIndexFormatter extends DefaultJsonProtocol { + implicit object ShardIndexJsonFormat extends RootJsonFormat[ShardIndex] { + def write(si: ShardIndex) = JsNumber(si.index.getOrElse(-1)) + def read(value: JsValue) = value match { + case JsNumber(i) if i.equals(-1) => ShardIndex(None) + case JsNumber(i) if i.isValidInt && i.intValue > 0 => ShardIndex(Option(i.intValue())) + case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a ShardIndex") + } + } +} diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala index 2ad4760bb..f52495136 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala @@ -2,6 +2,8 @@ package cromwell.api.model import java.util.UUID +import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat} + // ********* !!!!!!!!!! ******** // // WARNING! 
This is the Cromwell API version of WorkflowId. If you aren't changing the API client, you probably @@ -9,7 +11,7 @@ import java.util.UUID // // ********* !!!!!!!!!! ******** -case class WorkflowId(id: UUID) { +final case class WorkflowId(id: UUID) extends AnyVal { override def toString = id.toString def shortString = id.toString.split("-")(0) } @@ -18,3 +20,14 @@ object WorkflowId { def fromString(id: String): WorkflowId = new WorkflowId(UUID.fromString(id)) def randomId() = WorkflowId(UUID.randomUUID()) } + +object WorkflowIdJsonFormatter extends DefaultJsonProtocol { + implicit object WorkflowIdJsonFormat extends RootJsonFormat[WorkflowId] { + def write(id: WorkflowId) = JsString(id.id.toString) + def read(value: JsValue) = value match { + case JsString(s) => WorkflowId.fromString(s) + case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a ShardIndex") + } + } +} + diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowLogs.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowLogs.scala new file mode 100644 index 000000000..b08d6299a --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowLogs.scala @@ -0,0 +1,39 @@ +package cromwell.api.model + +import spray.json.DefaultJsonProtocol +import cromwell.api.model.ShardIndexFormatter._ + +private[api] case class CallLogStruct(stdout: String, stderr: String, backendLogs: Map[String, String], shardIndex: ShardIndex, attempt: Int) +private[api] case class WorkflowLogsStruct(calls: Map[String, List[CallLogStruct]], id: String) + + +object WorkflowLogsJsonSupport extends DefaultJsonProtocol { + implicit val CallLogStructFormat = jsonFormat5(CallLogStruct) + implicit val WorkflowLogsStructFormat = jsonFormat2(WorkflowLogsStruct) +} + +/** + * @param logs Mapping from shard index and attempt + */ +case class CallLogs(logs: Map[JobLogsKey, JobLogs]) +case class JobLogsKey(shardIndex: ShardIndex, attempt: Int) +case class 
JobLogs(stdout: String, stderr: String, backendLogs: Map[String, String]) + +/** + * @param logs Mapping from call name to all logs for that call (including all shards and attempts) + */ +case class WorkflowLogs(logs: Map[String, CallLogs]) + +object WorkflowLogs { + def callStructsToCallLogs(structs: List[CallLogStruct]): CallLogs = { + val callLogs = structs map { struct => + JobLogsKey(struct.shardIndex, struct.attempt) -> JobLogs(struct.stdout, struct.stderr, struct.backendLogs) + } + CallLogs(callLogs.toMap) + } + + def apply(struct: WorkflowLogsStruct): WorkflowLogs = { + val workflowLogs = struct.calls map { case (callName, structs) => callName -> callStructsToCallLogs(structs)} + WorkflowLogs(workflowLogs) + } +} diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowOutputs.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowOutputs.scala new file mode 100644 index 000000000..4eb4cd0c8 --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowOutputs.scala @@ -0,0 +1,9 @@ +package cromwell.api.model + +import spray.json.DefaultJsonProtocol + +object WorkflowOutputsJsonSupport extends DefaultJsonProtocol { + implicit val OutputResponseFormat = jsonFormat2(WorkflowOutputs) +} + +case class WorkflowOutputs(id: String, outputs: Map[String, String]) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala index adadea912..6da1282d2 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala @@ -1,5 +1,7 @@ package cromwell.api.model +import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat} + // ********* !!!!!!!!!! ******** // // WARNING! This is a Cromwell API class. 
If you aren't changing the API client, you probably @@ -37,3 +39,13 @@ object WorkflowStatus { def apply(workflowStatus: CromwellStatus): WorkflowStatus = apply(workflowStatus.status) } + +object WorkflowStatusJsonFormatter extends DefaultJsonProtocol { + implicit object WorkflowStatusJsonFormat extends RootJsonFormat[WorkflowStatus] { + def write(status: WorkflowStatus) = new JsString(status.toString) + def read(value: JsValue) = value match { + case JsString(string) => WorkflowStatus(string) + case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a WorkflowStatus") + } + } +} diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala index 5a0b69bc6..f20a7aa15 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala @@ -4,6 +4,8 @@ import better.files.File sealed trait WorkflowSubmission { val wdl: String + val workflowType: Option[String] + val workflowTypeVersion: Option[String] val inputsJson: Option[String] val options: Option[String] val customLabels: Option[List[Label]] @@ -12,6 +14,8 @@ sealed trait WorkflowSubmission { } final case class WorkflowSingleSubmission(wdl: String, + workflowType: Option[String], + workflowTypeVersion: Option[String], inputsJson: Option[String], options: Option[String], customLabels: Option[List[Label]], @@ -19,11 +23,13 @@ final case class WorkflowSingleSubmission(wdl: String, refreshToken: Option[String]) extends WorkflowSubmission final case class WorkflowBatchSubmission(wdl: String, - inputsBatch: List[String], - options: Option[String], - customLabels: Option[List[Label]], - zippedImports: Option[File], - refreshToken: Option[String]) extends WorkflowSubmission { + workflowType: Option[String], + workflowTypeVersion: Option[String], + inputsBatch: List[String], + 
options: Option[String], + customLabels: Option[List[Label]], + zippedImports: Option[File], + refreshToken: Option[String]) extends WorkflowSubmission { - override val inputsJson: Option[String] = Option(inputsBatch.mkString(start="[", sep=",", end="]")) + override val inputsJson: Option[String] = Option(inputsBatch.mkString(start = "[", sep = ",", end = "]")) } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala new file mode 100644 index 000000000..5dd6bffb1 --- /dev/null +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala @@ -0,0 +1,20 @@ +package cromwell.api + +import java.time.OffsetDateTime + +import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat} + +package object model { + + implicit val OffsetDateTimeJsonFormat = OffsetDateTimeJsonFormatter.OffsetDateTimeFormat + + object OffsetDateTimeJsonFormatter extends DefaultJsonProtocol { + object OffsetDateTimeFormat extends RootJsonFormat[OffsetDateTime] { + def write(odt: OffsetDateTime) = new JsString(odt.toString) + def read(value: JsValue) = value match { + case JsString(string) => OffsetDateTime.parse(string) + case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into an OffsetDateTime") + } + } + } +} diff --git a/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala new file mode 100644 index 000000000..5c0b43628 --- /dev/null +++ b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala @@ -0,0 +1,36 @@ +package cromwell.api + +import org.scalatest.prop.TableDrivenPropertyChecks +import org.scalatest.{FlatSpec, Matchers} +import spray.json.JsonParser.ParsingException + +class CromwellClientSpec extends FlatSpec with Matchers with TableDrivenPropertyChecks { + behavior of "CromwellClient" + + val table = Table( + ("description", 
"optionsOption", "refreshTokenOption", "expected"), + ("ignore bad json when refresh token not provided", Option("{"), None, Option("{")), + ("not format json when refresh token key not found", Option("{ }"), Option("myToken"), Option("{ }")), + ("replace token when found", Option("""{"refresh_token" : "replace_me"}"""), Option("myToken"), + Option("""{"refresh_token":"myToken"}""")), + ) + + forAll(table) { (description, optionsOption, refreshTokenOption, expected) => + it should description in { + val actual = CromwellClient.insertSecrets(optionsOption, refreshTokenOption) + actual should be(expected) + } + } + + it should "throw an exception when inserting a refresh token into bad json" in { + val optionsOption = Option("{") + val refreshTokenOption = Option("myToken") + val actual = intercept[ParsingException](CromwellClient.insertSecrets(optionsOption, refreshTokenOption)) + actual.summary should be("""Unexpected end-of-input at input index 1 (line 1, position 2), expected '"'""") + actual.detail should be( + """| + |{ + | ^ + |""".stripMargin) + } +} diff --git a/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala new file mode 100644 index 000000000..d8b2dd917 --- /dev/null +++ b/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala @@ -0,0 +1,41 @@ +package cromwell.api + +import java.net.URL + +import akka.actor.ActorSystem +import akka.http.scaladsl.model._ +import akka.stream.ActorMaterializer +import akka.testkit.TestKit +import cromwell.api.model.CromwellFailedResponseException +import org.scalatest.{AsyncFlatSpecLike, BeforeAndAfterAll, Matchers} + +import scala.concurrent.duration._ +import scala.concurrent.{Await, Future} +import scala.language.postfixOps + +class CromwellResponseFailedSpec extends TestKit(ActorSystem()) with AsyncFlatSpecLike with Matchers with BeforeAndAfterAll { + override def afterAll(): Unit = { + 
Await.ready(system.terminate(), 1 second) + super.afterAll() + } + + implicit val materializer = ActorMaterializer() + + "CromwellAPIClient" should "try to fail the Future with a CromwellFailedResponseException if the HttpResponse is unsuccessful" in { + val client = new CromwellClient(new URL("http://fakeurl"), "v1") { + override def executeRequest(request: HttpRequest): Future[HttpResponse] = Future.successful( + new HttpResponse(StatusCodes.ServiceUnavailable, List.empty[HttpHeader], HttpEntity(ContentTypes.`application/json`, + """{ + | "status": "fail", + | "message": "Cromwell service shutting down" + |} + """.stripMargin), HttpProtocols.`HTTP/1.1`) + ) + } + + recoverToExceptionIf[CromwellFailedResponseException] { client.version(scala.concurrent.ExecutionContext.global) } map { exception => + assert(exception.status == "fail") + assert(exception.message == "Cromwell service shutting down") + } + } +} diff --git a/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala new file mode 100644 index 000000000..f1983e794 --- /dev/null +++ b/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala @@ -0,0 +1,45 @@ +package cromwell.api.model + +import java.time.OffsetDateTime + +import org.scalatest.{FlatSpec, Matchers} +import spray.json._ +import cromwell.api.model.CromwellQueryResultJsonFormatter._ + +class CromwellQueryResultJsonFormatterSpec extends FlatSpec with Matchers { + + behavior of "CromwellQueryResultJsonFormat" + + val sampleQueryResult = CromwellQueryResults(results = List( + CromwellQueryResult("switcheroo", WorkflowId.fromString("bee51f36-396d-4e22-8a81-33dedff66bf6"), Failed, OffsetDateTime.parse("2017-07-24T14:44:34.010-04:00"), OffsetDateTime.parse("2017-07-24T14:44:33.227-04:00")), + CromwellQueryResult("switcheroo", 
WorkflowId.fromString("0071495e-39eb-478e-bc98-8614b986c91e"), Succeeded, OffsetDateTime.parse("2017-07-24T15:06:45.940-04:00"), OffsetDateTime.parse("2017-07-24T15:04:54.372-04:00")) + )) + + val sampleJson = """|{ + | "results": [ + | { + | "name": "switcheroo", + | "id": "bee51f36-396d-4e22-8a81-33dedff66bf6", + | "status": "Failed", + | "end": "2017-07-24T14:44:34.010-04:00", + | "start": "2017-07-24T14:44:33.227-04:00" + | }, + | { + | "name": "switcheroo", + | "id": "0071495e-39eb-478e-bc98-8614b986c91e", + | "status": "Succeeded", + | "end": "2017-07-24T15:06:45.940-04:00", + | "start": "2017-07-24T15:04:54.372-04:00" + | } + | ] + |}""".stripMargin.parseJson.asJsObject + + it should "write a query result as a structured JsObject" in { + + sampleQueryResult.toJson shouldEqual sampleJson + } + + it should "read a query result as a structured JsObject" in { + sampleJson.convertTo[CromwellQueryResults] shouldBe sampleQueryResult + } +} diff --git a/database/migration/src/main/resources/changelog.xml b/database/migration/src/main/resources/changelog.xml index fc864a848..e005c5ef4 100644 --- a/database/migration/src/main/resources/changelog.xml +++ b/database/migration/src/main/resources/changelog.xml @@ -65,6 +65,9 @@ + + +