Skip to content

Commit

Permalink
Fix support for AWS ACL for Batch #2671
Browse files Browse the repository at this point in the history
Signed-off-by: Paolo Di Tommaso <paolo.ditommaso@gmail.com>
  • Loading branch information
pditommaso committed Dec 29, 2022
1 parent b362d6f commit a964491
Show file tree
Hide file tree
Showing 7 changed files with 195 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@
import com.amazonaws.services.s3.transfer.Upload;
import com.amazonaws.services.s3.transfer.UploadContext;
import com.upplication.s3fs.util.S3MultipartOptions;
import nextflow.cloud.aws.util.AwsHelper;
import nextflow.util.Duration;
import nextflow.util.ThreadPoolHelper;
import nextflow.util.ThreadPoolManager;
Expand Down Expand Up @@ -291,7 +292,7 @@ public void setEndpoint(String endpoint) {
/**
 * Sets the S3 canned ACL applied to objects written by this client.
 *
 * NOTE(review): this span is a diff view — the two consecutive assignments are the
 * old (CannedAccessControlList.valueOf) and new (AwsHelper.parseS3Acl) versions of
 * the same line; only the AwsHelper.parseS3Acl form is current after this commit.
 *
 * @param acl the ACL name, e.g. {@code PublicRead} or {@code public-read};
 *            a {@code null} value leaves the current setting untouched
 */
public void setCannedAcl(String acl) {
// silently ignore a missing setting rather than resetting the field
if( acl==null )
return;
this.cannedAcl = CannedAccessControlList.valueOf(acl);
this.cannedAcl = AwsHelper.parseS3Acl(acl);
log.debug("Setting S3 canned ACL={} [{}]", this.cannedAcl, acl);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package nextflow.cloud.aws.batch

import java.nio.file.Path

import com.amazonaws.services.s3.model.CannedAccessControlList
import groovy.transform.CompileStatic
import groovy.transform.EqualsAndHashCode
import groovy.transform.ToString
Expand All @@ -28,6 +29,8 @@ import nextflow.cloud.CloudTransferOptions
import nextflow.exception.ProcessUnrecoverableException
import nextflow.util.Duration

import static nextflow.cloud.aws.util.AwsHelper.parseS3Acl

/**
* Helper class wrapping AWS config options required for Batch job executions
*/
Expand Down Expand Up @@ -96,6 +99,11 @@ class AwsOptions implements CloudTransferOptions {
*/
Integer schedulingPriority

/**
* S3 access control list
*/
CannedAccessControlList s3Acl

/**
* @return A list of volume mounts using the docker cli convention ie. `/some/path` or `/some/path:/container/path` or `/some/path:/container/path:ro`
*/
Expand All @@ -111,6 +119,7 @@ class AwsOptions implements CloudTransferOptions {

AwsOptions(Session session) {
cliPath = getCliPath0(session)
s3Acl = parseS3Acl(session.config.navigate('aws.client.s3Acl') as String)
debug = session.config.navigate('aws.client.debug') as Boolean
storageClass = session.config.navigate('aws.client.uploadStorageClass') as String
storageKmsKeyId = session.config.navigate('aws.client.storageKmsKeyId') as String
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
/*
 * Copyright (c) 2020-2021. Seqera Labs, S.L.
 *
 * All Rights reserved
 *
 */

package nextflow.cloud.aws.util

import com.amazonaws.services.s3.model.CannedAccessControlList
import com.google.common.base.CaseFormat

/**
 * Helper class for AWS
 *
 * @author Paolo Di Tommaso <paolo.ditommaso@gmail.com>
 */
class AwsHelper {

    /**
     * Parses an S3 canned ACL string into the corresponding enum constant.
     *
     * Accepts both the SDK enum constant name (e.g. {@code PublicRead}) and the
     * AWS canonical kebab-case form (e.g. {@code public-read}).
     *
     * @param value the ACL string; may be {@code null} or empty
     * @return the matching {@link CannedAccessControlList}, or {@code null} when
     *      {@code value} is null/empty
     * @throws IllegalArgumentException when the value does not name a canned ACL
     */
    static CannedAccessControlList parseS3Acl(String value) {
        if( !value )
            return null

        // AWS canonical ACL names are lowercase and hyphen-separated (`public-read`),
        // but single-word canonical names such as `private` carry no hyphen — route
        // any all-lowercase value through the case conversion as well, otherwise
        // `valueOf('private')` would throw (the enum constant is `Private`)
        return value.contains('-') || value == value.toLowerCase()
            ? CannedAccessControlList.valueOf(CaseFormat.LOWER_HYPHEN.to(CaseFormat.UPPER_CAMEL,value))
            : CannedAccessControlList.valueOf(value)
    }

}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

package nextflow.cloud.aws.util

import com.amazonaws.services.s3.model.CannedAccessControlList
import nextflow.Global
import nextflow.Session
import nextflow.cloud.aws.batch.AwsOptions
Expand All @@ -33,6 +34,7 @@ class S3BashLib extends BashFunLib<S3BashLib> {
private String debug = ''
private String cli = 'aws'
private String retryMode
private String acl = ''

S3BashLib withCliPath(String cliPath) {
if( cliPath )
Expand Down Expand Up @@ -69,6 +71,12 @@ class S3BashLib extends BashFunLib<S3BashLib> {
return this
}

/**
 * Sets the canned ACL fragment appended to the `aws s3 cp` command line.
 *
 * @param value the canned ACL to apply; a null value leaves the option empty
 * @return this builder, for method chaining
 */
S3BashLib withAcl(CannedAccessControlList value) {
    if( value != null ) {
        // trailing blank keeps the option separated from the next CLI flag
        this.acl = "--acl $value "
    }
    return this
}

protected String retryEnv() {
if( !retryMode )
return ''
Expand All @@ -86,11 +94,11 @@ class S3BashLib extends BashFunLib<S3BashLib> {
local name=\$1
local s3path=\$2
if [[ "\$name" == - ]]; then
$cli s3 cp --only-show-errors ${debug}${storageEncryption}${storageKmsKeyId}--storage-class $storageClass - "\$s3path"
$cli s3 cp --only-show-errors ${debug}${acl}${storageEncryption}${storageKmsKeyId}--storage-class $storageClass - "\$s3path"
elif [[ -d "\$name" ]]; then
$cli s3 cp --only-show-errors --recursive ${debug}${storageEncryption}${storageKmsKeyId}--storage-class $storageClass "\$name" "\$s3path/\$name"
$cli s3 cp --only-show-errors --recursive ${debug}${acl}${storageEncryption}${storageKmsKeyId}--storage-class $storageClass "\$name" "\$s3path/\$name"
else
$cli s3 cp --only-show-errors ${debug}${storageEncryption}${storageKmsKeyId}--storage-class $storageClass "\$name" "\$s3path/\$name"
$cli s3 cp --only-show-errors ${debug}${acl}${storageEncryption}${storageKmsKeyId}--storage-class $storageClass "\$name" "\$s3path/\$name"
fi
}
Expand Down Expand Up @@ -124,6 +132,7 @@ class S3BashLib extends BashFunLib<S3BashLib> {
.withStorageKmsKeyId( opts.storageKmsKeyId )
.withRetryMode( opts.retryMode )
.withDebug( opts.debug )
.withAcl( opts.s3Acl )
}

static String script(AwsOptions opts) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package nextflow.cloud.aws.batch

import java.nio.file.Paths

import com.amazonaws.services.s3.model.CannedAccessControlList
import nextflow.Session
import nextflow.exception.ProcessUnrecoverableException
import spock.lang.Specification
Expand Down Expand Up @@ -241,4 +242,23 @@ class AwsOptionsTest extends Specification {
opts.volumes == ['/some/dir', '/other/dir']
}

def 'should parse s3 acl' ( ) {
    when: 'the ACL is given as the enum constant name'
    def options = new AwsOptions(new Session(aws:[client:[s3Acl: 'PublicRead']]))
    then:
    options.getS3Acl() == CannedAccessControlList.PublicRead

    when: 'the ACL is given in the AWS kebab-case form'
    options = new AwsOptions(new Session(aws:[client:[s3Acl: 'public-read']]))
    then:
    options.getS3Acl() == CannedAccessControlList.PublicRead

    when: 'the ACL does not name any canned ACL'
    new AwsOptions(new Session(aws:[client:[s3Acl: 'unknown']]))
    then:
    thrown(IllegalArgumentException)
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
/*
 * Copyright (c) 2020-2021. Seqera Labs, S.L.
 *
 * All Rights reserved
 *
 */

package nextflow.cloud.aws.util

import com.amazonaws.services.s3.model.CannedAccessControlList
import spock.lang.Specification
/**
 *
 * @author Paolo Di Tommaso <paolo.ditommaso@gmail.com>
 */
class AwsHelperTest extends Specification {

    def 'should parse S3 acl' () {
        expect: 'both the enum-name and kebab-case spellings resolve to the same constant'
        CannedAccessControlList.PublicRead == AwsHelper.parseS3Acl('PublicRead')
        CannedAccessControlList.PublicRead == AwsHelper.parseS3Acl('public-read')

        when: 'the value does not name any canned ACL'
        AwsHelper.parseS3Acl('unknown')
        then:
        thrown(IllegalArgumentException)
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -635,4 +635,99 @@ class S3BashLibTest extends Specification {
'''.stripIndent(true)
}


// Verifies the generated bash helper library: when `aws.client.s3Acl` is configured,
// every `aws s3 cp` upload invocation must carry the `--acl` option in kebab-case
// form (`--acl public-read`), while download commands are left untouched.
def 'should create with s3 acl' () {
given: 'a session configured with an S3 canned ACL (enum-name spelling)'
def sess1 = Mock(Session) {
getConfig() >> [aws: [ client: [ s3Acl: 'PublicRead']]]
}
and:
def opts = new AwsOptions(sess1)

// the expected script is compared verbatim — note the `--acl public-read`
// fragment in each of the three nxf_s3_upload branches
expect:
S3BashLib.script(opts) == '''\
# bash helper functions
nxf_cp_retry() {
    local max_attempts=5
    local timeout=10
    local attempt=0
    local exitCode=0
    while (( $attempt < $max_attempts ))
    do
      if "$@"
        then
          return 0
      else
        exitCode=$?
      fi
      if [[ $exitCode == 0 ]]
      then
        break
      fi
      nxf_sleep $timeout
      attempt=$(( attempt + 1 ))
      timeout=$(( timeout * 2 ))
    done
}
nxf_parallel() {
    IFS=$'\\n'
    local cmd=("$@")
    local cpus=$(nproc 2>/dev/null || < /proc/cpuinfo grep '^process' -c)
    local max=$(if (( cpus>4 )); then echo 4; else echo $cpus; fi)
    local i=0
    local pid=()
    (
    set +u
    while ((i<${#cmd[@]})); do
        local copy=()
        for x in "${pid[@]}"; do
          [[ -e /proc/$x ]] && copy+=($x)
        done
        pid=("${copy[@]}")
        if ((${#pid[@]}>=$max)); then
          nxf_sleep 0.2
        else
          eval "${cmd[$i]}" &
          pid+=($!)
          ((i+=1))
        fi
    done
    for p in "${pid[@]}"; do
        wait $p
    done
    )
    unset IFS
}
# aws cli retry config
export AWS_RETRY_MODE=standard
export AWS_MAX_ATTEMPTS=5
# aws helper
nxf_s3_upload() {
    local name=$1
    local s3path=$2
    if [[ "$name" == - ]]; then
      aws s3 cp --only-show-errors --acl public-read --storage-class STANDARD - "$s3path"
    elif [[ -d "$name" ]]; then
      aws s3 cp --only-show-errors --recursive --acl public-read --storage-class STANDARD "$name" "$s3path/$name"
    else
      aws s3 cp --only-show-errors --acl public-read --storage-class STANDARD "$name" "$s3path/$name"
    fi
}
nxf_s3_download() {
    local source=$1
    local target=$2
    local file_name=$(basename $1)
    local is_dir=$(aws s3 ls $source | grep -F "PRE ${file_name}/" -c)
    if [[ $is_dir == 1 ]]; then
        aws s3 cp --only-show-errors --recursive "$source" "$target"
    else
        aws s3 cp --only-show-errors "$source" "$target"
    fi
}
'''.stripIndent(true)
}
}

0 comments on commit a964491

Please sign in to comment.