diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 5bd86c52..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-language: python
-
-cache: pip
-
-python:
- - 2.7
- - 3.5
- - 3.6
-
-install:
- - travis_retry pip install --upgrade pip
- - travis_retry pip install -r requirements.txt
- - travis_retry pip install --no-deps -r req_nodeps.txt
- - travis_retry pip install flake8==3.6.0
- - pip list --format=columns --outdated
-
-script:
- - flake8 --select F,E,W --ignore W504 --statistics shipyard.py convoy/*.py
- - if [[ $TRAVIS_PYTHON_VERSION > '3.4' ]]; then flake8 --select F,E,W --ignore W504 --statistics cascade/*.py cargo/*.py federation/*.py heimdall/*.py slurm/*.py; fi
- - shellcheck ./*.sh cargo/*.sh cascade/*.sh images/*.sh scripts/*.sh
diff --git a/.vsts/pipeline-replicate-singularity.yml b/.vsts/pipeline-replicate-singularity.yml
new file mode 100644
index 00000000..740ce3f2
--- /dev/null
+++ b/.vsts/pipeline-replicate-singularity.yml
@@ -0,0 +1,10 @@
+name: $(SourceBranch)$(Rev:.r)
+
+jobs:
+ - job: ReplicateSingularityImages
+ pool:
+ vmImage: ubuntu-20.04
+ steps:
+ - template: replicate-singularity.yml
+ parameters:
+ enabled: true
diff --git a/.vsts/pipeline.yml b/.vsts/pipeline.yml
index e2c959b9..df9bc163 100644
--- a/.vsts/pipeline.yml
+++ b/.vsts/pipeline.yml
@@ -1,5 +1,12 @@
name: $(SourceBranch)$(Rev:.r)
+variables:
+ VENV_VERSION: '20.4.3'
+ PY36_VER: '3.6.13'
+ PY37_VER: '3.7.10'
+ PY38_VER: '3.8.9'
+ PY39_VER: '3.9.4'
+
jobs:
- job: ComponentGovernance
pool:
@@ -11,16 +18,17 @@ jobs:
steps:
- task: ComponentGovernanceComponentDetection@0
displayName: 'CG Component Detection'
+
- job: Windows
pool:
vmImage: vs2017-win2016
strategy:
- maxParallel: 1
+ maxParallel: 0
matrix:
- Python37:
- python.version: '3.7'
- PYENV_VERSION: '3.7.5'
- VENV_VERSION: '16.2.0'
+ Python39:
+ python.version: '3.9'
+ PYENV_VERSION: $(PY39_VER)
+ PYTOX_ENV: 'py39'
steps:
- task: InstallPython@1
inputs:
@@ -30,14 +38,13 @@ jobs:
python -m pip install --upgrade pip
python -m pip install --upgrade setuptools wheel
python -m pip install -r requirements.txt
- python -m pip install --no-deps -r req_nodeps.txt
- python -m pip install flake8==3.6.0
+ python -m pip install flake8
python -m pip list --format=columns --outdated
displayName: Fetch Dependencies
- powershell: |
- flake8 --select F,E,W --ignore W504 --statistics shipyard.py convoy\*.py
- if ($env:PYTHON_VERSION -eq "3.7") {
- flake8 --select F,E,W --ignore W504 --statistics cascade\*.py cargo\*.py federation\*.py heimdall\*.py slurm\*.py
+ flake8 --exit-zero --select F,E,W --ignore W504 --statistics shipyard.py convoy\*.py
+ if ($env:PYENV_VERSION -eq $env:PY39_VER) {
+ flake8 --exit-zero --select F,E,W --ignore W504 --statistics cascade\*.py cargo\*.py federation\*.py heimdall\*.py slurm\*.py
}
displayName: Static Analysis
- powershell: |
@@ -50,10 +57,12 @@ jobs:
echo "##vso[task.setvariable variable=BUILDVER_TUPLE;]$buildverTuple"
echo "##vso[task.setvariable variable=ARTIFACT_CLI;]$artifactCli"
echo "##vso[task.setvariable variable=ARTIFACT_UPLOAD_PATH;]$artifactUploadPath"
+ $nugetPkgVer = "0.0." + $env:BUILD_BUILDID
+ echo "##vso[task.setvariable variable=NUGET_PACKAGE_VERSION;]$nugetPkgVer"
displayName: Pre-build Environment (Branch)
condition: >
and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI', 'Manual'),
- in(variables['Build.SourceBranchName'], 'master', 'develop'), eq(variables['python.version'], '3.7'))
+ in(variables['Build.SourceBranchName'], 'master', 'develop'), eq(variables['python.version'], '3.9'))
- powershell: |
git tag -l --points-at $env:BUILD_SOURCEVERSION | Tee-Object -Variable gitTag
$gitTag -match "^([\d\.])+"
@@ -77,7 +86,7 @@ jobs:
displayName: Pre-build Environment (Tagged Release)
condition: >
and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI', 'Manual'),
- startsWith(variables['Build.SourceBranch'], 'refs/tags/'), eq(variables['python.version'], '3.7'))
+ startsWith(variables['Build.SourceBranch'], 'refs/tags/'), eq(variables['python.version'], '3.9'))
- powershell: |
$artifactCliPath = "bin\\" + $env:ARTIFACT_CLI
echo "##vso[task.setvariable variable=ARTIFACT_CLI_PATH;]$artifactCliPath"
@@ -105,18 +114,25 @@ jobs:
pip -V
pip install --no-cache-dir pyinstaller
pip install --upgrade -r requirements.txt
- pip install --upgrade --no-deps -r req_nodeps.txt
pyinstaller -F -n "%ARTIFACT_CLI%" -p batch-shipyard --add-data federation\\docker-compose.yml;federation --add-data heimdall;heimdall --add-data schemas;schemas --add-data scripts;scripts --exclude-module future.tests --exclude-module future.backports.test --exclude-module future.moves.test --icon images\\docker\\windows\\azure.ico --version-file images\\docker\\windows\\file_version_info.txt --distpath bin shipyard.py
- blobxfer upload --storage-account "$(blobxfer.storageAccount)" --storage-account-key "$(blobxfer.storageAccountKey)" --remote-path "%ARTIFACT_UPLOAD_PATH%" --local-path "%ARTIFACT_CLI_PATH%" --strip-components 1 --file-md5 --overwrite
call pyi\\Scripts\\deactivate.bat
displayName: Build
- condition: and(succeeded(), ne(variables['ARTIFACT_CLI'], ''))
+ condition: and(succeeded(), ne(variables['ARTIFACT_CLI_PATH'], ''))
+ - template: ./sign-exec.yml
+ parameters:
+ enabled: ne(variables['ARTIFACT_CLI_PATH'], '')
+ - script: |
+ blobxfer upload --storage-account "$(blobxfer.storageAccount)" --storage-account-key "$(blobxfer.storageAccountKey)" --remote-path "%ARTIFACT_UPLOAD_PATH%" --local-path "%ARTIFACT_CLI_PATH%" --strip-components 1 --file-md5 --overwrite
+ displayName: CLI Artifact Upload
+ condition: and(succeeded(), ne(variables['ARTIFACT_CLI_PATH'], ''))
- powershell: |
Invoke-WebRequest "https://dist.nuget.org/win-x86-commandline/latest/nuget.exe" -OutFile "nuget.exe"
$env:GIT_TAG | Out-File site-extension\\version.txt -Force -NoNewline -Encoding ASCII
Get-Content -Path site-extension\\version.txt
$nugetArtifact = 'BatchShipyard.' + $env:NUGET_PACKAGE_VERSION + '.nupkg'
+ $nugetArtifactPath = "bin\\" + $nugetArtifact
echo "##vso[task.setvariable variable=NUGET_ARTIFACT;]$nugetArtifact"
+ echo "##vso[task.setvariable variable=NUGET_ARTIFACT_PATH;]$nugetArtifactPath"
.\\nuget.exe pack site-extension\\BatchShipyard.nuspec -Version $env:NUGET_PACKAGE_VERSION
[Reflection.Assembly]::LoadWithPartialName('System.IO.Compression.FileSystem')
$zip = [IO.Compression.ZipFile]::OpenRead($nugetArtifact)
@@ -124,10 +140,17 @@ jobs:
[string]::Format("{0}: {1} -> {2}", $entry.FullName, $entry.Length, $entry.CompressedLength)
}
$zip.Dispose()
- move $nugetArtifact bin\\$nugetArtifact
+ move $nugetArtifact $nugetArtifactPath
dir bin
displayName: Nuget Pack
condition: and(succeeded(), ne(variables['NUGET_PACKAGE_VERSION'], ''))
+ - template: ./sign-nuget.yml
+ parameters:
+ enabled: ne(variables['NUGET_ARTIFACT_PATH'], '')
+ - script: |
+ blobxfer upload --storage-account "$(blobxfer.storageAccount)" --storage-account-key "$(blobxfer.storageAccountKey)" --remote-path "%ARTIFACT_UPLOAD_PATH%" --local-path "%NUGET_ARTIFACT_PATH%" --strip-components 1 --file-md5 --overwrite
+ displayName: Nuget Artifact Upload
+ condition: and(succeeded(), ne(variables['NUGET_ARTIFACT_PATH'], ''))
- powershell: |
$versionTag = "bin\\version_tag.txt"
$env:GIT_TAG | Out-File $versionTag -Force -NoNewline -Encoding ASCII
@@ -140,7 +163,7 @@ jobs:
inputs:
pathtoPublish: bin
artifactName: BatchShipyard-Windows
- condition: and(succeeded(), ne(variables['ARTIFACT_CLI'], ''))
+ condition: and(succeeded(), ne(variables['ARTIFACT_CLI_PATH'], ''))
- powershell: |
docker version
docker login "$(docker.servername)" -u="$(docker.username)" -p="$(docker.password)"
@@ -156,17 +179,30 @@ jobs:
popd
displayName: Docker build
condition: and(succeeded(), ne(variables['DOCKER_TAG'], ''))
+ enabled: false
- job: Linux
pool:
- vmImage: ubuntu-16.04
+ vmImage: ubuntu-20.04
strategy:
- maxParallel: 1
+ maxParallel: 0
matrix:
+ Python36:
+ python.version: '3.6'
+ PYENV_VERSION: $(PY36_VER)
+ PYTOX_ENV: 'py36'
Python37:
python.version: '3.7'
- PYENV_VERSION: '3.7.5'
- VENV_VERSION: '16.2.0'
+ PYENV_VERSION: $(PY37_VER)
+ PYTOX_ENV: 'py37'
+ Python38:
+ python.version: '3.8'
+ PYENV_VERSION: $(PY38_VER)
+ PYTOX_ENV: 'py38'
+ Python39:
+ python.version: '3.9'
+ PYENV_VERSION: $(PY39_VER)
+ PYTOX_ENV: 'py39'
steps:
- task: UsePythonVersion@0
inputs:
@@ -176,19 +212,12 @@ jobs:
set -e
set -o pipefail
env
- curl -fSsL "https://storage.googleapis.com/shellcheck/shellcheck-stable.linux.x86_64.tar.xz" | tar -xJvp -C /tmp/
+ curl -fSsL "https://github.com/koalaman/shellcheck/releases/download/stable/shellcheck-stable.linux.x86_64.tar.xz" | tar -xJvp -C /tmp/
echo "##vso[task.prependpath]/tmp/shellcheck-stable"
- if [[ "$PYENV_VERSION" == 3.* ]]; then
- which python3
- python3 --version
- echo "##vso[task.setvariable variable=PYTHON;]python3"
- echo "##vso[task.setvariable variable=PIP;]pip3"
- else
- which python
- python --version
- echo "##vso[task.setvariable variable=PYTHON;]python"
- echo "##vso[task.setvariable variable=PIP;]pip"
- fi
+ which python3
+ python3 --version
+ echo "##vso[task.setvariable variable=PYTHON;]python3"
+ echo "##vso[task.setvariable variable=PIP;]pip3"
displayName: Initialize Build
- script: |
set -e
@@ -199,16 +228,15 @@ jobs:
$PYTHON -m pip install --upgrade pip
$PIP install --upgrade setuptools wheel
$PIP install -r requirements.txt
- $PIP install --no-deps -r req_nodeps.txt
- $PIP install flake8==3.6.0
+ $PIP install flake8
$PIP list --format=columns --outdated
displayName: Fetch Dependencies
- script: |
set -e
set -o pipefail
- flake8 --select F,E,W --ignore W504 --statistics shipyard.py convoy/*.py
- if [[ "$PYENV_VERSION" == 3.7* ]]; then
- flake8 --select F,E,W --ignore W504 --statistics cascade/*.py cargo/*.py federation/*.py heimdall/*.py slurm/*.py
+ flake8 --exit-zero --select F,E,W --ignore W504 --statistics shipyard.py convoy/*.py
+ if [[ "$PYENV_VERSION" == "$PY39_VER" ]]; then
+ flake8 --exit-zero --select F,E,W --ignore W504 --statistics cascade/*.py cargo/*.py federation/*.py heimdall/*.py slurm/*.py
fi
shellcheck ./*.sh cargo/*.sh cascade/*.sh images/*.sh scripts/*.sh
displayName: Static Analysis
@@ -228,7 +256,7 @@ jobs:
displayName: Pre-build Environment (Branch)
condition: >
and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI', 'Manual'),
- in(variables['Build.SourceBranchName'], 'master', 'develop'), eq(variables['python.version'], '3.7'))
+ in(variables['Build.SourceBranchName'], 'master', 'develop'), eq(variables['python.version'], '3.9'))
- script: |
GIT_TAG=$(git tag -l --points-at $BUILD_SOURCEVERSION)
ARTIFACT_CLI="batch-shipyard-${GIT_TAG}-cli-linux-x86_64"
@@ -240,41 +268,10 @@ jobs:
displayName: Pre-build Environment (Tagged Release)
condition: >
and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI', 'Manual'),
- startsWith(variables['Build.SourceBranch'], 'refs/tags/'), eq(variables['python.version'], '3.7'))
- - script: |
- set -e
- set -o pipefail
- docker version
- docker login "$(docker.servername)" -u="$(docker.username)" -p="$(docker.password)"
- export DOCKER_CLI_EXPERIMENTAL=enabled
- singularity_version=$(grep -m1 _SINGULARITY_VERSION convoy/misc.py | cut -d "'" -f 2)
- echo "Replicating Singularity version $singularity_version images to MCR"
- chkImage=mcr.microsoft.com/azure-batch/shipyard:${singularity_version}-singularity-mnt
- set +e
- if docker manifest inspect "$chkImage"; then
- echo "$chkImage exists, skipping replication"
- else
- set -e
- dhImage="alfpark/singularity:${singularity_version}-mnt"
- mcrImage="$(docker.servername)/public/azure-batch/shipyard:${singularity_version}-singularity-mnt"
- docker pull "$dhImage"
- docker tag "$dhImage" "$mcrImage"
- docker push "$mcrImage"
- fi
- chkImage=mcr.microsoft.com/azure-batch/shipyard:${singularity_version}-singularity-mnt-resource
- set +e
- if docker manifest inspect "$chkImage"; then
- echo "$chkImage exists, skipping replication"
- else
- set -e
- dhImage="alfpark/singularity:${singularity_version}-mnt-resource"
- mcrImage="$(docker.servername)/public/azure-batch/shipyard:${singularity_version}-singularity-mnt-resource"
- docker pull "$dhImage"
- docker tag "$dhImage" "$mcrImage"
- docker push "$mcrImage"
- fi
- displayName: Replicate Singularity Container Images
- condition: and(succeeded(), ne(variables['ARTIFACT_CLI'], ''))
+ startsWith(variables['Build.SourceBranch'], 'refs/tags/'), eq(variables['python.version'], '3.9'))
+ - template: replicate-singularity.yml
+ parameters:
+ enabled: ne(variables['ARTIFACT_CLI'], '')
- template: ./pyenv.yml
- script: |
set -e
@@ -291,7 +288,6 @@ jobs:
"set -e; source pyi/bin/activate; \
pip install pyinstaller; \
pip install --upgrade -r requirements.txt; \
- pip install --upgrade --no-deps -r req_nodeps.txt; \
pyinstaller -F -n ${ARTIFACT_CLI} -p batch-shipyard --add-data federation/docker-compose.yml:federation --add-data heimdall:heimdall --add-data schemas:schemas --add-data scripts:scripts --exclude-module future.tests --exclude-module future.backports.test --exclude-module future.moves.test --distpath bin shipyard.py; \
deactivate"
chmod +x ${ARTIFACT_CLI_PATH}
@@ -349,17 +345,19 @@ jobs:
popd
displayName: Docker Build
condition: and(succeeded(), ne(variables['DOCKER_TAG'], ''))
+ enabled: false
- job: MacOS
+ condition: false
pool:
- vmImage: macOS-10.13
+ vmImage: macOS-10.15
strategy:
- maxParallel: 1
+ maxParallel: 0
matrix:
- Python37:
- python.version: '3.7'
- PYENV_VERSION: '3.7.5'
- VENV_VERSION: '16.2.0'
+ Python39:
+ python.version: '3.9'
+ PYENV_VERSION: $(PY39_VER)
+ PYTOX_ENV: 'py39'
steps:
- task: UsePythonVersion@0
inputs:
@@ -369,17 +367,10 @@ jobs:
set -e
set -o pipefail
env
- if [[ "$PYENV_VERSION" == 3.* ]]; then
- which python3
- python3 --version
- echo "##vso[task.setvariable variable=PYTHON;]python3"
- echo "##vso[task.setvariable variable=PIP;]pip3"
- else
- which python
- python --version
- echo "##vso[task.setvariable variable=PYTHON;]python"
- echo "##vso[task.setvariable variable=PIP;]pip"
- fi
+ which python3
+ python3 --version
+ echo "##vso[task.setvariable variable=PYTHON;]python3"
+ echo "##vso[task.setvariable variable=PIP;]pip3"
displayName: Initialize Build
- script: |
set -e
@@ -389,16 +380,15 @@ jobs:
$PYTHON -m pip install --upgrade pip
$PIP install --upgrade setuptools wheel
$PIP install -r requirements.txt
- $PIP install --no-deps -r req_nodeps.txt
- $PIP install flake8==3.6.0
+ $PIP install flake8
$PIP list --format=columns --outdated
displayName: Fetch Dependencies
- script: |
set -e
set -o pipefail
- flake8 --select F,E,W --ignore W504 --statistics shipyard.py convoy/*.py
- if [[ "$PYENV_VERSION" == 3.7* ]]; then
- flake8 --select F,E,W --ignore W504 --statistics cascade/*.py cargo/*.py federation/*.py heimdall/*.py slurm/*.py
+ flake8 --exit-zero --select F,E,W --ignore W504 --statistics shipyard.py convoy/*.py
+ if [[ "$PYENV_VERSION" == "$PY39_VER" ]]; then
+ flake8 --exit-zero --select F,E,W --ignore W504 --statistics cascade/*.py cargo/*.py federation/*.py heimdall/*.py slurm/*.py
fi
displayName: Static Analysis
- script: |
@@ -412,7 +402,7 @@ jobs:
displayName: Pre-build Environment (Branch)
condition: >
and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI', 'Manual'),
- in(variables['Build.SourceBranchName'], 'master', 'develop'), eq(variables['python.version'], '3.7'))
+ in(variables['Build.SourceBranchName'], 'master', 'develop'), eq(variables['python.version'], '3.9'))
- script: |
GIT_TAG=$(git tag -l --points-at $BUILD_SOURCEVERSION)
ARTIFACT_CLI="batch-shipyard-${GIT_TAG}-cli-mac-x86_64"
@@ -423,7 +413,7 @@ jobs:
displayName: Pre-build Environment (Tagged Release)
condition: >
and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI', 'Manual'),
- startsWith(variables['Build.SourceBranch'], 'refs/tags/'), eq(variables['python.version'], '3.7'))
+ startsWith(variables['Build.SourceBranch'], 'refs/tags/'), eq(variables['python.version'], '3.9'))
- template: ./pyenv.yml
- script: |
set -e
@@ -440,7 +430,6 @@ jobs:
"set -e; source pyi/bin/activate; \
pip install pyinstaller; \
pip install --upgrade -r requirements.txt; \
- pip install --upgrade --no-deps -r req_nodeps.txt; \
pyinstaller -F -n ${ARTIFACT_CLI} -p batch-shipyard --add-data federation/docker-compose.yml:federation --add-data heimdall:heimdall --add-data schemas:schemas --add-data scripts:scripts --exclude-module future.tests --exclude-module future.backports.test --exclude-module future.moves.test --distpath bin shipyard.py; \
deactivate"
chmod +x ${ARTIFACT_CLI_PATH}
diff --git a/.vsts/replicate-singularity.yml b/.vsts/replicate-singularity.yml
new file mode 100644
index 00000000..d6a15aaa
--- /dev/null
+++ b/.vsts/replicate-singularity.yml
@@ -0,0 +1,38 @@
+parameters:
+ enabled: false
+
+steps:
+ - script: |
+ set -e
+ set -o pipefail
+ docker version
+ docker login "$(docker.servername)" -u="$(docker.username)" -p="$(docker.password)"
+ export DOCKER_CLI_EXPERIMENTAL=enabled
+ singularity_version=$(grep -m1 _SINGULARITY_VERSION convoy/misc.py | cut -d "'" -f 2)
+ echo "Replicating Singularity version $singularity_version images to MCR"
+ chkImage=mcr.microsoft.com/azure-batch/shipyard:${singularity_version}-singularity-mnt
+ set +e
+ if docker manifest inspect "$chkImage"; then
+ echo "$chkImage exists, skipping replication"
+ else
+ set -e
+ dhImage="alfpark/singularity:${singularity_version}-mnt"
+ mcrImage="$(docker.servername)/public/azure-batch/shipyard:${singularity_version}-singularity-mnt"
+ docker pull "$dhImage"
+ docker tag "$dhImage" "$mcrImage"
+ docker push "$mcrImage"
+ fi
+ chkImage=mcr.microsoft.com/azure-batch/shipyard:${singularity_version}-singularity-mnt-resource
+ set +e
+ if docker manifest inspect "$chkImage"; then
+ echo "$chkImage exists, skipping replication"
+ else
+ set -e
+ dhImage="alfpark/singularity:${singularity_version}-mnt-resource"
+ mcrImage="$(docker.servername)/public/azure-batch/shipyard:${singularity_version}-singularity-mnt-resource"
+ docker pull "$dhImage"
+ docker tag "$dhImage" "$mcrImage"
+ docker push "$mcrImage"
+ fi
+ displayName: Replicate Singularity Container Images
+ condition: ${{ parameters.enabled }}
diff --git a/.vsts/sign-exec.yml b/.vsts/sign-exec.yml
new file mode 100644
index 00000000..78b4b3ab
--- /dev/null
+++ b/.vsts/sign-exec.yml
@@ -0,0 +1,39 @@
+parameters:
+ enabled: false
+ folder: ./bin
+ pattern: '*.dll,*.exe'
+
+steps:
+ - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
+ displayName: "Code Sign - Executables and Libraries"
+ condition: ${{ parameters.enabled }}
+ inputs:
+ ConnectedServiceName: 'ESRP CodeSign'
+ FolderPath: ${{ parameters.folder }}
+ Pattern: ${{ parameters.pattern }}
+ UseMinimatch: false
+ signConfigType: inlineSignParams
+ inlineOperation: |
+ [
+ {
+ "KeyCode" : "CP-230012",
+ "OperationCode" : "SigntoolSign",
+ "Parameters" : {
+ "OpusName" : "Microsoft",
+ "OpusInfo" : "http://www.microsoft.com",
+ "FileDigest" : "/fd \"SHA256\"",
+ "PageHash" : "/NPH",
+ "TimeStamp" : "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
+ },
+ "ToolName" : "sign",
+ "ToolVersion" : "1.0"
+ },
+ {
+ "KeyCode" : "CP-230012",
+ "OperationCode" : "SigntoolVerify",
+ "Parameters" : {},
+ "ToolName" : "sign",
+ "ToolVersion" : "1.0"
+ }
+ ]
+ SessionTimeout: 60
diff --git a/.vsts/sign-nuget.yml b/.vsts/sign-nuget.yml
new file mode 100644
index 00000000..40e99e1c
--- /dev/null
+++ b/.vsts/sign-nuget.yml
@@ -0,0 +1,33 @@
+parameters:
+ enabled: false
+ folder: ./bin
+ pattern: '*.nupkg'
+
+steps:
+ - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
+ displayName: "Code Sign - Nuget Packages"
+ condition: ${{ parameters.enabled }}
+ inputs:
+ ConnectedServiceName: 'ESRP CodeSign'
+ FolderPath: ${{ parameters.folder }}
+ Pattern: ${{ parameters.pattern }}
+ UseMinimatch: false
+ signConfigType: inlineSignParams
+ inlineOperation: |
+ [
+ {
+ "KeyCode" : "CP-401405",
+ "OperationCode" : "NuGetSign",
+ "Parameters" : {},
+ "ToolName" : "sign",
+ "ToolVersion" : "1.0"
+ },
+ {
+ "KeyCode" : "CP-401405",
+ "OperationCode" : "NuGetVerify",
+ "Parameters" : {},
+ "ToolName" : "sign",
+ "ToolVersion" : "1.0"
+ }
+ ]
+ SessionTimeout: 60
diff --git a/README.md b/README.md
index 097484af..1f2c42a8 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,4 @@
[![Build Status](https://azurebatch.visualstudio.com/batch-shipyard/_apis/build/status/batch-shipyard-CI)](https://azurebatch.visualstudio.com/batch-shipyard/_build/latest?definitionId=11)
-[![Build Status](https://travis-ci.org/Azure/batch-shipyard.svg?branch=master)](https://travis-ci.org/Azure/batch-shipyard)
[![Build status](https://ci.appveyor.com/api/projects/status/3a0j0gww57o6nkpw/branch/master?svg=true)](https://ci.appveyor.com/project/alfpark/batch-shipyard)
# Batch Shipyard
diff --git a/THIRD_PARTY_NOTICES.txt b/THIRD_PARTY_NOTICES.txt
index cb75221a..b9d2ed8b 100644
--- a/THIRD_PARTY_NOTICES.txt
+++ b/THIRD_PARTY_NOTICES.txt
@@ -371,138 +371,6 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-------------------------------------------------------------------------------
-future (https://github.com/PythonCharmers/python-future)
-
-Copyright (c) 2013-2018 Python Charmers Pty Ltd, Australia
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
--------------------------------------------------------------------------------
-
-libtorrent (https://github.com/arvidn/libtorrent)
-
-Copyright (c) 2003-2016, Arvid Norberg
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
- * Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in
- the documentation and/or other materials provided with the distribution.
- * Neither the name of the author nor the names of its
- contributors may be used to endorse or promote products derived
- from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
-
-------------------------------------------------------------------------------
-
-puff.c
-Copyright (C) 2002, 2003 Mark Adler
-For conditions of distribution and use, see copyright notice in puff.h
-version 1.7, 3 Mar 2003
-
-puff.c is a simple inflate written to be an unambiguous way to specify the
-deflate format. It is not written for speed but rather simplicity. As a
-side benefit, this code might actually be useful when small code is more
-important than speed, such as bootstrap applications. For typical deflate
-data, zlib's inflate() is about four times as fast as puff(). zlib's
-inflate compiles to around 20K on my machine, whereas puff.c compiles to
-around 4K on my machine (a PowerPC using GNU cc). If the faster decode()
-function here is used, then puff() is only twice as slow as zlib's
-inflate().
-
-All dynamically allocated memory comes from the stack. The stack required
-is less than 2K bytes. This code is compatible with 16-bit int's and
-assumes that long's are at least 32 bits. puff.c uses the short data type,
-assumed to be 16 bits, for arrays in order to to conserve memory. The code
-works whether integers are stored big endian or little endian.
-
-In the comments below are "Format notes" that describe the inflate process
-and document some of the less obvious aspects of the format. This source
-code is meant to supplement RFC 1951, which formally describes the deflate
-format:
-
- http://www.zlib.org/rfc-deflate.html
-
-------------------------------------------------------------------------------
-
-GeoIP.c
-
-Copyright (C) 2006 MaxMind LLC
-
-This library is free software; you can redistribute it and/or
-modify it under the terms of the GNU Lesser General Public
-License as published by the Free Software Foundation; either
-version 2.1 of the License, or (at your option) any later version.
-
-This library is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-Lesser General Public License for more details.
-
-You should have received a copy of the GNU Lesser General Public
-License along with this library; if not, write to the Free Software
-Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-
-------------------------------------------------------------------------------
-
-Boost Software License - Version 1.0 - August 17th, 2003
-
-Permission is hereby granted, free of charge, to any person or organization
-obtaining a copy of the software and accompanying documentation covered by
-this license (the "Software") to use, reproduce, display, distribute,
-execute, and transmit the Software, and to prepare derivative works of the
-Software, and to permit third-parties to whom the Software is furnished to
-do so, all subject to the following:
-
-The copyright notices in the Software and this entire statement, including
-the above license grant, this restriction and the following disclaimer,
-must be included in all copies of the Software, in whole or in part, and
-all derivative works of the Software, unless such copies or derivative
-works are solely in the form of machine-executable object code generated by
-a source language processor.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
-SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
-FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
-ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
-
--------------------------------------------------------------------------------
-
msrest (https://github.com/Azure/msrest-for-python)
MIT License
@@ -645,6 +513,17 @@ direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
@@ -659,7 +538,7 @@ analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation;
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021 Python Software Foundation;
All Rights Reserved" are retained in Python alone or in any derivative version
prepared by Licensee.
@@ -839,6 +718,20 @@ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
-------------------------------------------------------------------------------
python-dateutil (https://github.com/dateutil/dateutil)
@@ -902,27 +795,189 @@ The above BSD License Applies to all code, even that also covered by Apache 2.0.
requests (https://github.com/requests/requests)
-Copyright 2018 Kenneth Reitz
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
- https://www.apache.org/licenses/LICENSE-2.0
+ 1. Definitions.
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
-------------------------------------------------------------------------------
-ruamel.yaml (https://bitbucket.org/ruamel/yaml)
+ruamel.yaml (https://sourceforge.net/p/ruamel-yaml/code/ci/default/tree/)
The MIT License (MIT)
- Copyright (c) 2014-2019 Anthon van der Neut, Ruamel bvba
+ Copyright (c) 2014-2021 Anthon van der Neut, Ruamel bvba
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -944,7 +999,7 @@ ruamel.yaml (https://bitbucket.org/ruamel/yaml)
-------------------------------------------------------------------------------
-singularity (https://github.com/sylabs/singularity)
+singularity (https://github.com/hpcng/singularity)
Copyright (c) 2015-2017, Gregory M. Kurtzer. All rights reserved.
Copyright (c) 2016-2017, The Regents of the University of California. All right reserved.
diff --git a/appveyor.yml b/appveyor.yml
index 13acff8c..7253d750 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -7,35 +7,41 @@ cache:
environment:
matrix:
- - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- PYTHON: "C:\\Python27-x64"
- PYTHON_VERSION: "2.7"
+ - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+ PYTHON: "C:\\Python37-x64"
+ PYTHON_VERSION: "3.7"
PYTHON_ARCH: "64"
- - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- PYTHON: "C:\\Python35-x64"
- PYTHON_VERSION: "3.5"
+ TOX_ENV: "py37"
+ - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
+ PYTHON: "C:\\Python38-x64"
+ PYTHON_VERSION: "3.8"
PYTHON_ARCH: "64"
- - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
- PYTHON: "C:\\Python36-x64"
- PYTHON_VERSION: "3.6"
+ TOX_ENV: "py38"
+ - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
+ PYTHON: "C:\\Python39-x64"
+ PYTHON_VERSION: "3.9"
PYTHON_ARCH: "64"
+ TOX_ENV: "py39"
init:
- echo %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%
install:
+#- curl -fsSL -o rustup-init.exe https://static.rust-lang.org/rustup/dist/x86_64-pc-windows-msvc/rustup-init.exe
+#- rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain stable
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
+#- where rustc
+- python -m pip install -U pip
- where pip
- pip install --upgrade setuptools wheel
- pip install -r requirements.txt
-- pip install --no-deps -r req_nodeps.txt
-- pip install flake8==3.6.0
+- pip install flake8
- pip list --format=columns --outdated
build: off
test_script:
-- flake8 --select F,E,W --ignore W504 --statistics shipyard.py convoy\\*.py
-- IF "%PYTHON_VERSION%" GEQ "3.5" (
- flake8 --select F,E,W --ignore W504 --statistics cascade\\*.py cargo\\*.py federation\\*.py heimdall\\*.py slurm\\*.py
+- flake8 --exit-zero --select F,E,W --ignore W504 --statistics shipyard.py convoy
+- IF "%PYTHON_VERSION%" GEQ "3.9" (
+ flake8 --exit-zero --select F,E,W --ignore W504 --statistics cascade cargo federation heimdall slurm
)
diff --git a/cargo/requirements.txt b/cargo/requirements.txt
index 2ee54c36..64580b4a 100644
--- a/cargo/requirements.txt
+++ b/cargo/requirements.txt
@@ -1,4 +1,4 @@
-azure-batch==8.0.0
-msrest==0.6.10
-requests==2.22.0
-ruamel.yaml==0.16.5
+azure-batch==9.0.0
+msrest==0.6.21
+requests>=2.26.0,<3
+ruamel.yaml>=0.17.16,<1
diff --git a/cascade/requirements.txt b/cascade/requirements.txt
index a6418bf0..8c0e62ce 100644
--- a/cascade/requirements.txt
+++ b/cascade/requirements.txt
@@ -1,2 +1,2 @@
azure-cosmosdb-table==1.0.6
-azure-storage-blob==2.1.0
+azure-storage-blob>=2.1.0,<3
diff --git a/convoy/aad.py b/convoy/aad.py
index 9ac93cda..f33cb48b 100644
--- a/convoy/aad.py
+++ b/convoy/aad.py
@@ -22,22 +22,11 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import datetime
-import io
import json
import logging
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import os
# non-stdlib imports
import adal
@@ -149,20 +138,10 @@ def signed_session(self):
if cache_token and util.is_not_empty(self._token_cache_file):
logger.debug('storing token to local cache: {}'.format(
self._token_cache_file))
- if util.on_python2():
- with io.open(
- self._token_cache_file,
- 'w', encoding='utf8') as fd:
- fd.write(json.dumps(
- self._token, indent=4, sort_keys=True,
- ensure_ascii=False))
- else:
- with open(
- self._token_cache_file,
- 'w', encoding='utf8') as fd:
- json.dump(
- self._token, fd, indent=4, sort_keys=True,
- ensure_ascii=False)
+ with open(self._token_cache_file, 'w', encoding='utf8') as fd:
+ json.dump(
+ self._token, fd, indent=4, sort_keys=True,
+ ensure_ascii=False)
if not util.on_windows():
os.chmod(self._token_cache_file, 0o600)
except adal.AdalError as err:
diff --git a/convoy/autoscale.py b/convoy/autoscale.py
index f01a0898..f4795d5a 100644
--- a/convoy/autoscale.py
+++ b/convoy/autoscale.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import collections
# non-stdlib imports
diff --git a/convoy/batch.py b/convoy/batch.py
index b349c9ad..7a0a9922 100644
--- a/convoy/batch.py
+++ b/convoy/batch.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import codecs
import collections
@@ -40,10 +33,7 @@
import logging
import multiprocessing
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import ssl
import sys
import time
@@ -299,6 +289,9 @@ def list_supported_images(
image.image_reference.publisher.lower() not in
settings.get_valid_publishers()):
continue
+ if (not show_unrelated and
+ not settings.check_for_container_capability(image)):
+ continue
if image.image_reference.publisher not in image_map[os_type]:
image_map[os_type][image.image_reference.publisher] = {}
if (image.image_reference.offer not in
@@ -660,7 +653,8 @@ def _block_for_nodes_ready(
errors = []
for err in pool.resize_errors:
errors.append('{}: {}'.format(err.code, err.message))
- if (err.code == 'AccountCoreQuotaReached' or
+ if (err.code == 'AllocationFailed' or
+ err.code == 'AccountCoreQuotaReached' or
(err.code == 'AccountLowPriorityCoreQuotaReached' and
pool.target_dedicated_nodes == 0) or
(err.code == 'AllocationTimedout' and
@@ -669,6 +663,12 @@ def _block_for_nodes_ready(
pool.allocation_state ==
batchmodels.AllocationState.steady)):
fatal_resize_error = True
+ if util.is_not_empty(pool.resize_errors):
+ for err in pool.resize_errors:
+ if util.is_not_empty(err.values):
+ for de in err.values:
+ errors.append('{}: {}'.format(
+ de.name, de.value))
if fatal_resize_error:
pool_stats(batch_client, config, pool_id=pool_id)
raise RuntimeError(
@@ -1286,7 +1286,8 @@ def list_pools(batch_client, config):
errors.append(' * {}: {}'.format(err.code, err.message))
if util.is_not_empty(err.values):
for de in err.values:
- de.append(' * {}: {}'.format(de.name, de.value))
+ errors.append(' * {}: {}'.format(
+ de.name, de.value))
else:
errors = [' * no resize errors']
entry = [
@@ -3110,10 +3111,7 @@ def get_remote_login_settings(
pool_id, node.id)
for node_id in futures:
ret[node_id] = futures[node_id].result()
- if util.on_python2():
- ret = collections.OrderedDict(sorted(ret.iteritems()))
- else:
- ret = collections.OrderedDict(sorted(ret.items()))
+ ret = collections.OrderedDict(sorted(ret.items()))
if not suppress_output:
for node_id in ret:
logger.info('node {}: ip {} port {}'.format(
@@ -5239,7 +5237,7 @@ def add_jobs(
# catalog multi instance tasks for cleanup
if settings.is_multi_instance_task(task):
has_multi_instance = True
- if not native:
+ if not native and util.is_not_empty(di):
mi_docker_container_names.add(
util.normalize_docker_image_name_for_job(job_id, di))
del di
@@ -5569,6 +5567,9 @@ def add_jobs(
if ('The specified job is already in a completed state.' in
ex.message.value):
if recreate:
+ logger.debug(
+ 'detected completed job {} with recreate '
+ 'flag'.format(job_id))
# get job state
_job = batch_client.job.get(job_id)
if _job.state == batchmodels.JobState.completed:
@@ -5739,12 +5740,34 @@ def add_jobs(
if util.is_none_or_empty(federation_id):
logger.info('Adding jobschedule {} to pool {}'.format(
job_id, pool.id))
+ _del_js_rf = False
try:
batch_client.job_schedule.add(jobschedule)
- except Exception:
+ except Exception as ex:
+ _del_js_rf = True
+ if ('The specified job schedule is already in '
+ 'completed state.' in ex.message.value):
+ if recreate:
+ logger.debug(
+ 'detected completed job schedule {} with '
+ 'recreate flag'.format(job_id))
+ # get job schedule state
+ _js = batch_client.job_schedule.get(job_id)
+ if (_js.state ==
+ batchmodels.JobScheduleState.completed):
+ delete_or_terminate_jobs(
+ batch_client, config, True,
+ jobscheduleid=job_id, wait=True)
+ time.sleep(1)
+ batch_client.job_schedule.add(jobschedule)
+ _del_js_rf = False
+ else:
+ raise
+ finally:
# delete uploaded task map
- storage.delete_resource_file(blob_client, taskmaploc)
- raise
+ if _del_js_rf:
+ storage.delete_resource_file(blob_client, taskmaploc)
+ del _del_js_rf
else:
if storage.check_if_job_exists_in_federation(
table_client, federation_id, jobschedule.id):
diff --git a/convoy/clients.py b/convoy/clients.py
index 26d4c499..a958725e 100644
--- a/convoy/clients.py
+++ b/convoy/clients.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import logging
# non-stdlib imports
diff --git a/convoy/crypto.py b/convoy/crypto.py
index 0c942e59..e4b4a38d 100644
--- a/convoy/crypto.py
+++ b/convoy/crypto.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import base64
import collections
@@ -36,10 +29,7 @@
import getpass
import logging
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import tempfile
import stat
import subprocess
diff --git a/convoy/data.py b/convoy/data.py
index 82b65c00..c914834d 100644
--- a/convoy/data.py
+++ b/convoy/data.py
@@ -22,27 +22,14 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import datetime
import fnmatch
import logging
import math
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
-try:
- from shlex import quote as shellquote
-except ImportError:
- from pipes import quote as shellquote
+import pathlib
+from shlex import quote as shellquote
import threading
import time
# non-stdlib imports
@@ -59,7 +46,7 @@
logger = logging.getLogger(__name__)
util.setup_logger(logger)
# global defines
-_BLOBXFER_VERSION = '1.9.4'
+_BLOBXFER_VERSION = '1.11.0'
_MEGABYTE = 1048576
_MAX_READ_BLOCKSIZE_BYTES = 4194304
_FILE_SPLIT_PREFIX = '_shipyard-'
diff --git a/convoy/federation.py b/convoy/federation.py
index 9ce8a698..d61c305b 100644
--- a/convoy/federation.py
+++ b/convoy/federation.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import functools
import logging
diff --git a/convoy/fleet.py b/convoy/fleet.py
index 2fd4e4f1..180e49d4 100644
--- a/convoy/fleet.py
+++ b/convoy/fleet.py
@@ -22,21 +22,11 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import concurrent.futures
import logging
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import requests
import sys
import tempfile
@@ -69,36 +59,26 @@
_ROOT_PATH = pathlib.Path(__file__).resolve().parent.parent
_RESOURCES_PATH = None
_NVIDIA_DRIVER = {
- 'compute_cc37': {
- 'url': (
- 'http://us.download.nvidia.com/tesla/'
- '418.87/NVIDIA-Linux-x86_64-418.87.01.run'
- ),
- 'sha256': (
- 'fe6dcba384d67e906fad3cdc707fb6b0993cf190fc694660b70224d49a69144f'
- ),
- 'target': 'nvidia-driver_cc37.run'
- },
- 'compute_cc6-7': {
+ 'compute': {
'url': (
- 'http://us.download.nvidia.com/tesla/'
- '418.87/NVIDIA-Linux-x86_64-418.87.01.run'
+ 'https://us.download.nvidia.com/tesla/'
+ '470.57.02/NVIDIA-Linux-x86_64-470.57.02.run'
),
'sha256': (
- 'fe6dcba384d67e906fad3cdc707fb6b0993cf190fc694660b70224d49a69144f'
+ '55d7ae104827faa79e975321fe2b60f9dd42fbff65642053443c0e56fdb4c47d'
),
- 'target': 'nvidia-driver_cc6-7.run'
+ 'target': 'nvidia-driver-compute.run'
},
- 'viz_cc52': {
+ 'viz': {
# https://aka.ms/nvgrid-linux
# https://go.microsoft.com/fwlink/?linkid=874272
'url': (
- 'https://download.microsoft.com/download/1/a/5/'
- '1a537cae-5b52-4348-acd2-2f210fc412b0/'
- 'NVIDIA-Linux-x86_64-430.46-grid.run'
+ 'https://download.microsoft.com/download/b/d/d/'
+ 'bdd729ee-5003-4427-ace4-a7b9172b2e29/'
+ 'NVIDIA-Linux-x86_64-470.63.01-grid-azure.run'
),
'sha256': (
- 'a06b13c36c8c203babf7e681f1b83e7d7967c76e4bdb71ba4a2cd80dad502bfd'
+ '71852dc93f8c28b289db8b6ac8d41fae6139d63bb16b4f4afc91dc8b2418cdd6'
),
'target': 'nvidia-driver-grid.run'
},
@@ -125,10 +105,10 @@
'url': (
'http://download.microsoft.com/download/6/8/F/'
'68FE11B8-FAA4-4F8D-8C7D-74DA7F2CFC8C/'
- 'lis-rpms-4.3.4.x86_64.tar.gz'
+ 'lis-rpms-4.3.5.x86_64.tar.gz'
),
'sha256': (
- '5317176c536c6f013b5090c20e1a6045df155ac927afbcd1fb5059e49d19bc2b'
+ '2fcfd68473fc3a46c97688acf6702a966561d02ec7b08c6b9ca668d54c83f20d'
),
'target': 'lis.tar.gz',
'intermediate': 'lis_compact.tar',
@@ -450,7 +430,8 @@ def _download_file(desc, pkg, dldict):
:param dict dldict: download dict
"""
logger.debug('downloading {} to {}'.format(desc, dldict['target']))
- response = requests.get(dldict['url'], stream=True)
+ response = requests.get(
+ dldict['url'], stream=True, headers={'User-Agent': 'Mozilla/5.0'})
with pkg.open('wb') as f:
for chunk in response.iter_content(chunk_size=_REQUEST_CHUNK_SIZE):
if chunk:
@@ -499,8 +480,8 @@ def _setup_intel_mpi_rt_package(config, pool_settings):
:rtype: pathlib.Path
:return: package path
"""
- # only for native ubuntu rdma
- if (not settings.is_rdma_pool(pool_settings.vm_size) or
+ # only for native ubuntu networkdirect rdma
+ if (not settings.is_networkdirect_rdma_pool(pool_settings.vm_size) or
not pool_settings.vm_configuration.offer ==
'ubuntu-server-container-rdma'):
return None
@@ -2655,42 +2636,36 @@ def _adjust_settings_for_pool_creation(config):
allowed = False
shipyard_container_required = True
if publisher == 'microsoft-azure-batch':
- if offer == 'centos-container':
- allowed = True
- elif offer == 'centos-container-rdma':
- allowed = True
- elif offer == 'ubuntu-server-container':
- allowed = True
- elif offer == 'ubuntu-server-container-rdma':
+ if (offer == 'centos-container' or
+ offer == 'centos-container-rdma' or
+ offer == 'ubuntu-server-container' or
+ offer == 'ubuntu-server-container-rdma'):
allowed = True
elif publisher == 'canonical':
if offer == 'ubuntuserver':
- if sku == '16.04-lts':
+ if sku == '18.04-lts':
allowed = True
shipyard_container_required = False
- elif sku == '18.04-lts':
+ elif offer == '0001-com-ubuntu-server-focal':
+ if sku == '20_04-lts':
allowed = True
shipyard_container_required = False
- elif publisher == 'credativ':
- if offer == 'debian':
- if sku >= '9':
+ elif publisher == 'debian':
+ if offer == 'debian-10':
+ if sku >= '10':
allowed = True
elif publisher == 'openlogic':
if offer.startswith('centos'):
- if sku >= '7':
+ if sku >= '7' and sku < '8':
allowed = True
elif publisher == 'microsoftwindowsserver':
if offer == 'windowsserver':
- if (sku == '2016-datacenter-with-containers' or
- sku == '2019-datacenter-with-containers' or
- sku == '2019-datacenter-with-containers-smalldisk' or
- sku == '2019-datacenter-core-with-containers' or
- sku == '2019-datacenter-core-with-containers-smalldisk'):
- allowed = True
- elif offer == 'windowsserversemiannual':
- if (sku == 'datacenter-core-1709-with-containers-smalldisk' or
- sku == 'datacenter-core-1803-with-containers-smalldisk' or
- sku == 'datacenter-core-1809-with-containers-smalldisk'):
+ if (sku.startswith('2016-datacenter-with-containers') or
+ sku.startswith('2019-datacenter-with-containers') or
+ sku.startswith('2022-datacenter-with-containers') or
+ sku.startswith('2019-datacenter-core-with-containers') or
+ sku.startswith('2022-datacenter-core-with-containers') or
+ sku.startswith('datacenter-core-20h2-with-containers')):
allowed = True
if (util.is_not_empty(node_agent) and
node_agent.lower().startswith('batch.node.ubuntu')):
@@ -2888,7 +2863,9 @@ def _adjust_settings_for_pool_creation(config):
if is_windows:
raise ValueError(
'Cannot install kata_containers runtime on Windows')
- if not ((publisher == 'canonical' and offer == 'ubuntuserver') or
+ if not ((publisher == 'canonical' and
+ (offer == 'ubuntuserver' or
+ offer.startswith('0001-com-ubuntu-server'))) or
(publisher == 'openlogic' and
offer.startswith('centos')) or
publisher == 'microsoft-azure-batch'):
diff --git a/convoy/keyvault.py b/convoy/keyvault.py
index 5490c92c..cb26228a 100644
--- a/convoy/keyvault.py
+++ b/convoy/keyvault.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import json
import logging
diff --git a/convoy/misc.py b/convoy/misc.py
index 6457b908..3c08b987 100644
--- a/convoy/misc.py
+++ b/convoy/misc.py
@@ -22,19 +22,9 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import logging
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import os
import time
import uuid
@@ -52,7 +42,7 @@
util.setup_logger(logger)
# global defines
-_SINGULARITY_VERSION = '3.5.0'
+_SINGULARITY_VERSION = '3.7.3'
_TENSORBOARD_LOG_ARGS = frozenset((
'--tensorboard_logdir', '-tensorboard_logdir', '--logdir', '--log_dir',
'--log-dir',
diff --git a/convoy/monitor.py b/convoy/monitor.py
index 5e22a4a6..0ea62f0d 100644
--- a/convoy/monitor.py
+++ b/convoy/monitor.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import functools
import logging
diff --git a/convoy/remotefs.py b/convoy/remotefs.py
index 1f57b22e..d39e9b39 100644
--- a/convoy/remotefs.py
+++ b/convoy/remotefs.py
@@ -22,22 +22,12 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import functools
import json
import logging
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
# non-stdlib imports
import azure.mgmt.compute.models as computemodels
import msrestazure.azure_exceptions
@@ -1120,13 +1110,9 @@ def resize_storage_cluster(
stdout, stderr = proc.communicate()
logline = 'add brick script completed with ec={}'.format(proc.returncode)
if util.is_not_empty(stdout):
- if util.on_python2():
- stdout = stdout.decode('utf8')
if util.on_windows():
stdout = stdout.replace('\n', os.linesep)
if util.is_not_empty(stderr):
- if util.on_python2():
- stderr = stderr.decode('utf8')
if util.on_windows():
stderr = stderr.replace('\n', os.linesep)
if proc.returncode != 0:
@@ -1351,13 +1337,9 @@ def expand_storage_cluster(
command=['sudo', script_cmd])
stdout, stderr = proc.communicate()
if util.is_not_empty(stdout):
- if util.on_python2():
- stdout = stdout.decode('utf8')
if util.on_windows():
stdout = stdout.replace('\n', os.linesep)
if util.is_not_empty(stderr):
- if util.on_python2():
- stderr = stderr.decode('utf8')
if util.on_windows():
stderr = stderr.replace('\n', os.linesep)
vms[offset]['status'] = proc.returncode
@@ -1891,13 +1873,9 @@ def stat_storage_cluster(
command=['sudo', script_cmd])
stdout = proc.communicate()[0]
if util.is_not_empty(stdout):
- if util.on_python2():
- stdout = stdout.decode('utf8')
if util.on_windows():
stdout = stdout.replace('\n', os.linesep)
fsstatfmt = '>> File Server Status for {} ec={}:{}{}'
- if util.on_python2():
- fsstatfmt = unicode(fsstatfmt) # noqa
fsstatus.append(
fsstatfmt.format(vm.name, proc.returncode, os.linesep, stdout))
vmstatus[vm.name] = {
diff --git a/convoy/resource.py b/convoy/resource.py
index af972102..46709dfc 100644
--- a/convoy/resource.py
+++ b/convoy/resource.py
@@ -22,22 +22,12 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import functools
import json
import logging
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import random
import time
# non-stdlib imports
diff --git a/convoy/settings.py b/convoy/settings.py
index c93516fd..9f969167 100644
--- a/convoy/settings.py
+++ b/convoy/settings.py
@@ -22,20 +22,10 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import collections
import datetime
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import re
# non-stdlib imports
import azure.batch.models as batchmodels
@@ -56,38 +46,29 @@
'/usr/local/lib/python2.7/dist-packages/tensorboard/main.py',
6006
)
-_GPU_CC37_INSTANCES = re.compile(
- # standard nc
- r'^standard_nc[\d]+r?(_promo)?$',
- re.IGNORECASE
-)
_GPU_COMPUTE_INSTANCES = re.compile(
- # standard nc, ncv2, ncv3, nd, ndv2
- r'^standard_n[cd][\d]+r?s?(_v[\d])?(_promo)?$',
+ # standard nc, ncv2, ncv3, nd, ndv2, ncas_t4_v3, ndasr_v4
+ r'^standard_n[cd][\d]+(as_t4)?(as)?r?s?(_v[\d])?(_promo)?$',
re.IGNORECASE
)
_GPU_VISUALIZATION_INSTANCES = re.compile(
- # standard nv, nvv2
- r'^standard_nv[\d]+s?(_v2)?(_promo)?$',
+ # standard nv, nvv3
+ r'^standard_nv[\d]+s?(_v3)?(_promo)?$',
re.IGNORECASE
)
_SRIOV_RDMA_INSTANCES = re.compile(
- # standard hb/hc
- r'^standard_((hb|hc)[\d]+m?rs?(_v[\d])?)$',
- re.IGNORECASE
-)
-_SRIOV_RDMA_TRANSITION_INSTANCES = re.compile(
- # standard nc+r_v3
- r'^standard_(nc[\d]+rs_v3)$',
+ # standard hb, hbv2, hbv3, hc, nc+r_v2, nc+r_v3, ndv1, ndv2, ndasr_v4
+ (r'^standard_(((hb|hc)[\d]+m?rs?(_v[\d])?)|'
+ r'(nc[\d]+rs_v[2-4])|(nd[\d]+(rs|rs_v2|asr_v4)))$'),
re.IGNORECASE
)
_NETWORKDIRECT_RDMA_INSTANCES = re.compile(
# standard a8/a9, h+r, nc+r, nd+r
- r'^standard_((a8|a9)|((h|nc|nd)[\d]+m?rs?(_v[1-3])?))(_promo)?$',
+ r'^standard_((a8|a9)|((h|nc|nd)[\d]+m?rs?))(_promo)?$',
re.IGNORECASE
)
_PREMIUM_STORAGE_INSTANCES = re.compile(
- r'^standard_(([a-z]+[\d]+.*s(_v[\d])?)|([dg]s[\d]+(_v2)?))$',
+ r'^standard_(([a-z]+[\d]+.*s(_v[\d])?)|([dg]s[\d]+(_v[\d]+)?))$',
re.IGNORECASE
)
_NESTED_VIRTUALIZATION_INSTANCES = re.compile(
@@ -143,9 +124,12 @@
'edr_ib': re.compile(r'^standard_(hc|hb)+[\d]+rs$', re.IGNORECASE),
}
_VALID_PUBLISHERS = frozenset((
- 'canonical', 'credativ', 'microsoft-azure-batch',
+ 'canonical', 'debian', 'microsoft-azure-batch',
'microsoftwindowsserver', 'openlogic'
))
+_VALID_PUBLISHERS_CHECK_CAPABILITIES = frozenset((
+ 'microsoft-azure-batch', 'microsoftwindowsserver'
+))
_SINGULARITY_COMMANDS = frozenset(('exec', 'run'))
_FORBIDDEN_MERGE_TASK_PROPERTIES = frozenset((
'depends_on', 'depends_on_range', 'multi_instance', 'task_factory'
@@ -622,6 +606,21 @@ def get_valid_publishers():
return _VALID_PUBLISHERS
+def check_for_container_capability(image):
+ # type: (batchmodels.Image) -> bool
+ """Check for container capability for publisher
+    :param batchmodels.Image image: image to check
+ :rtype: bool
+ :return: image has container capability or isn't in cap check set
+ """
+ if (image.image_reference.publisher.lower() not in
+ _VALID_PUBLISHERS_CHECK_CAPABILITIES):
+ return True
+ if image.capabilities is None:
+ return False
+ return 'DockerCompatible' in image.capabilities
+
+
def get_tensorboard_docker_image():
# type: (None) -> Tuple[str, str]
"""Get tensorboard docker image
@@ -754,12 +753,9 @@ def get_gpu_type_from_vm_size(vm_size):
:return: type of gpu and compute capability
"""
if is_gpu_compute_pool(vm_size):
- if _GPU_CC37_INSTANCES.match(vm_size):
- return 'compute_cc37'
- else:
- return 'compute_cc6-7'
+ return 'compute'
elif is_gpu_visualization_pool(vm_size):
- return 'viz_cc52'
+ return 'viz'
else:
return None
@@ -815,9 +811,11 @@ def gpu_configuration_check(config, vm_size=None):
sku = pool_sku(config, lower=True)
if publisher == 'microsoft-azure-batch':
return True
- elif (publisher == 'canonical' and offer == 'ubuntuserver' and
- sku > '16.04'):
- return True
+ elif publisher == 'canonical':
+ if offer == 'ubuntuserver' and sku == '18.04':
+ return True
+ elif offer.startswith('0001-com-ubuntu-server') and sku >= '20_04':
+ return True
elif publisher == 'openlogic':
if offer == 'centos-hpc' and sku >= '7.3':
return True
@@ -844,7 +842,9 @@ def is_lis_install_required(config, vm_size=None):
publisher = pool_publisher(config, lower=True)
offer = pool_offer(config, lower=True)
sku = pool_sku(config, lower=True)
- if publisher == 'openlogic' and offer == 'centos' and sku > '7.3':
+ # lis (4.3.5) is not needed for 7.8+
+ if (publisher == 'openlogic' and offer == 'centos' and
+ sku >= '7.4' and sku <= '7.7'):
return True
return False
@@ -885,10 +885,7 @@ def is_sriov_rdma_pool(vm_size):
:rtype: bool
:return: if sriov rdma is present
"""
- return (
- _SRIOV_RDMA_INSTANCES.match(vm_size) is not None or
- _SRIOV_RDMA_TRANSITION_INSTANCES.match(vm_size) is not None
- )
+ return _SRIOV_RDMA_INSTANCES.match(vm_size) is not None
def is_networkdirect_rdma_pool(vm_size):
@@ -898,10 +895,7 @@ def is_networkdirect_rdma_pool(vm_size):
:rtype: bool
:return: if network direct rdma is present
"""
- return (
- _NETWORKDIRECT_RDMA_INSTANCES.match(vm_size) is not None and
- _SRIOV_RDMA_TRANSITION_INSTANCES.match(vm_size) is None
- )
+ return _NETWORKDIRECT_RDMA_INSTANCES.match(vm_size) is not None
def is_rdma_pool(vm_size):
@@ -982,7 +976,8 @@ def temp_disk_mountpoint(config, offer=None):
offer = '!ubuntu'
else:
offer = offer.lower()
- if offer.startswith('ubuntu'):
+ if (offer.startswith('ubuntu') or
+ offer.startswith('0001-com-ubuntu-server')):
return '/mnt'
elif offer.startswith('windows'):
return 'D:\\batch'
@@ -1130,26 +1125,26 @@ def _populate_pool_vm_configuration(config):
if not vm_config.native and _kv_read(conf, 'native', default=False):
vm_size = _pool_vm_size(config)
if (vm_config.publisher == 'canonical' and
- vm_config.offer == 'ubuntuserver' and
- vm_config.sku == '16.04-lts'):
+ vm_config.offer == '0001-com-ubuntu-server-focal' and
+ vm_config.sku == '20_04-lts'):
vm_config = PoolVmPlatformImageSettings(
publisher='microsoft-azure-batch',
offer='ubuntu-server-container{}'.format(
'-rdma' if is_rdma_pool(vm_size) else ''),
- sku=vm_config.sku.replace('.', '-'),
+ sku=vm_config.sku.replace('_', '-'),
version='latest',
native=True,
license_type=None,
)
elif (vm_config.publisher == 'openlogic' and
vm_config.offer.startswith('centos') and
- (vm_config.sku == '7.4' or vm_config.sku == '7.5' or
- vm_config.sku == '7.6' or vm_config.sku == '7.7')):
+ ((vm_config.sku >= '7.4' and vm_config.sku <= '7.7') or
+ (vm_config.sku >= '7_8' and vm_config.sku < '8_'))):
vm_config = PoolVmPlatformImageSettings(
publisher='microsoft-azure-batch',
offer='centos-container{}'.format(
'-rdma' if is_rdma_pool(vm_size) else ''),
- sku=vm_config.sku.replace('.', '-'),
+ sku=vm_config.sku.replace('.', '-').replace('_', '-'),
version='latest',
native=True,
license_type=None,
@@ -5312,6 +5307,8 @@ def slurm_options_settings(config):
partitions = {}
part_conf = _kv_read_checked(conf, 'elastic_partitions')
for key in part_conf:
+ if key.lower() == 'default':
+ raise ValueError('elastic partition id cannot be named "default"')
part = _kv_read_checked(part_conf, key)
batch_pools = {}
pool_conf = _kv_read_checked(part, 'batch_pools', default={})
diff --git a/convoy/slurm.py b/convoy/slurm.py
index 8f9898d4..5b511593 100644
--- a/convoy/slurm.py
+++ b/convoy/slurm.py
@@ -22,22 +22,12 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import functools
import logging
import json
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import time
import uuid
# non-stdlib imports
@@ -103,8 +93,8 @@ def _apply_slurm_config_to_batch_pools(
'Cannot create a Slurm partition {} on a Windows '
'pool {}'.format(partname, pool.id))
elif (na_sku != 'batch.node.centos 7' and
- na_sku != 'batch.node.ubuntu 16.04' and
- na_sku != 'batch.node.ubuntu 18.04'):
+ na_sku != 'batch.node.ubuntu 18.04' and
+ na_sku != 'batch.node.ubuntu 20.04'):
raise RuntimeError(
'Cannot create a Slurm partition {} on pool {} with node '
'agent sku id {}'.format(partname, pool.id, na_sku))
diff --git a/convoy/storage.py b/convoy/storage.py
index 50320321..0b87a9bd 100644
--- a/convoy/storage.py
+++ b/convoy/storage.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import datetime
import json
diff --git a/convoy/task_factory.py b/convoy/task_factory.py
index e4ec94df..6e4025e0 100644
--- a/convoy/task_factory.py
+++ b/convoy/task_factory.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import collections
import copy
@@ -38,10 +31,7 @@
import importlib
import itertools
import random
-try:
- from urllib.parse import quote as urlquote
-except ImportError: # pramga: no cover
- from urllib import quote as urlquote
+from urllib.parse import quote as urlquote
# non-stdlib imports
import azure.storage.blob as azureblob
import azure.storage.file as azurefile
@@ -343,7 +333,10 @@ def generate_task(task, storage_settings):
args = module.generate()
for arg in args:
taskcopy = copy.copy(base_task_copy)
- taskcopy['command'] = taskcopy['command'].format(*arg)
+        if isinstance(arg, collections.abc.Mapping):
+ taskcopy['command'] = taskcopy['command'].format(**arg)
+ else:
+ taskcopy['command'] = taskcopy['command'].format(*arg)
yield taskcopy
elif 'file' in task_factory:
for file in _get_storage_entities(task_factory, storage_settings):
diff --git a/convoy/util.py b/convoy/util.py
index 15711f8f..0f63145d 100644
--- a/convoy/util.py
+++ b/convoy/util.py
@@ -22,13 +22,6 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import (
- absolute_import, division, print_function, unicode_literals
-)
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import base64
import copy
@@ -38,42 +31,19 @@
import logging
import logging.handlers
import os
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import platform
import socket
import struct
import subprocess
-try:
- from os import scandir as scandir
-except ImportError:
- from scandir import scandir as scandir
-import sys
import time
-# function remaps
-try:
- raw_input
-except NameError:
- raw_input = input
# global defines
-_PY2 = sys.version_info.major == 2
_ON_WINDOWS = platform.system() == 'Windows'
_REGISTERED_LOGGER_HANDLERS = []
-def on_python2():
- # type: (None) -> bool
- """Execution on python2
- :rtype: bool
- :return: if on Python2
- """
- return _PY2
-
-
def on_windows():
# type: (None) -> bool
"""Execution on Windows
@@ -175,7 +145,7 @@ def get_input(prompt):
:rtype: str
:return: user input
"""
- return raw_input(prompt)
+ return input(prompt)
def confirm_action(config, msg=None, allow_auto=True):
@@ -280,7 +250,7 @@ def scantree(path):
:rtype: DirEntry
:return: DirEntry via generator
"""
- for entry in scandir(path):
+ for entry in os.scandir(path):
if entry.is_dir(follow_symlinks=True):
# due to python2 compat, cannot use yield from here
for t in scantree(entry.path):
@@ -400,10 +370,7 @@ def base64_encode_string(string):
:rtype: str
:return: base64-encoded string
"""
- if on_python2():
- return base64.b64encode(string)
- else:
- return str(base64.b64encode(string), 'ascii')
+ return str(base64.b64encode(string), 'ascii')
def base64_decode_string(string):
diff --git a/convoy/validator.py b/convoy/validator.py
index 11d24cf1..eb62551e 100644
--- a/convoy/validator.py
+++ b/convoy/validator.py
@@ -24,19 +24,11 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
-# compat imports
-from __future__ import absolute_import, division, print_function
-from builtins import ( # noqa
- bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
- next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import enum
import logging
import sys
-try:
- import pathlib2 as pathlib
-except ImportError:
- import pathlib
+import pathlib
import warnings
# non-stdlib imports
import pykwalify.core
diff --git a/docs/01-batch-shipyard-installation.md b/docs/01-batch-shipyard-installation.md
index c8afaca8..a19ab0d9 100644
--- a/docs/01-batch-shipyard-installation.md
+++ b/docs/01-batch-shipyard-installation.md
@@ -2,9 +2,9 @@
There are multiple available options for installing Batch Shipyard. Please
pick an option that is most suitable for your work environment.
-* [Azure Cloud Shell](#cloudshell)
* [Pre-built binary](#binary)
* [Installers](#installers)
+* [Azure Cloud Shell](#cloudshell)
* [Docker image](#docker-install)
* [Singularity image](#singularity-install)
* [Jupyter Notebooks](#jupyter)
@@ -13,20 +13,6 @@ If you wish to install Batch Shipyard into your Azure App Service (e.g.,
Azure Function App) environment, please see
[this guide](60-batch-shipyard-site-extension.md).
-## Azure Cloud Shell
-Batch Shipyard is now integrated into
-[Azure Cloud Shell](https://docs.microsoft.com/azure/cloud-shell/overview)
-with no installation required. Simply request a Cloud Shell session and type
-`shipyard` to invoke the CLI. Data stored in your home directory or
-`clouddrive` will persist between Cloud Shell sessions.
-
-Note that Azure Cloud Shell may not have the most recent release of
-Batch Shipyard. You can see the version of Batch Shipyard installed with
-the command `shipyard --version`.
-
-If you wish to install Batch Shipyard on your machine, please proceed to the
-Installation section.
-
## Pre-built Binary
Download an appropriate [Release](https://github.com/Azure/batch-shipyard/releases)
binary for your operating system. Pre-built binaries are not available
@@ -43,6 +29,20 @@ code and run the install script to download and setup dependencies. This
is typically the most flexible and compatible installation outside of the
Docker image for the CLI.
+## Azure Cloud Shell
+Batch Shipyard is now integrated into
+[Azure Cloud Shell](https://docs.microsoft.com/azure/cloud-shell/overview)
+with no installation required. Simply request a Cloud Shell session and type
+`shipyard` to invoke the CLI. Data stored in your home directory or
+`clouddrive` will persist between Cloud Shell sessions.
+
+Note that Azure Cloud Shell may not have the most recent release of
+Batch Shipyard. You can see the version of Batch Shipyard installed with
+the command `shipyard --version`.
+
+If you wish to install Batch Shipyard on your machine, please proceed to the
+Installation section.
+
### Step 1: Acquire Batch Shipyard
Clone the repository:
```shell
@@ -70,11 +70,7 @@ a variety of recent Linux distributions. This installation script can be used
regardless of if you obtained Batch Shipyard through `git clone` or
downloading a release package.
-Please ensure that your target Python distribution is 2.7 or 3.5+. It is
-recommended to install Batch Shipyard on Python 3.5 or later. Although Python
-3.5+ is recommended, if you cannot easily install Python 3.5+ on
-your system but Python 2.7 is available, then please use that version of
-Python to avoid installation hassles with a Python interpreter.
+Please ensure that your target Python is 3.5+.
The `install.sh` script supports isolated installation through a virtual
environment so that other system-wide or user python dependencies are left
@@ -84,9 +80,7 @@ If you would like to specify the virtual environment to use, use the
`-e` parameter. If you don't want to use a virtual environment and instead
would like to install into your user environment, specify the `-u` option.
Using this option will require modifying your shell rc file for advanced
-data movement capability provided by Batch Shipyard. Note that the default
-installation targets `python3`; you can use the `-2` argument to install
-for `python` (Python 2.7).
+data movement capability provided by Batch Shipyard.
The recommended installation method with a virtual environment:
```shell
@@ -94,10 +88,8 @@ The recommended installation method with a virtual environment:
# Obtain Batch Shipyard through git clone or downloading the archive and unpacking
# Change directory to where Batch Shipyard was cloned or unpacked to
cd batch-shipyard
-# Install for Python 3.5+ (recommended) in the virtual environment ".shipyard"
+# Install for Python 3.5+ in the virtual environment ".shipyard"
./install.sh
-# Or install for Python 2.7 (not recommended) in the virtual environment ".shipyard"
-./install.sh -2
```
A helper script named `shipyard` will be generated with a successful
@@ -121,10 +113,8 @@ Alternatively, install directly into your "user" environment:
# Obtain Batch Shipyard through git clone or downloading the archive and unpacking
# Change directory to where Batch Shipyard was cloned or unpacked to
cd batch-shipyard
-# Install for Python 3.5+
+# Install in user environment
./install.sh -u
-# Or install for Python 2.7
-./install.sh -2 -u
# Add $HOME/.local/bin to your PATH in your shell rc file if it is not present.
# For example, the following line can be added to ~/.bashrc for bash shells:
export PATH=$PATH:$HOME/.local/bin
@@ -143,7 +133,7 @@ release of Batch Shipyard.
#### Installation on CentOS 6.x / RHEL 6.x / Fedora 13 to 18
The default python interpreter distributed with 6.x series releases is
incompatible with Batch Shipyard. To install on these distributions, you must
-install `epel-release` package first then the `python34` epel package. Once
+install `epel-release` package first then the `python36` epel package. Once
these packages are installed, then invoke the installer in the following
manner:
@@ -152,7 +142,7 @@ DISTRIB_ID=centos DISTRIB_RELEASE=6.x ./install.sh
```
#### Unsupported Linux Distributions
-The following distributions will not work with the `install.sh` script:
+The following distributions will NOT work with the `install.sh` script:
* CentOS < 6.0
* Debian < 8
@@ -166,12 +156,10 @@ Please follow the manual installation instructions found later in this
document for these distributions.
### Step 2 [Mac]: Run the `install.sh` Script
-It is recommended to follow the steps outlined on
+You will need to follow the steps outlined on
[this guide](http://docs.python-guide.org/en/latest/starting/install3/osx/#install3-osx)
to install Batch Shipyard on a Python3 installation rather than the default
-Python 2.7 that is shipped with Mac OS X. However, if you prefer to use
-the system defaulted Python 2.7, the installation will work with that
-environment as well.
+Python2 that is shipped with Mac OS X.
The `install.sh` script supports isolated installation through a virtual
environment so that other system-wide or user python dependencies are left
@@ -186,10 +174,8 @@ The recommended installation method with a virtual environment:
# Obtain Batch Shipyard through git clone or downloading the archive and unpacking
# Change directory to where Batch Shipyard was cloned or unpacked to
cd batch-shipyard
-# Install for Python 3.5+ (recommended) in the virtual environment ".shipyard"
+# Install for Python 3.5+ in the virtual environment ".shipyard"
./install.sh
-# Or to install for Python 2.7 in the virtual environment ".shipyard"
-./install.sh -2
```
A helper script named `shipyard` will be generated with a successful
@@ -241,12 +227,6 @@ environments due to the delay in activating a conda environment.
Python from [python.org](https://www.python.org) (CPython) is recommended as
the execution environment.
-If you are installing on Python 2.7, you can download the necessary
-development headers and compiler
-[from Microsoft](http://aka.ms/vcpython27). It is recommended to upgrade to
-Python 3.5 or later so that you do not need a compiler to install the
-dependencies.
-
Alternatively you can install Batch Shipyard using the `requirements.txt`
file:
@@ -353,9 +333,8 @@ respective platform below.
#### Linux, Mac, and Windows Subsystem for Linux
Rerun the `install.sh` script with the appropriate parameters for all
-upgrades. Please ensure that if you specified `-2`, `-3` and/or the
-`-e ` parameter, then these parameters are issued again for
-upgrades.
+upgrades. Please ensure that if you specified `-u` or `-e <env name>`
+parameter that these parameters are issued again for upgrades.
#### Windows
Rerun the `install.cmd` script with the same virtual environment parameter.
@@ -389,13 +368,13 @@ properly.
## Manual Installation
### Requirements
The Batch Shipyard tool is written in Python. The client script is compatible
-with Python 2.7 or 3.5+ (recommended). You will also
-need to install dependent Python packages that Batch Shipyard requires.
-Installation can be performed using the [requirements.txt](../requirements.txt)
-file via the command `pip install --upgrade --user -r requirements.txt` (or
-via `pip3` for Python3). Note that this `pip` command should be run for every
-Batch Shipyard upgrade if not using `install.sh`. The use of `install.sh` is
-highly recommended instead of these manual steps below on Linux platforms.
+with Python 3.5+. You will also need to install dependent Python packages that
+Batch Shipyard requires. Installation can be performed using
+the [requirements.txt](../requirements.txt) file via the command
+`pip3 install --upgrade --user -r requirements.txt`. Note that this `pip3`
+command should be run for every Batch Shipyard upgrade if not using
+`install.sh`. The use of `install.sh` is highly recommended instead of these
+manual steps below on Linux platforms.
Batch Shipyard has some Python dependencies which require a valid compiler,
ssl, ffi, and Python development libraries to be installed due to the
@@ -408,33 +387,24 @@ is needed. The following are example commands to execute (as root or with
#### Ubuntu/Debian
```
apt-get update
-apt-get install -y build-essential libssl-dev libffi-dev libpython-dev python-dev python-pip
-pip install --upgrade pip
+apt-get install -y build-essential libssl-dev libffi-dev python3-dev python3-pip
+pip3 install --upgrade pip
```
#### CentOS/RHEL/Fedora
```
-yum install -y gcc openssl-devel libffi-devel python-devel
+yum install -y epel-release
+yum install -y python36-devel gcc openssl-devel libffi-devel
curl -fSsL https://bootstrap.pypa.io/get-pip.py | python
```
#### SLES/OpenSUSE
```
zypper ref
-zypper -n in gcc libopenssl-devel libffi48-devel python-devel
+zypper -n in gcc libopenssl-devel libffi48-devel python3-devel
curl -fSsL https://bootstrap.pypa.io/get-pip.py | python
```
-#### Note about Python 3.5+
-If installing for Python 3.5+, then simply use the Python3 equivalents for
-the python dependencies. For example, on Ubuntu/Debian:
-```
-apt-get update
-apt-get install -y build-essential libssl-dev libffi-dev libpython3-dev python3-dev python3-pip
-pip3 install --upgrade pip
-```
-would install the proper dependencies for Python3.
-
### Data Movement Support
Batch Shipyard contains native support for moving files locally accessible
at the point of script execution. The `install.sh` script ensures that the
diff --git a/docs/13-batch-shipyard-configuration-pool.md b/docs/13-batch-shipyard-configuration-pool.md
index f13bb657..9db22cff 100644
--- a/docs/13-batch-shipyard-configuration-pool.md
+++ b/docs/13-batch-shipyard-configuration-pool.md
@@ -572,7 +572,7 @@ the driver for the `vm_size` specified.
* (optional) `ignore_warnings` property allows overriding the default
beahvior to place the node in start task failed state if during node
prep there are warnings of possible GPU issues such as infoROM
- corruption. It is recommended not to set this value to `true`. The
+ corruption. It is not recommended to set this value to `true`. The
default, if not specified, is `false`.
* (optional) `batch_insights_enabled` property enables
[Batch Insights](https://github.com/Azure/batch-insights) monitoring for
diff --git a/docs/14-batch-shipyard-configuration-jobs.md b/docs/14-batch-shipyard-configuration-jobs.md
index 629d9e25..ab6bde9d 100644
--- a/docs/14-batch-shipyard-configuration-jobs.md
+++ b/docs/14-batch-shipyard-configuration-jobs.md
@@ -1144,7 +1144,7 @@ property are:
may be null.
* (required if using MPI) `mpi` contains the following members:
* (required) `runtime` is the runtime that should be used. Valid
- values are `intelmpi`, `intelmpi_ofa`, `mpich`, and `openmpi`.
+ values are `intelmpi`, `intelmpi-ofa`, `mpich`, and `openmpi`.
With Docker containers, it is the user's responsibility to provide
a container image that has the specified runtime installed. For
Singularity containers, the specified runtime must be installed
diff --git a/docs/18-batch-shipyard-configuration-slurm.md b/docs/18-batch-shipyard-configuration-slurm.md
index dd3b457f..f44ea28d 100644
--- a/docs/18-batch-shipyard-configuration-slurm.md
+++ b/docs/18-batch-shipyard-configuration-slurm.md
@@ -102,7 +102,7 @@ slurm:
reclaim_exclude_num_nodes: 0
max_runtime_limit: null
default: true
- preempty_type: preempt/partition_prio
+ preempt_type: preempt/partition_prio
preempt_mode: requeue
over_subscribe: no
priority_tier: 10
diff --git a/docs/25-batch-shipyard-platform-image-support.md b/docs/25-batch-shipyard-platform-image-support.md
index 91cdf5cb..55344d69 100644
--- a/docs/25-batch-shipyard-platform-image-support.md
+++ b/docs/25-batch-shipyard-platform-image-support.md
@@ -15,28 +15,51 @@ be converted to the native equivalent on pool provisioning.
Please see the [FAQ](97-faq.md) for more information about native mode
container pools.
+### Enabling `microsoft-azure-batch` Native Images
+For Batch-managed pool allocation Batch accounts, no action is needed to
+enable use of `microsoft-azure-batch` published `native` images. For User
+Subscription pool allocation Batch accounts, you will need to explicitly
+accept Marketplace terms for each image and image version. Periodically new
+image versions will be published and you will need to accept terms
+individually for each image version. If you do not accept Marketplace terms
+for these images and attempt to deploy a Batch pool with a
+`microsoft-azure-batch` `native` image using a User subscription pool
+allocation Batch account, you will observe a Resize Error on your pool with
+the error `AllocationFailed: Desired number of dedicated nodes could not be
+allocated`. The error details will have the message: `Reason: Allocation
+failed due to marketplace purchase eligibilty check returned errors`.
+
+You can accept Marketplace terms for `microsoft-azure-batch` published
+`native` images using the Azure CLI:
+
+1. Ensure that you are on the correct subscription id of the Batch account
+in the Azure CLI.
+2. Run `az vm image list --all --publisher microsoft-azure-batch`
+3. Find the correlated VM image using the tables provided below. Locate
+the `urn` of that image in the JSON object in the output of the command.
+4. Run `az vm image accept-terms --urn <urn>` for the
+corresponding `urn` to accept the terms for the image.
+
+## Image Support Matrix for Batch Shipyard
+
### CentOS
| Publisher | Offer | Sku | GPU | IB/RDMA | Native Only | Native Convert |
|-----------------------|-----------------------|-----|:---:|:-------:|:-----------:|:--------------:|
| microsoft-azure-batch | centos-container | 7-4 | X | | X | |
-| microsoft-azure-batch | centos-container | 7-6 | X | | X | |
-| microsoft-azure-batch | centos-container | 7-7 | X | | X | |
+| microsoft-azure-batch | centos-container | 7-8 | X | | X | |
| microsoft-azure-batch | centos-container-rdma | 7-4 | X | X (4) | X | |
-| microsoft-azure-batch | centos-container-rdma | 7-6 | X | X (5) | X | |
-| microsoft-azure-batch | centos-container-rdma | 7-7 | X | X (5) | X | |
+| microsoft-azure-batch | centos-container-rdma | 7-8 | X | X (5) | X | |
| OpenLogic | CentOS | 7.4 | X | | | X |
-| OpenLogic | CentOS | 7.6 | X | | | X |
-| OpenLogic | CentOS | 7.7 | X | | | X |
+| OpenLogic | CentOS | 7_8 | X | | | X |
| OpenLogic | CentOS-HPC | 7.4 | X | X (4) | | X |
-| OpenLogic | CentOS-HPC | 7.6 | X | X (5) | | X |
| OpenLogic | CentOS-HPC | 7.7 | X | X (5) | | X |
### Debian
-| Publisher | Offer | Sku | GPU | IB/RDMA | Native Only | Native Convert |
-|-----------|--------|-----|:---:|:-------:|:-----------:|:--------------:|
-| Credativ | Debian | 9 | | | | |
+| Publisher | Offer | Sku | GPU | IB/RDMA | Native Only | Native Convert |
+|-----------|-----------|-----|:---:|:-------:|:-----------:|:--------------:|
+| Debian | Debian-10 | 10 | | | | |
### SLES
@@ -46,22 +69,22 @@ SLES is not supported at this time.
| Publisher | Offer | Sku | GPU | IB/RDMA | Native Only | Native Convert |
|-----------------------|------------------------------|-------------|:---:|:---------:|:-----------:|:--------------:|
-| Canonical | UbuntuServer | 16.04-LTS | X | X (1) | | X (2,4) |
| Canonical | UbuntuServer | 18.04-LTS | X | X (1) | | |
-| microsoft-azure-batch | ubuntu-server-container | 16-04-lts | X | | X | |
-| microsoft-azure-batch | ubuntu-server-container-rdma | 16-04-lts | X | X (3,4) | X | |
+| Canonical | 0001-com-ubuntu-server-focal | 20_04-lts | X | X (1) | | X (2,5) |
+| microsoft-azure-batch | ubuntu-server-container | 20-04-lts | X | | X | |
+| microsoft-azure-batch | ubuntu-server-container-rdma | 20-04-lts | X | X (5) | X | |
### Windows
+Note that `WindowsServer` Skus ending with suffixes such as `-gs`,
+`-smalldisk`, `-smalldisk-gs`, and `-smalldisk-g2` are supported.
+
| Publisher | Offer | Sku | GPU | IB/RDMA | Native Only | Native Convert |
|------------------------|-------------------------|------------------------------------------------|:---:|:-------:|:-----------:|:--------------:|
| MicrosoftWindowsServer | WindowsServer | 2016-Datacenter-with-Containers | | | X | |
| MicrosoftWindowsServer | WindowsServer | 2019-Datacenter-with-Containers | | | X | |
-| MicrosoftWindowsServer | WindowsServer | 2019-Datacenter-with-Containers-smalldisk | | | X | |
| MicrosoftWindowsServer | WindowsServer | 2019-Datacenter-Core-with-Containers | | | X | |
-| MicrosoftWindowsServer | WindowsServer | 2019-Datacenter-Core-with-Containers-smalldisk | | | X | |
-| MicrosoftWindowsServer | WindowsServer | Datacenter-Core-1903-with-Containers-smalldisk | | | X | |
-| MicrosoftWindowsServer | WindowsServerSemiAnnual | Datacenter-Core-1809-with-Containers-smalldisk | | | X | |
+| MicrosoftWindowsServer | WindowsServer | Datacenter-Core-20H2-with-Containers | | | X | |
## Notes
1. IB/RDMA is supported for this host OS with a custom image unless
diff --git a/docs/35-batch-shipyard-task-factory-merge-task.md b/docs/35-batch-shipyard-task-factory-merge-task.md
index 4b1cf629..3e4a5638 100644
--- a/docs/35-batch-shipyard-task-factory-merge-task.md
+++ b/docs/35-batch-shipyard-task-factory-merge-task.md
@@ -495,8 +495,9 @@ positional argument (i.e., `*args`), we are creating a range from `0` to that
argument value and `yield`ing the result as a iterable (tuple). Yielding
the result as an iterable is mandatory as the return value is unpacked and
applied to the `command`. This allows for multiple parameters to be generated
-and applied for each generated task. An example corresponding configuration
-may be similar to the following:
+and applied for each generated task. The iterable can also be a dictionary to
+allow named placeholders when applied to `command`. An example corresponding
+configuration may be similar to the following:
```yaml
task_factory:
diff --git a/docs/74-batch-shipyard-azure-keyvault.md b/docs/74-batch-shipyard-azure-keyvault.md
index 15156fb8..033e2a6e 100644
--- a/docs/74-batch-shipyard-azure-keyvault.md
+++ b/docs/74-batch-shipyard-azure-keyvault.md
@@ -1,7 +1,9 @@
# Using Azure KeyVault for Credentials with Batch Shipyard
The focus of this article is to explain how to use Azure KeyVault for
managing credentials config files and/or individual keys and passwords for use
-with Batch Shipyard.
+with Batch Shipyard. The concepts presented here are specifically for managing
+credentials and secrets in configuration files and are tangential to an
+Azure KeyVault required for User Subscription pool allocation Batch accounts.
## Introduction and Concepts
The [credentials.yaml](10-batch-shipyard-configuration.md#cred) file
diff --git a/docs/97-faq.md b/docs/97-faq.md
index 2b7bb18c..d51eeb5e 100644
--- a/docs/97-faq.md
+++ b/docs/97-faq.md
@@ -68,6 +68,12 @@ for more information. Compliant
[custom images](63-batch-shipyard-custom-images.md) are compatible with
`native` mode.
+Note that if using `native` images on User Subscription pool allocation Batch
+accounts, you will need to explicitly accept Marketplace terms for each image
+and image version. Please see the
+[Batch Shipyard Platform Image support doc](25-batch-shipyard-platform-image-support.md)
+for more information about this process.
+
Advantages of `native` mode are:
* Batch Shipyard with a provisioned SSH user is no longer necessary to
diff --git a/federation/requirements.txt b/federation/requirements.txt
index 5d71735a..0b2db61a 100644
--- a/federation/requirements.txt
+++ b/federation/requirements.txt
@@ -1,10 +1,10 @@
-azure-batch==8.0.0
+azure-batch==9.0.0
azure-cosmosdb-table==1.0.6
-azure-mgmt-compute==9.0.0
-azure-mgmt-resource==6.0.0
-azure-mgmt-storage==6.0.0
-azure-storage-blob==2.1.0
-azure-storage-queue==2.1.0
-msrestazure==0.6.2
-python-dateutil==2.8.1
-requests==2.22.0
+azure-mgmt-compute==12.0.0
+azure-mgmt-resource==10.0.0
+azure-mgmt-storage==10.0.0
+azure-storage-blob>=2.1.0,<3
+azure-storage-queue>=2.1.0,<3
+msrestazure==0.6.4
+python-dateutil>=2.8.2,<3
+requests>=2.26.0,<3
diff --git a/heimdall/requirements.txt b/heimdall/requirements.txt
index fbf3c7de..14611167 100644
--- a/heimdall/requirements.txt
+++ b/heimdall/requirements.txt
@@ -1,8 +1,8 @@
-azure-batch==8.0.0
+azure-batch==9.0.0
azure-cosmosdb-table==1.0.6
-azure-mgmt-compute==9.0.0
-azure-mgmt-network==8.0.0
-azure-mgmt-resource==6.0.0
-azure-mgmt-storage==6.0.0
-msrestazure==0.6.2
-requests==2.22.0
+azure-mgmt-compute==12.0.0
+azure-mgmt-network==10.2.0
+azure-mgmt-resource==10.0.0
+azure-mgmt-storage==10.0.0
+msrestazure==0.6.4
+requests>=2.26.0,<3
diff --git a/images/docker/linux/cli/Dockerfile b/images/docker/linux/cli/Dockerfile
index 743c43e5..9389e5b7 100644
--- a/images/docker/linux/cli/Dockerfile
+++ b/images/docker/linux/cli/Dockerfile
@@ -1,6 +1,6 @@
# Dockerfile for Azure/batch-shipyard (cli)
-FROM alpine:3.10
+FROM python:3.9.4-alpine3.13
MAINTAINER Fred Park
ARG GIT_BRANCH
@@ -8,18 +8,20 @@ ARG GIT_COMMIT
RUN apk update \
&& apk add --update --no-cache \
- musl build-base python3 python3-dev openssl-dev libffi-dev \
- ca-certificates openssl openssh-client rsync git bash \
+ musl build-base openssl-dev libffi-dev rust cargo ca-certificates git \
+ openssl openssh-client rsync bash \
+ && python3 -m ensurepip --upgrade \
+ && pip3 install --no-cache-dir --upgrade pip setuptools setuptools-rust wheel \
&& git clone -b $GIT_BRANCH --single-branch https://github.com/Azure/batch-shipyard.git /opt/batch-shipyard \
&& cd /opt/batch-shipyard \
&& git checkout $GIT_COMMIT \
&& rm -rf .git .github .vsts \
&& rm -f .git* .travis.yml *.yml install* \
- && python3 -m pip install --no-cache-dir --upgrade pip \
&& pip3 install --no-cache-dir -r requirements.txt \
- && pip3 install --no-cache-dir --no-deps -r req_nodeps.txt \
&& python3 -m compileall -f /opt/batch-shipyard/shipyard.py /opt/batch-shipyard/convoy \
- && apk del --purge build-base python3-dev openssl-dev libffi-dev git \
+ && pip3 uninstall -y setuptools-rust wheel \
+ && apk del --purge build-base patch openssl-dev libffi-dev rust cargo git \
-    && rm /var/cache/apk/* \
+    && rm /var/cache/apk/* \
+    && rm -rf /root/.cache /root/.cargo
ENTRYPOINT ["/opt/batch-shipyard/shipyard.py"]
diff --git a/images/docker/singularity/Dockerfile b/images/docker/singularity/Dockerfile
index 3dbface6..17f6c9f8 100644
--- a/images/docker/singularity/Dockerfile
+++ b/images/docker/singularity/Dockerfile
@@ -1,12 +1,12 @@
# Dockerfile for Singularity
-FROM ubuntu:18.04
+FROM ubuntu:20.04
MAINTAINER Fred Park
ARG SINGULARITY_VERSION
ARG LOCAL_STATE_DIR
-ENV GO_VERSION=1.13.4 \
+ENV GO_VERSION=1.16.3 \
GOOS=linux \
GOARCH=amd64
@@ -29,8 +29,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN cd /usr/local \
&& curl -fSsL https://dl.google.com/go/go${GO_VERSION}.linux-amd64.tar.gz | tar -zxpf - \
&& export GOPATH=${HOME}/go \
- && export PATH=/usr/local/go/bin:${PATH}:${GOPATH}/bin \
- && go get -u github.com/golang/dep
+ && export PATH=/usr/local/go/bin:${PATH}:${GOPATH}/bin
RUN export GOPATH=${HOME}/go \
&& export PATH=/usr/local/go/bin:${PATH}:${GOPATH}/bin \
@@ -46,6 +45,6 @@ RUN export GOPATH=${HOME}/go \
&& ldconfig /opt/singularity/lib/singularity \
&& ln -s /opt/singularity/bin/singularity /usr/bin/singularity
-FROM alpine:3.10
+FROM alpine:3.13
COPY --from=0 /opt/singularity /opt/singularity
diff --git a/images/docker/windows/cli/Dockerfile b/images/docker/windows/cli/Dockerfile
index b3a76263..a3931259 100644
--- a/images/docker/windows/cli/Dockerfile
+++ b/images/docker/windows/cli/Dockerfile
@@ -1,7 +1,7 @@
# Dockerfile for Azure/batch-shipyard CLI (Windows)
# Adapted from: https://github.com/StefanScherer/dockerfiles-windows/blob/master/python/Dockerfile
-FROM python:3.7.5-windowsservercore-ltsc2016
+FROM python:3.9.4-windowsservercore-ltsc2016
MAINTAINER Fred Park
ENV chocolateyUseWindowsCompression false
@@ -16,7 +16,6 @@ WORKDIR C:\\batch-shipyard
RUN git clone -b $Env:GIT_BRANCH --single-branch https://github.com/Azure/batch-shipyard.git C:\batch-shipyard ; \
git checkout $Env:GIT_COMMIT ; \
pip install --no-cache-dir -r requirements.txt ; \
- pip install --no-cache-dir --no-deps -r req_nodeps.txt ; \
Remove-Item .git -Force -Recurse ; \
Remove-Item .git* -Force -Recurse ; \
Remove-Item .vsts -Force -Recurse ; \
@@ -36,8 +35,8 @@ COPY --from=0 /batch-shipyard /batch-shipyard
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
-ENV PYTHON_VERSION 3.7.5
-ENV PYTHON_PIP_VERSION 19.3.1
+ENV PYTHON_VERSION 3.9.4
+ENV PYTHON_PIP_VERSION 21.0.1
RUN $env:PATH = 'C:\Python;C:\Python\Scripts;{0}' -f $env:PATH ; \
Set-ItemProperty -Path 'HKLM:\SYSTEM\CurrentControlSet\Control\Session Manager\Environment\' -Name Path -Value $env:PATH ; \
diff --git a/images/gen_3rd_party_notices.sh b/images/gen_3rd_party_notices.sh
index eb0adade..a3be21d4 100755
--- a/images/gen_3rd_party_notices.sh
+++ b/images/gen_3rd_party_notices.sh
@@ -27,12 +27,6 @@ DEPENDENCIES=(
click
https://github.com/pallets/click
https://github.com/pallets/click/raw/master/LICENSE.rst
- future
- https://github.com/PythonCharmers/python-future
- https://github.com/PythonCharmers/python-future/raw/master/LICENSE.txt
- libtorrent
- https://github.com/arvidn/libtorrent
- https://github.com/arvidn/libtorrent/raw/libtorrent-1_0_11/LICENSE
msrest
https://github.com/Azure/msrest-for-python
https://github.com/Azure/msrest-for-python/raw/master/LICENSE.md
@@ -52,11 +46,11 @@ DEPENDENCIES=(
https://github.com/requests/requests
https://github.com/requests/requests/raw/master/LICENSE
ruamel.yaml
- https://bitbucket.org/ruamel/yaml
- https://bitbucket.org/ruamel/yaml/raw/75c831644aa26f12ff33ac81180fbaa23b81d4bb/LICENSE
+ https://sourceforge.net/p/ruamel-yaml/code/ci/default/tree/
+ "https://sourceforge.net/p/ruamel-yaml/code/ci/default/tree/LICENSE?format=raw"
singularity
- https://github.com/sylabs/singularity
- https://github.com/sylabs/singularity/raw/master/LICENSE.md
+ https://github.com/hpcng/singularity
+ https://github.com/hpcng/singularity/raw/master/LICENSE.md
)
DEPLEN=${#DEPENDENCIES[@]}
diff --git a/images/singularity/cli.def b/images/singularity/cli.def
index 4949b75a..32eaff8e 100644
--- a/images/singularity/cli.def
+++ b/images/singularity/cli.def
@@ -1,22 +1,24 @@
# Singularity for Azure/batch-shipyard (cli)
Bootstrap: library
-From: alpine:3.9
+From: alpine:3.13
%post
apk update
apk add --update --no-cache \
- musl build-base python3 python3-dev openssl-dev libffi-dev \
- ca-certificates openssl openssh-client rsync git bash
+ musl build-base openssl-dev libffi-dev rust cargo ca-certificates git
+python3 -m ensurepip --upgrade
+pip3 install --no-cache-dir --upgrade pip setuptools setuptools-rust wheel
git clone -b master --single-branch --depth 5 https://github.com/Azure/batch-shipyard.git /opt/batch-shipyard
cd /opt/batch-shipyard
rm -rf .git .github .vsts
rm -f .git* .travis.yml *.yml install*
pip3 install --no-cache-dir -r requirements.txt
-pip3 install --no-cache-dir --no-deps -r req_nodeps.txt
python3 -m compileall -f /opt/batch-shipyard/shipyard.py /opt/batch-shipyard/convoy
-apk del --purge build-base python3-dev openssl-dev libffi-dev git
+pip3 uninstall -y setuptools-rust wheel
+apk del --purge build-base patch openssl-dev libffi-dev rust cargo git
rm /var/cache/apk/*
+rm -rf /root/.cache /root/.cargo
%runscript
exec /opt/batch-shipyard/shipyard.py "$@"
diff --git a/install.sh b/install.sh
index 8687dd70..109b1ec8 100755
--- a/install.sh
+++ b/install.sh
@@ -13,30 +13,18 @@ SUDO=sudo
VENV_NAME=.shipyard
# process options
-while getopts "h?23ce:u" opt; do
+while getopts "h?ce:u" opt; do
case "$opt" in
h|\?)
echo "install.sh parameters"
echo ""
- echo "-2 install for Python 2.7"
- echo "-3 install for Python 3.4+ [default]"
echo "-c install for Cloud Shell (via Dockerfile)"
echo "-e [environment name] install to a virtual environment"
echo "-u force install into user python environment instead of a virtual enviornment"
echo ""
exit 1
;;
- 2)
- PYTHON=python
- PIP=pip
- ;;
- 3)
- PYTHON=python3
- PIP=pip3
- ;;
c)
- PYTHON=python3
- PIP=pip3
VENV_NAME=cloudshell
SUDO=
;;
@@ -133,11 +121,13 @@ else
. /etc/os-release
DISTRIB_ID=$ID
DISTRIB_RELEASE=$VERSION_ID
+ DISTRIB_LIKE=$ID_LIKE
fi
# check for OS X
if [ -z "${DISTRIB_ID+x}" ] && [ "$(uname)" == "Darwin" ]; then
DISTRIB_ID=$(uname)
DISTRIB_RELEASE=$(uname -a | cut -d' ' -f3)
+ DISTRIB_LIKE=$ID_LIKE
fi
fi
@@ -153,51 +143,40 @@ if [ "$DISTRIB_ID" != "Darwin" ]; then
DISTRIB_RELEASE=${DISTRIB_RELEASE,,}
fi
-echo "Detected OS: $DISTRIB_ID $DISTRIB_RELEASE"
+echo "Detected OS: $DISTRIB_ID $DISTRIB_RELEASE (ID_LIKE=$DISTRIB_LIKE)"
# install requisite packages from distro repo
if [ -n "$SUDO" ] || [ "$(id -u)" -eq 0 ]; then
- if [ "$DISTRIB_ID" == "ubuntu" ] || [ "$DISTRIB_ID" == "debian" ]; then
+ if [ "$DISTRIB_ID" == "ubuntu" ] || [ "$DISTRIB_ID" == "debian" ] || [ "$DISTRIB_ID" == "cbld" ] || [ "$DISTRIB_LIKE" == "debian" ]; then
$SUDO apt-get update
if [ $ANACONDA -eq 1 ]; then
PYTHON_PKGS=
else
- if [ $PYTHON == "python" ]; then
- PYTHON_PKGS="libpython-dev python-dev"
- if [ $ANACONDA -eq 0 ]; then
- PYTHON_PKGS="$PYTHON_PKGS python-pip"
- fi
- else
- PYTHON_PKGS="libpython3-dev python3-dev"
- if [ $ANACONDA -eq 0 ]; then
- PYTHON_PKGS="$PYTHON_PKGS python3-pip"
- fi
+ PYTHON_PKGS="libpython3-dev python3-dev"
+ if [ $ANACONDA -eq 0 ]; then
+ PYTHON_PKGS="$PYTHON_PKGS python3-pip"
fi
fi
# shellcheck disable=SC2086
$SUDO apt-get install -y --no-install-recommends \
build-essential libssl-dev libffi-dev openssl \
openssh-client rsync $PYTHON_PKGS
- elif [ "$DISTRIB_ID" == "centos" ] || [ "$DISTRIB_ID" == "rhel" ]; then
+ elif [ "$DISTRIB_ID" == "centos" ] || [ "$DISTRIB_ID" == "rhel" ] || [ "$DISTRIB_LIKE" == "rhel" ]; then
$SUDO yum makecache fast
if [ $ANACONDA -eq 1 ]; then
PYTHON_PKGS=
else
- if [ $PYTHON == "python" ]; then
- PYTHON_PKGS="python-devel"
- else
- if ! yum list installed epel-release; then
- echo "epel-release package not installed."
- echo "Please install the epel-release package or refer to the Installation documentation for manual installation steps".
- exit 1
- fi
- if ! yum list installed python34; then
- echo "python34 epel package not installed."
- echo "Please install the python34 epel package or refer to the Installation documentation for manual installation steps."
- exit 1
- fi
- PYTHON_PKGS="python34-devel"
+ if ! yum list installed epel-release; then
+ echo "epel-release package not installed."
+ echo "Please install the epel-release package or refer to the Installation documentation for manual installation steps".
+ exit 1
fi
+ if ! yum list installed python36; then
+ echo "python36 epel package not installed."
+ echo "Please install the python36 epel package or refer to the Installation documentation for manual installation steps."
+ exit 1
+ fi
+ PYTHON_PKGS="python36-devel"
fi
# shellcheck disable=SC2086
$SUDO yum install -y gcc openssl-devel libffi-devel openssl \
@@ -210,11 +189,7 @@ if [ -n "$SUDO" ] || [ "$(id -u)" -eq 0 ]; then
if [ $ANACONDA -eq 1 ]; then
PYTHON_PKGS=
else
- if [ $PYTHON == "python" ]; then
- PYTHON_PKGS="python-devel"
- else
- PYTHON_PKGS="python3-devel"
- fi
+ PYTHON_PKGS="python3-devel"
fi
# shellcheck disable=SC2086
$SUDO zypper -n in gcc libopenssl-devel libffi48-devel openssl \
@@ -261,7 +236,6 @@ if [ -n "$VENV_NAME" ]; then
$PIP uninstall -y azure-storage
set -e
$PIP install --upgrade -r requirements.txt
- $PIP install --upgrade --no-deps -r req_nodeps.txt
deactivate
else
# set python version
@@ -280,7 +254,6 @@ if [ -n "$VENV_NAME" ]; then
$PIP uninstall -y azure-storage
set -e
$PIP install --upgrade -r requirements.txt
- $PIP install --upgrade --no-deps -r req_nodeps.txt
source deactivate "$VENV_NAME"
fi
else
@@ -290,7 +263,6 @@ else
$PIP uninstall -y azure-storage
set -e
$PIP install --upgrade --user -r requirements.txt
- $PIP install --upgrade --no-deps --user -r req_nodeps.txt
fi
# create shipyard script
@@ -325,15 +297,9 @@ EOF
fi
fi
-if [ $PYTHON == "python" ]; then
-cat >> shipyard << 'EOF'
-python $BATCH_SHIPYARD_ROOT_DIR/shipyard.py $*
-EOF
-else
cat >> shipyard << 'EOF'
python3 $BATCH_SHIPYARD_ROOT_DIR/shipyard.py $*
EOF
-fi
if [ -n "$VENV_NAME" ]; then
if [ $ANACONDA -eq 0 ]; then
diff --git a/recipes/OpenFOAM-Infiniband-IntelMPI/config/jobs.yaml b/recipes/OpenFOAM-Infiniband-IntelMPI/config/jobs.yaml
index 19c11be2..9b515294 100644
--- a/recipes/OpenFOAM-Infiniband-IntelMPI/config/jobs.yaml
+++ b/recipes/OpenFOAM-Infiniband-IntelMPI/config/jobs.yaml
@@ -2,7 +2,9 @@ job_specifications:
- id: openfoamjob
auto_complete: true
shm_size: 256m
- auto_scratch: true
+ auto_scratch:
+ setup: dependency
+ num_instances: pool_current_dedicated
tasks:
- docker_image: alfpark/openfoam:4.0-icc-intelmpi
resource_files:
@@ -12,7 +14,7 @@ job_specifications:
num_instances: pool_current_dedicated
pre_execution_command: source set_up_sample.sh
mpi:
- runtime: intelmpi
+ runtime: intelmpi-ofa
options:
- -np $np
- -ppn $ppn
diff --git a/recipes/OpenFOAM-Infiniband-IntelMPI/config/pool.yaml b/recipes/OpenFOAM-Infiniband-IntelMPI/config/pool.yaml
index 21c6faaf..e6bea584 100644
--- a/recipes/OpenFOAM-Infiniband-IntelMPI/config/pool.yaml
+++ b/recipes/OpenFOAM-Infiniband-IntelMPI/config/pool.yaml
@@ -4,7 +4,7 @@ pool_specification:
platform_image:
publisher: OpenLogic
offer: CentOS-HPC
- sku: '7.1'
+ sku: '7.4'
vm_count:
dedicated: 2
low_priority: 0
diff --git a/recipes/OpenFOAM-Infiniband-OpenMPI/config/jobs.yaml b/recipes/OpenFOAM-Infiniband-OpenMPI/config/jobs.yaml
index 1424734c..8fb84814 100644
--- a/recipes/OpenFOAM-Infiniband-OpenMPI/config/jobs.yaml
+++ b/recipes/OpenFOAM-Infiniband-OpenMPI/config/jobs.yaml
@@ -1,7 +1,9 @@
job_specifications:
- id: docker-openfoam-openmpi-ib-job
auto_complete: true
- auto_scratch: true
+ auto_scratch:
+ setup: dependency
+ num_instances: pool_current_dedicated
tasks:
- docker_image: vincentlabo/openfoam:openmpi-ib
additional_docker_run_options:
diff --git a/recipes/OpenFOAM-TCP-OpenMPI/config/jobs.yaml b/recipes/OpenFOAM-TCP-OpenMPI/config/jobs.yaml
index a32ec6a0..79a3b403 100644
--- a/recipes/OpenFOAM-TCP-OpenMPI/config/jobs.yaml
+++ b/recipes/OpenFOAM-TCP-OpenMPI/config/jobs.yaml
@@ -1,7 +1,9 @@
job_specifications:
- id: openfoamjob
auto_complete: true
- auto_scratch: true
+ auto_scratch:
+ setup: dependency
+ num_instances: pool_current_dedicated
tasks:
- docker_image: alfpark/openfoam:4.0-gcc-openmpi
resource_files:
diff --git a/req_nodeps.txt b/req_nodeps.txt
deleted file mode 100644
index d7887f8a..00000000
--- a/req_nodeps.txt
+++ /dev/null
@@ -1 +0,0 @@
-pykwalify==1.7.0
diff --git a/requirements.txt b/requirements.txt
index 8c42df92..9c199b7d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,25 +1,24 @@
-adal==1.2.2
-azure-batch==8.0.0
+adal==1.2.7
+azure-batch==9.0.0
azure-cosmosdb-table==1.0.6
azure-keyvault==1.1.0
-azure-mgmt-authorization==0.60.0
-azure-mgmt-batch==7.0.0
-azure-mgmt-compute==9.0.0
-azure-mgmt-network==8.0.0
-azure-mgmt-resource==6.0.0
-azure-mgmt-storage==6.0.0
-azure-storage-blob==2.1.0
-azure-storage-file==2.1.0
-azure-storage-queue==2.1.0
-blobxfer==1.9.4
-click==7.0
-future==0.18.2
-futures==3.3.0; python_version < '3'
-keyrings.alt==3.1.1
-msrest==0.6.10
-msrestazure==0.6.2
+azure-mgmt-authorization==0.61.0
+azure-mgmt-batch==9.0.0
+azure-mgmt-compute==12.0.0
+azure-mgmt-network==10.2.0
+azure-mgmt-resource==10.0.0
+azure-mgmt-storage==10.0.0
+azure-storage-blob>=2.1.0,<3
+azure-storage-file>=2.1.0,<3
+azure-storage-queue>=2.1.0,<3
+blobxfer==1.11.0
+click>=8.0.1,<9
+keyrings.alt>=4.1.0,<5
+msrest==0.6.21
+msrestazure==0.6.4
pathlib2==2.3.5; python_version < '3.5'
-python-dateutil==2.8.1
-requests==2.22.0
-ruamel.yaml==0.16.5
+pykwalify>=1.8.0,<2
+python-dateutil>=2.8.2,<3
+requests>=2.26.0,<3
+ruamel.yaml>=0.17.16,<1
scandir==1.10.0; python_version < '3.5'
diff --git a/schemas/jobs.yaml b/schemas/jobs.yaml
index f05fab95..56d7af08 100644
--- a/schemas/jobs.yaml
+++ b/schemas/jobs.yaml
@@ -810,6 +810,9 @@ mapping:
type: str
is_file_share:
type: bool
+ condition:
+ type: str
+ enum: ['taskcompletion', 'taskfailure', 'tasksuccess']
exclude:
type: seq
sequence:
diff --git a/scripts/shipyard_federation_bootstrap.sh b/scripts/shipyard_federation_bootstrap.sh
index 534824fc..450a2a63 100755
--- a/scripts/shipyard_federation_bootstrap.sh
+++ b/scripts/shipyard_federation_bootstrap.sh
@@ -27,6 +27,7 @@ if [ -e /etc/os-release ]; then
. /etc/os-release
DISTRIB_ID=$ID
DISTRIB_RELEASE=$VERSION_ID
+ DISTRIB_LIKE=$ID_LIKE
DISTRIB_CODENAME=$VERSION_CODENAME
if [ -z "$DISTRIB_CODENAME" ]; then
if [ "$DISTRIB_ID" == "debian" ] && [ "$DISTRIB_RELEASE" == "9" ]; then
@@ -47,13 +48,12 @@ if [ -z "${DISTRIB_CODENAME}" ]; then
fi
DISTRIB_ID=${DISTRIB_ID,,}
DISTRIB_RELEASE=${DISTRIB_RELEASE,,}
+DISTRIB_LIKE=${DISTRIB_LIKE,,}
DISTRIB_CODENAME=${DISTRIB_CODENAME,,}
# set distribution specific vars
PACKAGER=
-if [ "$DISTRIB_ID" == "ubuntu" ]; then
- PACKAGER=apt
-elif [ "$DISTRIB_ID" == "debian" ]; then
+if [ "$DISTRIB_ID" == "ubuntu" ] || [ "$DISTRIB_ID" == "debian" ] || [ "$DISTRIB_LIKE" == "debian" ]; then
PACKAGER=apt
elif [[ $DISTRIB_ID == centos* ]] || [ "$DISTRIB_ID" == "rhel" ]; then
PACKAGER=yum
@@ -199,7 +199,7 @@ refresh_package_index() {
apt-get update
rc=$?
elif [ "$PACKAGER" == "yum" ]; then
- yum makecache -y fast
+ yum makecache -y
rc=$?
elif [ "$PACKAGER" == "zypper" ]; then
zypper -n --gpg-auto-import-keys ref
@@ -323,8 +323,9 @@ install_docker_host_engine() {
docker info
log INFO "Docker Host Engine installed"
# install docker-compose
- install_packages python3-pip python3-distutils apache2-utils
- pip3 install --upgrade setuptools wheel
+ install_packages rustc cargo build-essential libssl-dev libffi-dev \
+ python3-dev python3-pip python3-distutils apache2-utils
+ pip3 install --upgrade setuptools setuptools-rust wheel
pip3 install docker-compose
log INFO "Docker-compose installed"
}
diff --git a/scripts/shipyard_monitoring_bootstrap.sh b/scripts/shipyard_monitoring_bootstrap.sh
index 0abd0613..89128b35 100755
--- a/scripts/shipyard_monitoring_bootstrap.sh
+++ b/scripts/shipyard_monitoring_bootstrap.sh
@@ -31,6 +31,7 @@ if [ -e /etc/os-release ]; then
. /etc/os-release
DISTRIB_ID=$ID
DISTRIB_RELEASE=$VERSION_ID
+ DISTRIB_LIKE=$ID_LIKE
DISTRIB_CODENAME=$VERSION_CODENAME
if [ -z "$DISTRIB_CODENAME" ]; then
if [ "$DISTRIB_ID" == "debian" ] && [ "$DISTRIB_RELEASE" == "9" ]; then
@@ -51,13 +52,12 @@ if [ -z "${DISTRIB_CODENAME}" ]; then
fi
DISTRIB_ID=${DISTRIB_ID,,}
DISTRIB_RELEASE=${DISTRIB_RELEASE,,}
+DISTRIB_LIKE=${DISTRIB_LIKE,,}
DISTRIB_CODENAME=${DISTRIB_CODENAME,,}
# set distribution specific vars
PACKAGER=
-if [ "$DISTRIB_ID" == "ubuntu" ]; then
- PACKAGER=apt
-elif [ "$DISTRIB_ID" == "debian" ]; then
+if [ "$DISTRIB_ID" == "ubuntu" ] || [ "$DISTRIB_ID" == "debian" ] || [ "$DISTRIB_LIKE" == "debian" ]; then
PACKAGER=apt
elif [[ $DISTRIB_ID == centos* ]] || [ "$DISTRIB_ID" == "rhel" ]; then
PACKAGER=yum
@@ -197,7 +197,7 @@ refresh_package_index() {
apt-get update
rc=$?
elif [ "$PACKAGER" == "yum" ]; then
- yum makecache -y fast
+ yum makecache -y
rc=$?
elif [ "$PACKAGER" == "zypper" ]; then
zypper -n --gpg-auto-import-keys ref
@@ -305,8 +305,9 @@ install_docker_host_engine() {
docker info
log INFO "Docker Host Engine installed"
# install docker-compose
- install_packages python3-pip python3-distutils apache2-utils
- pip3 install --upgrade setuptools wheel
+ install_packages rustc cargo build-essential libssl-dev libffi-dev \
+ python3-dev python3-pip python3-distutils apache2-utils
+ pip3 install --upgrade setuptools setuptools-rust wheel
pip3 install docker-compose
log INFO "Docker-compose installed"
}
diff --git a/scripts/shipyard_nodeprep.sh b/scripts/shipyard_nodeprep.sh
index 3dca869d..03183603 100755
--- a/scripts/shipyard_nodeprep.sh
+++ b/scripts/shipyard_nodeprep.sh
@@ -6,12 +6,13 @@ set -e
set -o pipefail
# version consts
-DOCKER_CE_VERSION_DEBIAN=19.03.5
-DOCKER_CE_VERSION_CENTOS=19.03.5
+DOCKER_CE_VERSION_DEBIAN=20.10.8
+DOCKER_CE_VERSION_CENTOS=20.10.8
DOCKER_CE_VERSION_SLES=17.09.1
-GLUSTER_VERSION_DEBIAN=7
-GLUSTER_VERSION_CENTOS=6
-IMDS_VERSION=2019-04-30
+GLUSTER_VERSION_DEBIAN=9
+GLUSTER_VERSION_CENTOS=9
+BLOBFUSE_VERSION=1.4.1
+IMDS_VERSION=2021-01-01
# consts
DOCKER_CE_PACKAGE_DEBIAN="5:${DOCKER_CE_VERSION_DEBIAN}~3-0~"
@@ -29,6 +30,7 @@ UCX_IB_PKEY_FILE=$AZ_BATCH_TASK_WORKING_DIR/UCX_IB_PKEY
MCR_REPO=mcr.microsoft.com
BLOBXFER_IMAGE_PREFIX=${MCR_REPO}/blobxfer
SHIPYARD_IMAGE_PREFIX=${MCR_REPO}/azure-batch/shipyard
+SINGULARITY_IMAGE_PREFIX=${SHIPYARD_IMAGE_PREFIX}
# status file consts
lisinstalled=${VOLATILE_PATH}/.batch_shipyard_lis_installed
@@ -49,10 +51,11 @@ if [ -e /etc/os-release ]; then
. /etc/os-release
DISTRIB_ID=$ID
DISTRIB_RELEASE=$VERSION_ID
+ DISTRIB_LIKE=$ID_LIKE
DISTRIB_CODENAME=$VERSION_CODENAME
if [ -z "$DISTRIB_CODENAME" ]; then
- if [ "$DISTRIB_ID" == "debian" ] && [ "$DISTRIB_RELEASE" == "9" ]; then
- DISTRIB_CODENAME=stretch
+ if [ "$DISTRIB_ID" == "debian" ] && [ "$DISTRIB_RELEASE" == "10" ]; then
+ DISTRIB_CODENAME=buster
fi
fi
else
@@ -69,14 +72,13 @@ if [ -z "${DISTRIB_CODENAME}" ]; then
fi
DISTRIB_ID=${DISTRIB_ID,,}
DISTRIB_RELEASE=${DISTRIB_RELEASE,,}
+DISTRIB_LIKE=${DISTRIB_LIKE,,}
DISTRIB_CODENAME=${DISTRIB_CODENAME,,}
# set distribution specific vars
PACKAGER=
SYSTEMD_PATH=/lib/systemd/system
-if [ "$DISTRIB_ID" == "ubuntu" ]; then
- PACKAGER=apt
-elif [ "$DISTRIB_ID" == "debian" ]; then
+if [ "$DISTRIB_ID" == "ubuntu" ] || [ "$DISTRIB_ID" == "debian" ] || [ "$DISTRIB_LIKE" == "debian" ]; then
PACKAGER=apt
elif [[ $DISTRIB_ID == centos* ]] || [ "$DISTRIB_ID" == "rhel" ]; then
PACKAGER=yum
@@ -88,6 +90,15 @@ if [ "$PACKAGER" == "apt" ]; then
export DEBIAN_FRONTEND=noninteractive
fi
+# set python and pip
+if command -v python3 > /dev/null 2>&1; then
+ PYTHON=python3
+ PIP=pip3
+else
+ PYTHON=python
+ PIP=pip
+fi
+
# globals
azureblob=0
azurefile=0
@@ -248,6 +259,14 @@ if [ -z "$blobxferversion" ]; then
exit 1
fi
+get_current_timestamp() {
+ if [ $PYTHON == "python3" ]; then
+ $PYTHON -c 'import datetime;print(datetime.datetime.utcnow().timestamp())'
+ else
+ $PYTHON -c 'import datetime;import time;print(time.mktime(datetime.datetime.utcnow().timetuple()))'
+ fi
+}
+
save_startup_to_volatile() {
set +e
touch "${VOLATILE_PATH}"/startup/.save
@@ -381,15 +400,12 @@ get_vm_size_from_imds() {
if [ -n "$vm_size" ]; then
return
fi
- curl -fSsL -H Metadata:true "http://169.254.169.254/metadata/instance?api-version=${IMDS_VERSION}" > imd.json
- vm_size=$(python -c "import json;f=open('imd.json','r');a=json.load(f);print(a['compute']['vmSize']).lower()")
- if [[ "$vm_size" =~ ^standard_((hb|hc)[0-9]+m?rs?(_v[1-9])?)$ ]]; then
+ curl -fSsL -H Metadata:true --noproxy "*" "http://169.254.169.254/metadata/instance?api-version=${IMDS_VERSION}" > imd.json
+ vm_size=$(${PYTHON} -c "import json;f=open('imd.json','r');a=json.load(f);print(a['compute']['vmSize'].lower())")
+ if [[ "$vm_size" =~ ^standard_(((hb|hc)[0-9]+m?rs?(_v[1-9])?)|(nc[0-9]+rs_v[2-4])|(nd[0-9]+(rs|rs_v2|asr_v4)))$ ]]; then
# SR-IOV RDMA
vm_rdma_type=1
- elif [[ "$vm_size" =~ ^standard_(nc[0-9]+rs_v3)$ ]]; then
- # SR-IOV RDMA (transition)
- vm_rdma_type=1
- elif [[ "$vm_size" =~ ^standard_((a8|a9)|((h|nc|nd)[0-9]+m?rs?(_v[1-3])?))$ ]]; then
+ elif [[ "$vm_size" =~ ^standard_((a8|a9)|((h|nc|nd)[0-9]+m?rs?))$ ]]; then
# network direct RDMA
vm_rdma_type=2
fi
@@ -494,7 +510,7 @@ refresh_package_index() {
apt-get update
rc=$?
elif [ "$PACKAGER" == "yum" ]; then
- yum makecache -y fast
+ yum makecache -y
rc=$?
elif [ "$PACKAGER" == "zypper" ]; then
zypper -n --gpg-auto-import-keys ref
@@ -608,9 +624,9 @@ execute_command_with_retry() {
}
# TODO remove this once native images are fixed
-blacklist_kernel_upgrade() {
+denylist_kernel_upgrade() {
if [ "$DISTRIB_ID" != "ubuntu" ]; then
- log DEBUG "No kernel upgrade blacklist required on $DISTRIB_ID $DISTRIB_RELEASE"
+ log DEBUG "No kernel upgrade denylist required on $DISTRIB_ID $DISTRIB_RELEASE"
return
fi
set +e
@@ -619,7 +635,7 @@ blacklist_kernel_upgrade() {
set -e
if [ $rc -ne 0 ]; then
sed -i "/^Unattended-Upgrade::Package-Blacklist {/a\"linux-azure\";\\n\"linux-cloud-tools-azure\";\\n\"linux-headers-azure\";\\n\"linux-image-azure\";\\n\"linux-tools-azure\";" /etc/apt/apt.conf.d/50unattended-upgrades
- log INFO "Added linux-azure to package blacklist for unattended upgrades"
+ log INFO "Added linux-azure to package denylist for unattended upgrades"
fi
}
@@ -679,7 +695,7 @@ check_for_nvidia_on_custom_or_native() {
else
check_for_nvidia_driver_on_custom_or_native
# prevent kernel upgrades from breaking driver
- blacklist_kernel_upgrade
+ denylist_kernel_upgrade
enable_nvidia_persistence_mode
query_nvidia_card
fi
@@ -756,7 +772,7 @@ install_kernel_devel_package() {
installed=1
fi
if [ "$installed" -eq 0 ]; then
- if [[ "$centos_ver" == 7.3.* ]] || [[ "$centos_ver" == 7.4.* ]] || [[ "$centos_ver" == 7.5.* ]] || [[ "$centos_ver" == 7.6.* ]]; then
+ if [[ "$centos_ver" == 7.3.* ]] || [[ "$centos_ver" == 7.4.* ]] || [[ "$centos_ver" == 7.5.* ]] || [[ "$centos_ver" == 7.6.* ]] || [[ "$centos_ver" == 7.7.* ]] || [[ "$centos_ver" == 7.8.* ]]; then
local pkg
pkg="${kernel_devel_package}.rpm"
download_file_as "http://vault.centos.org/${centos_ver}/updates/x86_64/Packages/${pkg}" "$pkg"
@@ -793,7 +809,7 @@ install_nvidia_software() {
exit 1
fi
set -e
- # blacklist nouveau from being loaded if rebooted
+ # denylist nouveau from being loaded if rebooted
local blfile
if [ "$DISTRIB_ID" == "ubuntu" ]; then
blfile=/etc/modprobe.d/blacklist-nouveau.conf
@@ -906,30 +922,31 @@ mount_azureblob_container() {
else
log DEBUG "blobfuse is not installed"
local mspkg
+ local bfpkg
if [ "$PACKAGER" == "apt" ]; then
mspkg=packages-microsoft-prod.deb
+ bfpkg="blobfuse=${BLOBFUSE_VERSION}"
if [ "$DISTRIB_ID" == "ubuntu" ]; then
download_file_as "https://packages.microsoft.com/config/${DISTRIB_ID}/${DISTRIB_RELEASE}/${mspkg}" "$mspkg"
elif [ "$DISTRIB_ID" == "debian" ]; then
install_packages apt-transport-https
- if [ "$DISTRIB_RELEASE" == "9" ]; then
- download_file_as "https://packages.microsoft.com/config/ubuntu/16.04/${mspkg}" "$mspkg"
- fi
+ download_file_as "https://packages.microsoft.com/config/${DISTRIB_ID}/${DISTRIB_RELEASE}/${mspkg}" "$mspkg"
fi
elif [ "$PACKAGER" == "yum" ]; then
mspkg=packages-microsoft-prod.rpm
+ bfpkg="blobfuse-${BLOBFUSE_VERSION}"
download_file_as "https://packages.microsoft.com/config/rhel/${DISTRIB_RELEASE}/${mspkg}" "$mspkg"
elif [ "$PACKAGER" == "zypper" ]; then
mspkg=packages-microsoft-prod.rpm
download_file_as "https://packages.microsoft.com/config/sles/${DISTRIB_RELEASE}/${mspkg}" "$mspkg"
fi
- if [ ! -f ${mspkg} ]; then
+ if [ ! -f ${mspkg} ] || [ -z "${bfpkg}" ]; then
echo "ERROR: unsupported distribution for Azure blob: $DISTRIB_ID $DISTRIB_RELEASE"
exit 1
fi
install_local_packages ${mspkg}
refresh_package_index
- install_packages blobfuse
+ install_packages ${bfpkg}
fi
./azureblob-mount.sh
rm azureblob-mount.sh
@@ -962,7 +979,9 @@ docker_pull_image_fallback() {
uhs=$(grep -i 'received unexpected HTTP status' <<<"$pull_out")
local tht
tht=$(grep -i 'TLS handshake timeout' <<<"$pull_out")
- if [[ -n "$tmr" ]] || [[ -n "$crbp" ]] || [[ -n "$epic" ]] || [[ -n "$erb" ]] || [[ -n "$uhs" ]] || [[ -n "$tht" ]]; then
+ local rcc
+ rcc=$(grep -i 'request canceled while waiting for connection' <<<"$pull_out")
+ if [[ -n "$tmr" ]] || [[ -n "$crbp" ]] || [[ -n "$epic" ]] || [[ -n "$erb" ]] || [[ -n "$uhs" ]] || [[ -n "$tht" ]] || [[ -n "$rcc" ]]; then
log WARNING "will retry: $pull_out"
else
log ERROR "$pull_out"
@@ -1013,7 +1032,9 @@ docker_pull_image() {
uhs=$(grep -i 'received unexpected HTTP status' <<<"$pull_out")
local tht
tht=$(grep -i 'TLS handshake timeout' <<<"$pull_out")
- if [[ -n "$tmr" ]] || [[ -n "$crbp" ]] || [[ -n "$epic" ]] || [[ -n "$erb" ]] || [[ -n "$uhs" ]] || [[ -n "$tht" ]]; then
+ local rcc
+ rcc=$(grep -i 'request canceled while waiting for connection' <<<"$pull_out")
+ if [[ -n "$tmr" ]] || [[ -n "$crbp" ]] || [[ -n "$epic" ]] || [[ -n "$erb" ]] || [[ -n "$uhs" ]] || [[ -n "$tht" ]] || [[ -n "$rcc" ]]; then
log WARNING "will retry: $pull_out"
else
log ERROR "$pull_out"
@@ -1063,7 +1084,7 @@ install_singularity() {
fi
install_packages squashfs-tools $cryptpkg
# fetch docker image for singularity bits
- local di="${SHIPYARD_IMAGE_PREFIX}:${singularityversion}-${disuffix}"
+ local di="${SINGULARITY_IMAGE_PREFIX}:${singularityversion}-${disuffix}"
log DEBUG "Image=${di} basedir=${singularity_basedir}"
docker_pull_image "$di"
mkdir -p /opt/singularity
@@ -1406,10 +1427,14 @@ install_beeond() {
local led
local pkgnum
if [ "$PACKAGER" == "apt" ]; then
- if { [ "$DISTRIB_ID" == "debian" ] && [ "$DISTRIB_RELEASE" == "9" ]; } || { [ "$DISTRIB_ID" == "ubuntu" ] && [ "$DISTRIB_RELEASE" == "16.04" ]; } then
- pkgnum=9
- elif [ "$DISTRIB_ID" == "ubuntu" ] && [ "$DISTRIB_RELEASE" == "18.04" ]; then
- logger ERROR "BeeGFS BeeOND is not supported on Ubuntu 18.04"
+ if [ "$DISTRIB_ID" == "ubuntu" ] ; then
+ if [ "$DISTRIB_RELEASE" == "16.04" ] || [ "$DISTRIB_RELEASE" == "18.04" ]; then
+                # NOTE: 18.04 aligns with buster/debian 10, but the deb9 package repo is used for it
+ pkgnum=9
+ fi
+ fi
+ if [ -z "$pkgnum" ]; then
+ logger ERROR "BeeGFS BeeOND is not supported on $DISTRIB_ID $DISTRIB_RELEASE"
exit 1
fi
download_file_as "https://www.beegfs.io/release/latest-stable/dists/beegfs-deb${pkgnum}.list" "/etc/apt/sources.list.d/beegfs-deb${pkgnum}.list"
@@ -1418,8 +1443,10 @@ install_beeond() {
elif [ "$PACKAGER" == "yum" ]; then
if [[ "$DISTRIB_RELEASE" == 7* ]]; then
pkgnum=7
- elif [[ "$DISTRIB_RELEASE" == 8* ]]; then
- pkgnum=8
+ fi
+ if [ -z "$pkgnum" ]; then
+ logger ERROR "BeeGFS BeeOND is not supported on $DISTRIB_ID $DISTRIB_RELEASE"
+ exit 1
fi
download_file_as "https://www.beegfs.io/release/latest-stable/dists/beegfs-rhel${pkgnum}.repo" "/etc/yum.repos.d/beegfs-rhel${pkgnum}.repo"
rpm --import "https://www.beegfs.io/release/latest-stable/gpg/RPM-GPG-KEY-beegfs"
@@ -1670,11 +1697,7 @@ setup_cascade() {
envfile=$AZ_BATCH_NODE_STARTUP_DIR/wd/.cascade_envfile
if [ $cascadecontainer -eq 1 ]; then
# store shipyard docker pull start
- if command -v python3 > /dev/null 2>&1; then
- drpstart=$(python3 -c 'import datetime;print(datetime.datetime.utcnow().timestamp())')
- else
- drpstart=$(python -c 'import datetime;import time;print(time.mktime(datetime.datetime.utcnow().timetuple()))')
- fi
+ drpstart=$(get_current_timestamp)
log DEBUG "Pulling $cascade_docker_image"
docker_pull_image "$cascade_docker_image"
if [ -n "$singularity_basedir" ]; then
@@ -1682,11 +1705,7 @@ setup_cascade() {
docker_pull_image "$cascade_singularity_image"
fi
# store shipyard pull end
- if command -v python3 > /dev/null 2>&1; then
- drpend=$(python3 -c 'import datetime;print(datetime.datetime.utcnow().timestamp())')
- else
- drpend=$(python -c 'import datetime;import time;print(time.mktime(datetime.datetime.utcnow().timetuple()))')
- fi
+ drpend=$(get_current_timestamp)
# create env file
cat > "$envfile" << EOF
PYTHONASYNCIODEBUG=1
@@ -1730,6 +1749,7 @@ run_cascade() {
if [ $cascadecontainer -ne 0 ]; then d="-d"; else d=""; fi
log_directory=$(pwd)
set +e
+ # shellcheck disable=SC2086
"${AZ_BATCH_TASK_WORKING_DIR}"/shipyard_cascade.sh \
-b "$block" \
-c "$concurrent_source_downloads" \
@@ -1836,51 +1856,58 @@ install_and_start_batch_insights() {
log INFO "Batch Insights enabled."
}
+print_configuration() {
+ echo "Configuration:"
+ echo "--------------"
+ echo "Batch Shipyard version: $shipyardversion"
+ echo "OS Distribution: $DISTRIB_ID $DISTRIB_RELEASE"
+ echo "Python=$PYTHON pip=$PIP"
+ echo "User mountpoint: $USER_MOUNTPOINT"
+ echo "Mount path: $MOUNTS_PATH"
+ echo "Custom image: $custom_image"
+ echo "Native mode: $native_mode"
+ echo "Blobxfer version: $blobxferversion"
+ echo "Singularity version: $singularityversion"
+ echo "Batch Insights: $batch_insights"
+ echo "Prometheus: NE=$PROM_NODE_EXPORTER_PORT,$PROM_NODE_EXPORTER_OPTIONS CA=$PROM_CADVISOR_PORT,$PROM_CADVISOR_OPTIONS"
+ echo "Network optimization: $networkopt"
+ echo "Encryption cert thumbprint: $encrypted"
+ echo "Install Kata Containers: $kata"
+ echo "Default container runtime: $default_container_runtime"
+ echo "Install BeeGFS BeeOND: $beeond"
+ echo "Storage cluster mounts (${#sc_args[@]}): ${sc_args[*]}"
+ echo "Custom mount: $SHIPYARD_CUSTOM_MOUNTS_FSTAB"
+ echo "Install LIS: $lis"
+ echo "GPU: $gpu"
+ echo "GPU ignore warnings: $SHIPYARD_GPU_IGNORE_WARNINGS"
+ echo "Azure Blob: $azureblob"
+ echo "Azure File: $azurefile"
+ echo "GlusterFS on compute: $gluster_on_compute"
+ echo "HPN-SSH: $hpnssh"
+ echo "Enable Azure Batch group for Docker access: $docker_group"
+ echo "Fallback registry: $fallback_registry"
+ echo "Docker image preload delay: $delay_preload"
+ echo "Cascade via container: $cascadecontainer"
+ echo "Concurrent source downloads: $concurrent_source_downloads"
+ echo "Block on images: $block"
+ echo "Singularity decryption certs: $SHIPYARD_SINGULARITY_DECRYPTION_CERTIFICATES"
+ echo ""
+}
+
log INFO "Prep start"
-echo "Configuration:"
-echo "--------------"
-echo "Custom image: $custom_image"
-echo "Native mode: $native_mode"
-echo "OS Distribution: $DISTRIB_ID $DISTRIB_RELEASE"
-echo "Batch Shipyard version: $shipyardversion"
-echo "Blobxfer version: $blobxferversion"
-echo "Singularity version: $singularityversion"
-echo "User mountpoint: $USER_MOUNTPOINT"
-echo "Mount path: $MOUNTS_PATH"
-echo "Batch Insights: $batch_insights"
-echo "Prometheus: NE=$PROM_NODE_EXPORTER_PORT,$PROM_NODE_EXPORTER_OPTIONS CA=$PROM_CADVISOR_PORT,$PROM_CADVISOR_OPTIONS"
-echo "Network optimization: $networkopt"
-echo "Encryption cert thumbprint: $encrypted"
-echo "Install Kata Containers: $kata"
-echo "Default container runtime: $default_container_runtime"
-echo "Install BeeGFS BeeOND: $beeond"
-echo "Storage cluster mounts (${#sc_args[@]}): ${sc_args[*]}"
-echo "Custom mount: $SHIPYARD_CUSTOM_MOUNTS_FSTAB"
-echo "Install LIS: $lis"
-echo "GPU: $gpu"
-echo "GPU ignore warnings: $SHIPYARD_GPU_IGNORE_WARNINGS"
-echo "Azure Blob: $azureblob"
-echo "Azure File: $azurefile"
-echo "GlusterFS on compute: $gluster_on_compute"
-echo "HPN-SSH: $hpnssh"
-echo "Enable Azure Batch group for Docker access: $docker_group"
-echo "Fallback registry: $fallback_registry"
-echo "Docker image preload delay: $delay_preload"
-echo "Cascade via container: $cascadecontainer"
-echo "Concurrent source downloads: $concurrent_source_downloads"
-echo "Block on images: $block"
-echo "Singularity decryption certs: $SHIPYARD_SINGULARITY_DECRYPTION_CERTIFICATES"
-echo ""
-
-# set python env vars
-export LC_ALL=en_US.UTF-8
-# store node prep start
-if command -v python3 > /dev/null 2>&1; then
- npstart=$(python3 -c 'import datetime;print(datetime.datetime.utcnow().timestamp())')
+# set locale
+if [ "$DISTRIB_ID" == "debian" ]; then
+ export LC_ALL=C.UTF-8
else
- npstart=$(python -c 'import datetime;import time;print(time.mktime(datetime.datetime.utcnow().timetuple()))')
+ export LC_ALL=en_US.UTF-8
fi
+set +e
+localectl
+set -e
+
+# store node prep start
+npstart=$(get_current_timestamp)
# get ephemeral device/disk
get_ephemeral_device
@@ -1891,6 +1918,9 @@ check_for_buggy_ntfs_mount
# set ephemeral device/user mountpoint
set_user_mountpoint
+# show configuration
+print_configuration
+
# save startup stderr/stdout
save_startup_to_volatile
@@ -2051,11 +2081,7 @@ process_storage_clusters
process_custom_fstab
# store node prep end
-if command -v python3 > /dev/null 2>&1; then
- npend=$(python3 -c 'import datetime;print(datetime.datetime.utcnow().timestamp())')
-else
- npend=$(python -c 'import datetime;import time;print(time.mktime(datetime.datetime.utcnow().timetuple()))')
-fi
+npend=$(get_current_timestamp)
# touch node prep finished file to preserve idempotency
touch "$nodeprepfinished"
diff --git a/scripts/shipyard_remotefs_addbrick.sh b/scripts/shipyard_remotefs_addbrick.sh
index 994763ac..07163cd4 100755
--- a/scripts/shipyard_remotefs_addbrick.sh
+++ b/scripts/shipyard_remotefs_addbrick.sh
@@ -115,14 +115,14 @@ gluster_add_bricks() {
gluster volume add-brick $gluster_volname ${volarg} ${bricks}
fi
# get info and status
- gluster volume info $gluster_volname
- gluster volume status $gluster_volname detail
+ gluster volume info "$gluster_volname"
+ gluster volume status "$gluster_volname" detail
# rebalance
echo "Rebalancing gluster volume $gluster_volname"
set +e
- if gluster volume rebalance $gluster_volname start; then
+ if gluster volume rebalance "$gluster_volname" start; then
sleep 5
- gluster volume rebalance $gluster_volname status
+ gluster volume rebalance "$gluster_volname" status
fi
set -e
}
diff --git a/scripts/shipyard_remotefs_bootstrap.sh b/scripts/shipyard_remotefs_bootstrap.sh
index 3a355b4b..54d8a583 100755
--- a/scripts/shipyard_remotefs_bootstrap.sh
+++ b/scripts/shipyard_remotefs_bootstrap.sh
@@ -6,7 +6,7 @@ set -o pipefail
export DEBIAN_FRONTEND=noninteractive
# constants
-GLUSTER_VERSION=7
+GLUSTER_VERSION=9
gluster_brick_mountpath=/gluster/brick
gluster_brick_location=$gluster_brick_mountpath/brick0
ipaddress=$(ip addr list eth0 | grep "inet " | cut -d' ' -f6 | cut -d/ -f1)
@@ -283,7 +283,7 @@ setup_glusterfs() {
echo "$myhostname:/$gluster_volname $mountpath glusterfs defaults,_netdev,noauto,x-systemd.automount,fetch-attempts=10 0 2" >> /etc/fstab
fi
# create mountpath
- mkdir -p $mountpath
+ mkdir -p "$mountpath"
# mount it
echo "Mounting gluster volume $gluster_volname locally to $mountpath"
local START
diff --git a/scripts/shipyard_slurm_computenode_nodeprep.sh b/scripts/shipyard_slurm_computenode_nodeprep.sh
index f5db3c1a..85d3bd58 100755
--- a/scripts/shipyard_slurm_computenode_nodeprep.sh
+++ b/scripts/shipyard_slurm_computenode_nodeprep.sh
@@ -36,6 +36,7 @@ if [ -e /etc/os-release ]; then
. /etc/os-release
DISTRIB_ID=$ID
DISTRIB_RELEASE=$VERSION_ID
+ DISTRIB_LIKE=$ID_LIKE
DISTRIB_CODENAME=$VERSION_CODENAME
if [ -z "$DISTRIB_CODENAME" ]; then
if [ "$DISTRIB_ID" == "debian" ] && [ "$DISTRIB_RELEASE" == "9" ]; then
@@ -56,17 +57,14 @@ if [ -z "${DISTRIB_CODENAME}" ]; then
fi
DISTRIB_ID=${DISTRIB_ID,,}
DISTRIB_RELEASE=${DISTRIB_RELEASE,,}
+DISTRIB_LIKE=${DISTRIB_LIKE,,}
DISTRIB_CODENAME=${DISTRIB_CODENAME,,}
# set distribution specific vars
PACKAGER=
PACKAGE_SUFFIX=
SLURM_PACKAGE=
-if [ "$DISTRIB_ID" == "ubuntu" ]; then
- PACKAGER=apt
- PACKAGE_SUFFIX=deb
- SLURM_PACKAGE="${SLURM_PACKAGE_DEBIAN}.${PACKAGE_SUFFIX}"
-elif [ "$DISTRIB_ID" == "debian" ]; then
+if [ "$DISTRIB_ID" == "ubuntu" ] || [ "$DISTRIB_ID" == "debian" ] || [ "$DISTRIB_LIKE" == "debian" ]; then
PACKAGER=apt
PACKAGE_SUFFIX=deb
SLURM_PACKAGE="${SLURM_PACKAGE_DEBIAN}.${PACKAGE_SUFFIX}"
@@ -209,7 +207,7 @@ refresh_package_index() {
apt-get update
rc=$?
elif [ "$PACKAGER" == "yum" ]; then
- yum makecache -y fast
+ yum makecache -y
rc=$?
elif [ "$PACKAGER" == "zypper" ]; then
zypper -n --gpg-auto-import-keys ref
diff --git a/scripts/shipyard_slurm_master_bootstrap.sh b/scripts/shipyard_slurm_master_bootstrap.sh
index 0250d903..b5aee2a4 100755
--- a/scripts/shipyard_slurm_master_bootstrap.sh
+++ b/scripts/shipyard_slurm_master_bootstrap.sh
@@ -36,6 +36,7 @@ if [ -e /etc/os-release ]; then
. /etc/os-release
DISTRIB_ID=$ID
DISTRIB_RELEASE=$VERSION_ID
+ DISTRIB_LIKE=$ID_LIKE
DISTRIB_CODENAME=$VERSION_CODENAME
if [ -z "$DISTRIB_CODENAME" ]; then
if [ "$DISTRIB_ID" == "debian" ] && [ "$DISTRIB_RELEASE" == "9" ]; then
@@ -56,6 +57,7 @@ if [ -z "${DISTRIB_CODENAME}" ]; then
fi
DISTRIB_ID=${DISTRIB_ID,,}
DISTRIB_RELEASE=${DISTRIB_RELEASE,,}
+DISTRIB_LIKE=${DISTRIB_LIKE,,}
DISTRIB_CODENAME=${DISTRIB_CODENAME,,}
# set distribution specific vars
@@ -65,7 +67,7 @@ SYSTEMD_PATH=/lib/systemd/system
if [ "$DISTRIB_ID" == "ubuntu" ]; then
PACKAGER=apt
USER_MOUNTPOINT=/mnt
-elif [ "$DISTRIB_ID" == "debian" ]; then
+elif [ "$DISTRIB_ID" == "debian" ] || [ "$DISTRIB_LIKE" == "debian" ]; then
PACKAGER=apt
elif [[ $DISTRIB_ID == centos* ]] || [ "$DISTRIB_ID" == "rhel" ]; then
PACKAGER=yum
@@ -244,7 +246,7 @@ refresh_package_index() {
apt-get update
rc=$?
elif [ "$PACKAGER" == "yum" ]; then
- yum makecache -y fast
+ yum makecache -y
rc=$?
elif [ "$PACKAGER" == "zypper" ]; then
zypper -n --gpg-auto-import-keys ref
diff --git a/scripts/windows/shipyard_nodeprep_nativedocker.ps1 b/scripts/windows/shipyard_nodeprep_nativedocker.ps1
index a4c924c7..571d6fdc 100644
--- a/scripts/windows/shipyard_nodeprep_nativedocker.ps1
+++ b/scripts/windows/shipyard_nodeprep_nativedocker.ps1
@@ -7,6 +7,7 @@ param(
[String] $x # blobxfer version
)
+Set-Variable ImdsVersion -option Constant -value "2021-01-01"
Set-Variable NodePrepFinished -option Constant -value (Join-Path $env:AZ_BATCH_NODE_ROOT_DIR -ChildPath "volatile" | Join-Path -ChildPath ".batch_shipyard_node_prep_finished")
Set-Variable VolatileStartupSave -option Constant -value (Join-Path $env:AZ_BATCH_NODE_ROOT_DIR -ChildPath "volatile" | Join-Path -ChildPath "startup" | Join-Path -ChildPath ".save")
Set-Variable MountsPath -option Constant -value (Join-Path $env:AZ_BATCH_NODE_ROOT_DIR -ChildPath "mounts")
@@ -46,6 +47,9 @@ Write-Host "Encrypted: $e"
Write-Host "Azure File: $a"
Write-Host ""
+# retrieve IMDS
+Invoke-RestMethod -Headers @{"Metadata"="true"} -Method GET -Proxy $Null -Uri "http://169.254.169.254/metadata/instance?api-version=${ImdsVersion}" | ConvertTo-Json -Depth 64 | Out-File imd.json
+
# touch volatile startup save file
New-Item -ItemType file $VolatileStartupSave -Force
@@ -82,12 +86,6 @@ if ($a) {
.\azurefile-mount.cmd
}
-if (Test-Path $NodePrepFinished -pathType Leaf)
-{
- Write-Host "$NodePrepFinished file exists, assuming successful completion of node prep"
- exit 0
-}
-
# download blobxfer binary
$bxurl = "https://github.com/Azure/blobxfer/releases/download/${x}/blobxfer-${x}-win-amd64.exe"
$bxoutf = Join-Path $Env:AZ_BATCH_TASK_WORKING_DIR -ChildPath "blobxfer.exe"
@@ -98,6 +96,13 @@ if (!$?)
throw "Download from $bxurl to $bxoutf failed"
}
+# check if this script was run successfully prior
+if (Test-Path $NodePrepFinished -pathType Leaf)
+{
+ Write-Host "$NodePrepFinished file exists, assuming successful completion of node prep"
+ exit 0
+}
+
# pull required images
Exec { docker pull mcr.microsoft.com/azure-batch/shipyard:${v}-cargo-windows }
diff --git a/shipyard.py b/shipyard.py
index 15f63433..d82ed584 100755
--- a/shipyard.py
+++ b/shipyard.py
@@ -1953,7 +1953,7 @@ def jobs(ctx):
@jobs.command('add')
@click.option(
'--recreate', is_flag=True,
- help='Recreate any completed jobs with the same id')
+ help='Recreate any completed job or job schedules with the same id')
@click.option(
'--tail',
help='Tails the specified file of the last job and task added')
diff --git a/site-extension/install.cmd b/site-extension/install.cmd
index 07aa22c3..e576616e 100644
--- a/site-extension/install.cmd
+++ b/site-extension/install.cmd
@@ -70,11 +70,6 @@ IF ERRORLEVEL 1 (
echo "pip install requirements.txt failed"
exit /b 1
)
-"%PYTHON%" -m pip install --upgrade --no-deps -r req_nodeps.txt
-IF ERRORLEVEL 1 (
- echo "pip install req_nodeps.txt failed"
- exit /b 1
-)
popd
REM futurize isodate (for some reason this is sometimes installed as python2)
diff --git a/slurm/requirements.txt b/slurm/requirements.txt
index 5a86fac5..953c8d34 100644
--- a/slurm/requirements.txt
+++ b/slurm/requirements.txt
@@ -1,8 +1,8 @@
-azure-batch==8.0.0
+azure-batch==9.0.0
azure-cosmosdb-table==1.0.6
-azure-mgmt-resource==6.0.0
-azure-mgmt-storage==6.0.0
-azure-storage-queue==2.1.0
-msrestazure==0.6.2
-python-dateutil==2.8.1
-requests==2.22.0
+azure-mgmt-resource==10.0.0
+azure-mgmt-storage==10.0.0
+azure-storage-queue>=2.1.0,<3
+msrestazure==0.6.4
+python-dateutil>=2.8.2,<3
+requests>=2.26.0,<3