# config.yml
version: 2
jobs:
build:
environment:
TZ: "/usr/share/zoneinfo/America/Los_Angeles"
SCRATCH: "/scratch"
docker:
- image: docker:18.01.0-ce-git
working_directory: /tmp/src/niworkflows
steps:
- run:
name: Install parallel gzip and python3
command: |
apk add --no-cache pigz python3
- restore_cache:
keys:
- docker-v1-{{ .Branch }}-{{ epoch }}
- docker-v1-{{ .Branch }}-
- docker-v1-master-
- docker-v1-
paths:
- /tmp/cache/docker.tar.gz
- checkout
- setup_remote_docker
- run:
name: Load Docker image layer cache
no_output_timeout: 30m
command: |
docker info
set +o pipefail
if [ -f /tmp/cache/docker.tar.gz ]; then
pigz -d --stdout /tmp/cache/docker.tar.gz | docker load
docker images
fi
- run:
name: Build Docker image
no_output_timeout: 60m
command: |
THISVERSION=$( python3 get_version.py )
e=1 && for i in {1..5}; do
docker build \
--cache-from=niworkflows:py3 \
--rm=false \
-t niworkflows:py3 \
--build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` \
--build-arg VCS_REF=`git rev-parse --short HEAD` \
--build-arg VERSION=${CIRCLE_TAG:-$THISVERSION} . \
&& e=0 && break || sleep 15
done && [ "$e" -eq "0" ]
- run:
name: Docker save
no_output_timeout: 40m
command: |
mkdir -p /tmp/cache
docker save ubuntu:xenial-20161213 niworkflows:py3 \
| pigz -8 -p 3 > /tmp/cache/docker.tar.gz
- save_cache:
key: docker-v1-{{ .Branch }}-{{ epoch }}
paths:
- /tmp/cache/docker.tar.gz
# - persist_to_workspace:
# root: /tmp
# paths:
# - cache/docker.tar.gz
get_data:
machine:
# Ubuntu 14.04 with Docker 17.10.0-ce
image: circleci/classic:201711-01
working_directory: /home/circleci/data
steps:
- restore_cache:
keys:
- data-v1-{{ epoch }}
- data-v1-
- run:
name: Get test data from ds000003
command: |
mkdir -p /tmp/data
if [[ ! -d /tmp/data/ds003_downsampled ]]; then
wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q \
-O ds003_downsampled.tar.gz "https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/57f328f6b83f6901ef94cf70"
tar xvzf ds003_downsampled.tar.gz -C /tmp/data/
else
echo "Dataset ds000003 was cached"
fi
- run:
name: Get BIDS test data stub
command: |
mkdir -p /tmp/data
if [[ ! -d /tmp/data/BIDS-examples-1-enh-ds054 ]]; then
wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q \
-O BIDS-examples-1-enh-ds054.zip "http://github.com/chrisfilo/BIDS-examples-1/archive/enh/ds054.zip"
unzip BIDS-examples-1-enh-ds054.zip -d /tmp/data/
else
echo "BIDS stub was cached"
fi
- run:
name: Store FreeSurfer license file
command: |
mkdir -p /tmp/fslicense
printf "$FS_LICENSE_CONTENT" | base64 -d >> /tmp/fslicense/license.txt
- persist_to_workspace:
root: /tmp
paths:
- data
- fslicense
- save_cache:
key: data-v1-{{ epoch }}
paths:
- /tmp/data
test_pytest:
machine:
image: circleci/classic:201711-01
working_directory: /tmp/tests
steps:
- attach_workspace:
at: /tmp
- restore_cache:
keys:
- docker-v1-{{ .Branch }}-{{ epoch }}
- docker-v1-{{ .Branch }}-
- docker-v1-master-
- docker-v1-
- checkout:
path: /tmp/src/niworkflows
- run:
name: Load Docker image layer cache
no_output_timeout: 30m
command: |
docker info
set +o pipefail
if [ -f /tmp/cache/docker.tar.gz ]; then
sudo apt update && sudo apt -y install pigz
pigz -d --stdout /tmp/cache/docker.tar.gz | docker load
docker images
fi
- run:
name: Set PR number
command: |
echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV
source $BASH_ENV
echo $CIRCLE_PR_NUMBER
- run:
name: Get codecov
command: python -m pip install codecov
- run:
name: Run unit tests
no_output_timeout: 2h
command: |
sudo setfacl -d -m group:ubuntu:rwx $PWD
sudo setfacl -m group:ubuntu:rwx $PWD
docker run -it --rm=false \
-e TEST_DATA_HOME=/data -v /tmp/data:/data \
-v ${PWD}:/tmp niworkflows:py3 \
pytest --junit-xml=/tmp/pytest.xml \
--cov niworkflows --cov-report xml:/tmp/unittests.xml \
--ignore=/src/niworkflows/niworkflows/tests/ \
--ignore=/src/niworkflows/niworkflows/interfaces/ants.py \
/src/niworkflows/niworkflows
- run:
name: Submit unit test coverage
command: |
python -m codecov --file unittests.xml --root /tmp/src/niworkflows \
--flags unittests -e CIRCLE_JOB
- run:
name: Run reportlet tests
no_output_timeout: 2h
command: |
docker run -it --rm=false \
-e SAVE_CIRCLE_ARTIFACTS="/tmp" \
-e TEST_DATA_HOME=/data -v /tmp/data:/data \
-v /tmp/fslicense/license.txt:/opt/freesurfer/license.txt:ro \
-v ${PWD}:/tmp niworkflows:py3 \
pytest -n auto --junit-xml=/tmp/reportlets.xml \
--cov niworkflows --cov-report xml:/tmp/reportlets.xml \
/src/niworkflows/niworkflows/tests/
- run:
name: Submit reportlet test coverage
command: |
python -m codecov --file reportlets.xml --root /tmp/src/niworkflows \
--flags reportlettests -e CIRCLE_JOB
- store_artifacts:
path: /tmp/tests
- store_test_results:
path: /tmp/tests
  # Build an sdist and upload it to PyPI. Gated to tag builds by the
  # workflow filters below; expects twine credentials in the environment.
  deploy:
    machine:
      image: circleci/classic:201711-01
    working_directory: /tmp/src/niworkflows
    steps:
      - checkout
      - run:
          name: Deploy to PyPi
          command: |
            pip install "setuptools>=27.0" twine docutils
            # Pin the package version to the git tag that triggered the build
            # and make sure the VERSION file ships in the sdist.
            echo "${CIRCLE_TAG}" > niworkflows/VERSION
            echo "include niworkflows/VERSION" >> MANIFEST.in
            # check -r -s: fail if the long description won't render on PyPI.
            python setup.py check -r -s
            python setup.py sdist
            twine upload dist/*
# Pipeline graph: build + get_data run in parallel, test_pytest needs both,
# and deploy runs only for version tags after tests pass.
workflows:
  version: 2
  build_test_deploy:
    jobs:
      # `tags: only: /.*/` is required on every upstream job, otherwise
      # tag-triggered builds would never reach deploy.
      - build:
          filters:
            tags:
              only: /.*/
      - get_data:
          filters:
            tags:
              only: /.*/
      - test_pytest:
          requires:
            - build
            - get_data
          filters:
            branches:
              # Skip tests on documentation-only branches (doc/* or docs/*).
              ignore: /docs?\/.*/
            tags:
              only: /.*/
      - deploy:
          requires:
            - test_pytest
          filters:
            branches:
              # Never deploy from a branch push...
              ignore: /.*/
            tags:
              # ...only from tags (any tag name).
              only: /.*/