branches:
  only:
    - master
sudo: false
group: deprecated-2017Q4
filter_secrets: false
os:
  - linux
language: generic
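# each line of the matrix below becomes one build job with those environment
# variables set; jobs without PETSc skip the MPI/PETSc steps, and only the
# job with UPLOAD_DOCS=1 collects coverage and publishes the docs.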
env:
  matrix:
    - PY=3.7 NUMPY=1.16 SCIPY=1.2 PETSc=3.11
    - PY=3.6 NUMPY=1.15 SCIPY=1.1
    - PY=3.6 NUMPY=1.14 SCIPY=1.0.1 PETSc=3.10.1 UPLOAD_DOCS=1
    - PY=2.7 NUMPY=1.13 SCIPY=1.1 PETSc=3.9.1
    - PY=2.7 NUMPY=1.12 SCIPY=1.1
git:
  depth: 99999
addons:
  apt:
    update: true
    sources:
      - ubuntu-toolchain-r-test
    packages:
      - gfortran
      - libblas-dev
      - liblapack-dev
      - libopenmpi-dev
      - openmpi-bin
  ssh_known_hosts:
    - web543.webfaction.com
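# cache pip downloads and the miniconda install between builds; before_install
# below decides whether the cached environment can actually be reused.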
cache:
  apt: true
  timeout: 300
  directories:
    - $HOME/.cache/pip
    - $HOME/miniconda
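# the secure value below is a Slack token encrypted against this repository's
# Travis key; only Travis can decrypt it, so it is safe to commit.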
notifications:
  slack:
    secure: Dd+tpZkz48Q47Y+PtdL4b+KAs55PsvWjt9ybhip6xljhA5kVsba9oZS+KsAC8RLWSzVJLOSjz3Cu3MsRym4sTd/g4Pbqyh0ldK2Xnl+n2JOgpPFSXtFuH4Ln3uWB6kYtUK6+aGIC8qhbvEt8tukTBT0RduEmdRyVIZ3oN7YjETPSZXvujeiUFLssfpZW2mqoA/tIgJHFSlySAp6J5694t2Z/p8sHOrK8G/Nm+qlk4xqXHvJ3xablcSBG4BZCrpqmMMdTLXBt2E2K9Rc1P2ZBIrSHVWfSLx+4n79U2385+og7miN1Zuf3gY3YuGKIwnBTtEzTu20905idkr4QdKELCBEcU4azdznwjvUkXWkiFAJII9UELTluSQmZX602zWk4AgJNeHxhN3EbBSMezfYVZjprhlAlwnZZv6t4qAkvuzb7KOA4s679xWzWOBOn1wkynfIF8A66APqssveyz/PvZHSjnHQoLgMU+kwzoX759o0Z/HuRlhCcjv0W9DWxU2bFNi/zVh9YyvR8fG15biGthzOyuf+CHjxohw+J6M+YdR1RIf1g/60nGUPHx4j4SN3kEFPmEDxzZT/f349gvaZGOmKXBi0wH8iY/i9RinM9LJB4t6chj2MkKwUA26bYaVaIO6FYPfE7r+tTG6OXdck4voCs/s4aa9VKEX97yhh0i9g=
before_install:
  # When building master (e.g. after a merge), create a fresh env as a thorough test.
  # For other builds (e.g. pull requests), try to use the cached env for faster testing.
  # We check for the environment directory to determine whether a cache exists.
  # If the directory doesn't exist but is slated to be cached later, Travis
  # unhelpfully creates it, which then causes "dir already exists" errors when
  # miniconda is actually installed, so we must non-intuitively delete the
  # directory here before re-creating it later.
  - if [ "$TRAVIS_REPO_SLUG" = "OpenMDAO/OpenMDAO" ] && [ "$TRAVIS_PULL_REQUEST" = "false" ]; then
      echo "building master";
      MASTER_BUILD=1;
      rm -rf $HOME/miniconda;
    elif [ -d $HOME/miniconda/envs/PY$PY ]; then
      echo "cached miniconda environment found";
      CACHED_ENV=1;
    else
      echo "cached miniconda environment not found";
      rm -rf $HOME/miniconda;
    fi
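# MASTER_BUILD and CACHED_ENV are ordinary shell variables; Travis runs a
# job's commands in one shell session, so they remain visible to the install
# and deploy steps below.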
install:
  # get key decrypted, placed, chmodded, and added for passwordless access to WebFaction
  - if [ "$encrypted_74d70a284b7d_key" ]; then
      openssl aes-256-cbc -K $encrypted_74d70a284b7d_key -iv $encrypted_74d70a284b7d_iv -in travis_deploy_rsa.enc -out /tmp/travis_deploy_rsa -d;
      eval "$(ssh-agent -s)";
      chmod 600 /tmp/travis_deploy_rsa;
      ssh-add /tmp/travis_deploy_rsa;
      echo -e "Host web543.webfaction.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config;
    fi
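  # the paired _key/_iv variables above match the pattern generated by the
  # `travis encrypt-file` CLI; travis_deploy_rsa.enc is the encrypted deploy
  # key checked into the repo, so no key material is stored in plain text.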
  # if we don't have a cached conda environment then build one, otherwise just activate the cached one
  - if [ "$CACHED_ENV" ]; then
      echo ">>> Using cached environment";
      export PATH=$HOME/miniconda/bin:$PATH;
      source $HOME/miniconda/bin/activate PY$PY;
    else
      echo ">>> Building python environment";
      echo " >> Installing conda";
      echo "  > Downloading miniconda";
      wget "https://repo.continuum.io/miniconda/Miniconda${PY:0:1}-4.5.11-Linux-x86_64.sh" -O miniconda.sh;
      chmod +x miniconda.sh;
      echo "  > Installing miniconda";
      ./miniconda.sh -b -p $HOME/miniconda;
      export PATH=$HOME/miniconda/bin:$PATH;
      echo " >> Creating conda environment";
      conda create --yes -n PY$PY python=$PY;
      source $HOME/miniconda/bin/activate PY$PY;
      echo " >> Installing non-pure Python dependencies from conda";
      conda install --yes numpy=$NUMPY scipy=$SCIPY cython swig;
      pip install --upgrade pip;
      echo " >> Installing forked python packages";
      pip install git+https://github.com/swryan/coveralls-python@work;
      echo " >> Installing pyOptSparse";
      echo "  > Cloning pyOptSparse from OpenMDAO's fork";
      git clone https://github.com/OpenMDAO/pyoptsparse.git;
      cd pyoptsparse;
      if [ "$SNOPT_LOCATION" ] && [ "${PY:0:1}" = "3" ]; then
        cd pyoptsparse/pySNOPT;
        echo "  > Secure copying SNOPT over SSH";
        scp -r "$SNOPT_LOCATION" .;
        cd ../..;
      fi
      echo "  > Installing pyOptSparse";
      python setup.py install;
      cd ..;
      if [ "$PETSc" ]; then
        echo " >> Installing parallel processing dependencies";
        pip install mpi4py;
        pip install petsc4py==$PETSc;
      fi
    fi
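  # a minimal sketch for reproducing this environment locally, assuming a
  # Linux x86_64 host with bash and the same PY/NUMPY/SCIPY values as the
  # build matrix above:
  #   wget "https://repo.continuum.io/miniconda/Miniconda${PY:0:1}-4.5.11-Linux-x86_64.sh" -O miniconda.sh
  #   bash miniconda.sh -b -p $HOME/miniconda
  #   export PATH=$HOME/miniconda/bin:$PATH
  #   conda create --yes -n PY$PY python=$PY
  #   source $HOME/miniconda/bin/activate PY$PY
  #   conda install --yes numpy=$NUMPY scipy=$SCIPY cython swig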
  # install OpenMDAO and its development and documentation dependencies
  # NOTE: not using -e on purpose here, to catch packaging errors
  - echo ">>> Installing OpenMDAO";
    pip install .[all];
  # display summary of installed packages and their versions
  - conda list
script:
  # prevent OpenMPI warning messages
  - export OMPI_MCA_btl=^openib
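  # the leading ^ is OpenMPI's MCA exclusion syntax: it disables the openib
  # byte transfer layer (BTL) rather than selecting it.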
  # make docs first
  - cd openmdao/docs;
  - if [ "$PETSc" ]; then
      make travis;
    fi
  # run the tests from the docs directory to verify they work when not run from the repo's top level.
  # only do coverage on the doc-upload machine.
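  # testflo's -n 1 runs the suite in a single process; the --cover-omit
  # patterns exclude tests, docs, and tooling from coverage measurement.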
- if [ "$UPLOAD_DOCS" ]; then
testflo -n 1 openmdao --coverage --coverpkg openmdao --cover-omit \*tests/\* --cover-omit \*devtools/\* --cover-omit \*test_suite/\* --cover-omit \*docs/\* --cover-omit \*code_review/\*;
else
testflo -n 1 --timeout=120 openmdao;
fi
after_success:
  # again, only run coverage operations on the upload machine after success.
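  # the sed pass strips the miniconda site-packages prefix from file paths in
  # coveralls.json so that results map back to the repository source tree.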
- if [ "$UPLOAD_DOCS" ]; then
coveralls --rcfile=../../.coveragerc --output=coveralls.json;
sed 's/\/home\/travis\/miniconda\/lib\/python'"$PY"'\/site-packages\///g' < coveralls.json > coveralls-upd.json;
coveralls --upload=coveralls-upd.json;
fi
deploy:
  provider: script
  skip_cleanup: true
  script:
    # only deploy docs in a build after a PR or merge is accepted
    - if [ "$MASTER_BUILD" ] && [ "$UPLOAD_DOCS" ]; then
        python _utils/upload_doc_version.py;
      fi
  on:
    branch: master