language: python
# Use container-based infrastructure
sudo: false
env:
# Enable python 2 and python 3 builds
# DEPS=full: build optional dependencies: pandas, nitime, statsmodels,
# scikit-learn, patsy, nibabel, pillow;
# in the case of Python 2, also mayavi, traits, pysurfer
# DEPS=minimal: don't build optional dependencies; tests that require those
# dependencies are supposed to be skipped
#
# Note that we don't run coverage on Py3k anyway because it slows our tests
# by a factor of 2 (!), so we make this our "from install dir" run.
#
# Run one test (3.5) with a non-default stim channel to make sure our
# tests are explicit about channels.
#
# Must force libpng version to avoid silly libpng.so.15 error (MPL 1.1 needs it)
#
# Conda currently has a packaging bug with mayavi/traits/numpy where 1.10 can't be used
# but breaks sklearn on install; hopefully the NUMPY=1.9 pin on the 2.7 full build can eventually be removed
# Mayavi=4.3 installs on the old 2.7 build, but doesn't work properly due to a traits bug
- PYTHON=2.7 DEPS=full TEST_LOCATION=src NUMPY="=1.9" SCIPY="=0.17"
- PYTHON=2.7 DEPS=nodata TEST_LOCATION=src MNE_DONTWRITE_HOME=true MNE_FORCE_SERIAL=true MNE_SKIP_NETWORK_TEST=1 # also runs flake8
- PYTHON=3.5 DEPS=full TEST_LOCATION=install MNE_STIM_CHANNEL=STI101
- PYTHON=2.7 DEPS=full TEST_LOCATION=src NUMPY="=1.8" SCIPY="=0.12" MPL="=1.3" SKLEARN="=0.14" PANDAS="=0.12"
- PYTHON=2.7 DEPS=minimal TEST_LOCATION=src
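# Each line above defines one build in the Travis matrix; version variables that are
# left unset (e.g. MPL, MAYAVI, LIBPNG) simply fall back to whatever conda resolves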
# Setup anaconda
before_install:
- wget -q http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
- chmod +x miniconda.sh
- ./miniconda.sh -b -p /home/travis/miniconda
- export PATH=/home/travis/miniconda/bin:$PATH
- conda update --yes --quiet conda
# We need to create a (fake) display on Travis (allows Mayavi tests to run)
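# Xvfb serves display :99 with a 1400x900, 24-bit screen and the GLX extension so that
# Mayavi and other GUI backends can render without a real display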
- export DISPLAY=:99.0
- /sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -screen 0 1400x900x24 -ac +extension GLX +render -noreset
install:
- conda create -n testenv --yes pip python=$PYTHON
- source activate testenv
- ENSURE_PACKAGES="numpy$NUMPY scipy$SCIPY matplotlib$MPL libpng$LIBPNG"
- conda install --yes --quiet $ENSURE_PACKAGES nose coverage
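# For the first build above, this expands to roughly:
#   conda install --yes --quiet numpy=1.9 scipy=0.17 matplotlib libpng nose coverage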
# We have to repeat e.g. numpy$NUMPY in each "conda install" call below to ensure
# the recommended (higher) versions are not automatically installed instead!
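# (A later bare "conda install pandas", for example, could pull in a newer numpy and
# silently undo the pin.)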
- if [ "${DEPS}" == "full" ]; then
    curl https://staff.washington.edu/larsoner/minimal_cmds.tar.gz | tar xz;
    export MNE_ROOT="${PWD}/minimal_cmds";
    export NEUROMAG2FT_ROOT="${PWD}/minimal_cmds/bin";
    source ${MNE_ROOT}/bin/mne_setup_sh;
    conda install --yes --quiet $ENSURE_PACKAGES pandas$PANDAS scikit-learn$SKLEARN patsy h5py pillow;
    pip install -q joblib nibabel;
    if [ "${PYTHON}" == "3.5" ]; then
      conda install --yes --quiet $ENSURE_PACKAGES ipython;
    else
      conda install --yes --quiet $ENSURE_PACKAGES ipython==1.1.0 statsmodels pandas$PANDAS;
      pip install nitime faulthandler;
      if [ "${NUMPY}" != "=1.8" ]; then
        conda install --yes --quiet $ENSURE_PACKAGES mayavi$MAYAVI;
        pip install pysurfer;
      fi;
    fi;
  fi;
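# The nodata build skips the testing dataset entirely and only needs doc/style tooling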
- if [ "${DEPS}" == "nodata" ]; then
    conda install --yes $ENSURE_PACKAGES sphinx;
    pip install flake8 codespell numpydoc;
  fi;
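# codecov uploads coverage reports; nose-timer provides the --with-timer/--timer-top-n options used in the script step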
- pip install -q codecov nose-timer
# check our versions for the major packages
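# e.g. NUMPY="=1.9" makes ${NUMPY:(-3)} equal to "1.9", which must match the first three characters of the installed version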
- NP_VERSION=`python -c 'import numpy; print(numpy.__version__)'`
- if [ -n "$NUMPY" ] && [ "${NUMPY:(-3)}" != "${NP_VERSION::3}" ]; then
echo "Incorrect numpy version $NP_VERSION";
exit 1;
fi;
- SP_VERSION=`python -c 'import scipy; print(scipy.__version__)'`
- if [ -n "$SCIPY" ] && [ "${SCIPY:(-4)}" != "${SP_VERSION::4}" ]; then
echo "Incorrect scipy version $SP_VERSION";
exit 1;
fi;
- MPL_VERSION=`python -c 'import matplotlib; print(matplotlib.__version__)'`
- if [ -n "$MPL" ] && [ "${MPL:(-3)}" != "${MPL_VERSION::3}" ]; then
echo "Incorrect matplotlib version $MPL_VERSION";
exit 1;
fi;
# Suppress parallel output for logging cleanliness
- export MNE_LOGGING_LEVEL=warning
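# Build and install into the conda env; depending on TEST_LOCATION, tests run either from the source tree or from this installed copy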
- python setup.py build
- python setup.py install
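# Smoke-test a few mne command-line scripts to check that the entry points were installed correctly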
- myscripts='browse_raw bti2fiff surf2bem'
- for script in $myscripts; do mne $script --help; done;
- SRC_DIR=$(pwd)
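# Leave the source tree so that "import mne" below picks up the installed package rather than the checkout in ${SRC_DIR}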
- cd ~
# Trigger download of testing data. Note that
# the testing dataset has been constructed to contain the necessary
# files to act as a FREESURFER_HOME for the coreg tests
- if [ "${DEPS}" != "nodata" ]; then
    python -c 'import mne; mne.datasets.testing.data_path(verbose=True)';
    if [ "${DEPS}" == "full" ]; then
      export FREESURFER_HOME=$(python -c 'import mne; print(mne.datasets.testing.data_path())');
      export MNE_SKIP_FS_FLASH_CALL=1;
    fi;
  else
    export MNE_SKIP_TESTING_DATASET_TESTS=true;
  fi;
- MNE_DIR=$(python -c 'import mne;print(mne.__path__[0])')
# We run two versions: one out of the source directory (that makes
# coveralls coverage work), and one out of the install directory (that
# ensures we have included all necessary files).
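# The installed package does not ship the test data files or the coverage/test configs, so link them in from ${SRC_DIR}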
- if [ "${TEST_LOCATION}" == "install" ]; then
    ln -s ${SRC_DIR}/mne/io/tests/data ${MNE_DIR}/io/tests/data;
    ln -s ${SRC_DIR}/mne/io/bti/tests/data ${MNE_DIR}/io/bti/tests/data;
    ln -s ${SRC_DIR}/mne/io/edf/tests/data ${MNE_DIR}/io/edf/tests/data;
    ln -s ${SRC_DIR}/mne/io/kit/tests/data ${MNE_DIR}/io/kit/tests/data;
    ln -s ${SRC_DIR}/mne/io/brainvision/tests/data ${MNE_DIR}/io/brainvision/tests/data;
    ln -s ${SRC_DIR}/mne/io/egi/tests/data ${MNE_DIR}/io/egi/tests/data;
    ln -s ${SRC_DIR}/mne/io/nicolet/tests/data ${MNE_DIR}/io/nicolet/tests/data;
    ln -s ${SRC_DIR}/mne/preprocessing/tests/data ${MNE_DIR}/preprocessing/tests/data;
    ln -s ${SRC_DIR}/setup.cfg ${MNE_DIR}/../setup.cfg;
    ln -s ${SRC_DIR}/.coveragerc ${MNE_DIR}/../.coveragerc;
    cd ${MNE_DIR}/../;
    COVERAGE=;
  else
    cd ${SRC_DIR};
    COVERAGE=--with-coverage;
  fi;
script:
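# Skip tests tagged ultra_slow_test and report the 30 slowest tests; $COVERAGE is only set for the src run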
- nosetests -a '!ultra_slow_test' --with-timer --timer-top-n 30 --verbosity=2 $COVERAGE
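# Style (make flake) and spelling (make codespell-error) checks only run on the nodata build, which installed flake8 and codespell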
- if [ "${DEPS}" == "nodata" ]; then
    make flake;
  fi;
- if [ "${DEPS}" == "nodata" ]; then
    make codespell-error;
  fi;
after_success:
# Need to run from the source dir to execute "git" commands
- if [ "${TEST_LOCATION}" == "src" ]; then
echo "Running codecov";
cd ${SRC_DIR};
codecov;
fi;