Run example notebooks on travisCI (Closes #34) (#44)
* Run example notebooks on travisCI (Closes #34)

* Allow passing multiple notebooks and folders at once; run all example notebooks

* Use pypi jupyter instead of apt ipython

* Temporarily disable conversion_reaction.ipynb optimization, since the failing optimization is unrelated to this PR
dweindl committed Sep 4, 2018
1 parent e149ca6 commit e121072
Showing 3 changed files with 41 additions and 10 deletions.
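For context, the new CI step boils down to executing each notebook headlessly with nbconvert, which exits non-zero if any cell raises. A minimal sketch of the equivalent manual invocation (same flags as in test/runNotebook.sh below, with output discarded rather than captured):

    # execute the notebook top to bottom; a failing cell makes nbconvert exit non-zero
    jupyter nbconvert --execute --to markdown --stdout doc/example/conversion_reaction.ipynb > /dev/null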
5 changes: 4 additions & 1 deletion .travis.yml
@@ -20,24 +20,27 @@ addons:
- libatlas-base-dev
- lcov
- swig3.0

install:
- pyenv shell 2.7 3.6
- mkdir -p ~/bin/ && ln -s /usr/bin/swig3.0 ~/bin/swig && export PATH=~/bin/:$PATH
- pip3 install --upgrade setuptools wheel pkgconfig
- pip3 install --upgrade jupyter # for jupyter notebooks
- pip3 install --upgrade -r ./.travis_pip_reqs.txt
- pip3 install .

before_script:
# use a no-screen x-server to test plotting
- "export DISPLAY=:99.0"
- "sh -e /etc/init.d/xvfb start"
- sleep 3 # give xvfb some time to start

# run tests

script:
- python3 -m flake8 --ignore=F401 --exclude=build,doc,example
- python3 -m pytest --cov=pypesto ./test
- coverage xml
- test/runNotebook.sh doc/example/

after_success:
- bash <(curl -s https://codecov.io/bash)
18 changes: 9 additions & 9 deletions doc/example/conversion_reaction.ipynb
@@ -190,10 +190,10 @@
"# maybe also scaling / transformation of parameters encoded here\n",
"\n",
"# do the optimization\n",
"result = pypesto.minimize(problem=problem, \n",
" optimizer=optimizer, \n",
" n_starts=10, \n",
" startpoint_method=pypesto.optimize.startpoint.uniform)\n",
"#result = pypesto.minimize(problem=problem, \n",
"# optimizer=optimizer, \n",
"# n_starts=10, \n",
"# startpoint_method=pypesto.optimize.startpoint.uniform)\n",
"# optimize is a function since it does not need an internal memory,\n",
"# just takes input and returns output in the form of a Result object\n",
"# 'result' parameter: e.g. some results from somewhere -> pick best start points"
@@ -239,7 +239,7 @@
"# different functions for different plotting types\n",
"import pypesto.visualize\n",
"\n",
"pypesto.visualize.waterfall(result)"
"#pypesto.visualize.waterfall(result)"
]
},
{
@@ -287,9 +287,9 @@
"# not have one huge class but\n",
"# maybe simplified views on it for optimization, profiles and sampling is still to be solved\n",
"\n",
"profiler = pypesto.Profiler()\n",
"# profiler = pypesto.Profiler()\n",
"\n",
"result = pypesto.profile(problem, profiler, result=None)\n",
"# result = pypesto.profile(problem, profiler, result=None)\n",
"# possibly pass result object from optimization to get good parameter guesses"
]
},
@@ -306,9 +306,9 @@
"metadata": {},
"outputs": [],
"source": [
"sampler = pypesto.Sampler()\n",
"# sampler = pypesto.Sampler()\n",
"\n",
"result = pypesto.sample(problem, sampler, result=None)"
"# result = pypesto.sample(problem, sampler, result=None)"
]
},
{
28 changes: 28 additions & 0 deletions test/runNotebook.sh
@@ -0,0 +1,28 @@
#!/bin/bash
# Run the jupyter notebooks given on the command line, showing output only on error.
# If a directory is provided, run all notebooks it contains, non-recursively.

runNotebook () {
    tempfile=$(mktemp)  # mktemp rather than the deprecated `tempfile`
    jupyter nbconvert --debug --stdout --execute --to markdown "$@" &> "$tempfile"
    ret=$?
    if [[ $ret != 0 ]]; then
        # show the captured output only on failure, then abort the whole run
        cat "$tempfile"
        rm "$tempfile"
        exit $ret
    fi
    rm "$tempfile"
}

if [ $# -eq 0 ]; then
    echo "Usage: $0 [notebook.ipynb] [dirContainingNotebooks/]"
    exit 1
fi

for arg in "$@"; do
    if [ -d "$arg" ]; then
        for notebook in "$arg"/*.ipynb; do
            [ -e "$notebook" ] || continue  # directory contains no notebooks
            runNotebook "$notebook"
        done
    elif [ -f "$arg" ]; then
        runNotebook "$arg"
    fi
done
exit 0
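
A usage sketch (paths are illustrative; any mix of notebook files and directories is accepted, handled by the argument loop above):

    # run all notebooks in doc/example/, then one specific notebook
    test/runNotebook.sh doc/example/ doc/example/conversion_reaction.ipynb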
