Skip to content

Commit

Permalink
Merge pull request #307 from hanke/v24docfix
Browse files Browse the repository at this point in the history
WiP: Kitchen sink of updates for v2.4
  • Loading branch information
yarikoptic committed May 8, 2015
2 parents bf4632b + 24508d0 commit a0cb050
Show file tree
Hide file tree
Showing 181 changed files with 861 additions and 594 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -469,7 +469,7 @@ testexamples: te-svdclf te-smlr te-sensanas te-pylab_2d \
te-erp_plot te-match_distribution te-permutation_test \
te-searchlight_minimal te-smlr te-start_easy te-topo_plot \
te-gpr te-gpr_model_selection0 te-mri_plot te-searchlight \
te-clfs_examples
te-eventrelated te-clfs_examples

testdocstrings: dt-mvpa

Expand Down
30 changes: 14 additions & 16 deletions bin/pymvpa2-tutorial
Original file line number Diff line number Diff line change
Expand Up @@ -107,24 +107,22 @@ def main():
if notebook_srcdir is False:
notebook_srcdir = '/usr/share/doc/python-mvpa2/notebooks'
# assemble session dir
for dir in ('data', 'results'):
src = os.path.join(os.path.abspath(tutorial_datadir), dir)
dst = os.path.join(os.path.abspath(workdir), dir)
if os.path.lexists(dst):
print "Use existing tutorial '%s' path at '%s'." \
% (dir, dst)
continue
dst = os.path.join(os.path.abspath(workdir), 'data')
src = os.path.abspath(tutorial_datadir)
if os.path.lexists(dst):
print "Use existing tutorial data path at '%s'." % (dst,)
else:
if not os.path.exists(src):
# only the data dir is absolutely critical
if dir == 'data':
print "Cannot find tutorial data at '%s'" % src
sys.exit(1)
else:
continue
if hasattr(os, 'symlink'):
os.symlink(src, dst)
print "Cannot find tutorial data at '%s'" % (src,)
sys.exit(1)
else:
shutil.copytree(src, dst)
if hasattr(os, 'symlink'):
os.symlink(src, dst)
else:
shutil.copytree(src, dst)
# point pymvpa to the personalized folder
os.environ['MVPA_LOCATION_TUTORIAL_DATA'] = dst
# prep IPython call
ipy_args = sys.argv[1:]
ipy_args.append('--quiet')
if not sum([arg.startswith('--pylab') for arg in ipy_args]):
Expand Down
10 changes: 2 additions & 8 deletions doc/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -40,20 +40,14 @@ SPHINXEXT_PATH=$(shell python -c 'import sphinx.ext; from distutils.version impo
generate: generate-stamp
generate-stamp: cache_intersphinx
mkdir -p $(BUILDDIR)
@MVPA_EXTERNALS_RAISE_EXCEPTION=off \
PYTHONPATH=$(CURDIR)/sphinxext:$(CURDIR)/..:$(PYTHONPATH) \
$(PYTHON) $(SPHINXEXT_PATH)/autosummary/generate.py \
-t $(CURDIR)/templates -o source/generated source/*.rst
# wipe everything that is not pymvpa to prevent suffering from the bugs
# of other in subsequent stages
find source/generated -type f ! -name 'mvpa*' -delete
mkdir -p source/generated
set -e; set -o pipefail; for man in $(BUILDDIR)/man/pymvpa*; do \
manfile=$$(basename $${man}) ; \
cmd=$${manfile#pymvpa2-*} ; \
cmd=$${cmd%*.1} ; \
man2html $${man} | sed -n '/<HR>/{:a;n;/<HR>/b;p;ba}' | sed -n '/<A NAME/{:a;n;/This document was created by/b;p;ba}' > source/generated/$${manfile}.html ; \
underline="*****************$$(echo $${cmd} | tr A-Za-z0-9 \*)" ; \
echo ".. index:: command line, $${cmd}\n.. _chap_cmdline_$${cmd}\n\n$${underline}\nMan page for \`\`$${cmd}\`\`\n$${underline}\n\n.. raw:: html\n :file: $${manfile}.html\n" > source/generated/cmd_$${cmd}.rst ; \
printf ".. index:: command line, $${cmd}\n.. _chap_cmdline_$${cmd}:\n\n$${underline}\nMan page for \`\`$${cmd}\`\`\n$${underline}\n\n.. raw:: html\n :file: $${manfile}.html\n" > source/generated/cmd_$${cmd}.rst ; \
done
@touch $@

Expand Down
2 changes: 1 addition & 1 deletion doc/examples/cmdline/fmri_analyses.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ set -u
# BOILERPLATE

# where is the data; support standard env variable switch
dataroot=${MVPA_DATA_ROOT:-"mvpa2/data/openfmri"}
dataroot=${MVPA_DATA_ROOT:-"mvpa2/data/haxby2001"}

# where to place output; into tmp by default
outdir=${MVPA_EXAMPLE_WORKDIR:-}
Expand Down
6 changes: 3 additions & 3 deletions doc/examples/eventrelated.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@

from mvpa2.suite import *

ds = load_datadb_tutorial_data(roi=(36,38,39,40))
ds = load_tutorial_data(roi=(36,38,39,40))

"""
Expand Down Expand Up @@ -137,7 +137,7 @@
"""

clf = LinearCSVMC()
sclf = SplitClassifier(clf, enable_ca=['confusion'])
sclf = SplitClassifier(clf, enable_ca=['stats'])

# Compute sensitivity, which internally trains the classifier
analyzer = sclf.get_sensitivity_analyzer()
Expand All @@ -150,7 +150,7 @@
"""

print sclf.ca.confusion
print sclf.ca.stats

"""
Expand Down
4 changes: 2 additions & 2 deletions doc/examples/mri_plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@
from mvpa2.suite import *

# load PyMVPA example dataset
datapath = os.path.join(mvpa2.cfg.get('location', 'tutorial data'), 'data')
dataset = load_datadb_tutorial_data(roi='gray')
datapath = os.path.join(mvpa2.cfg.get('location', 'tutorial data'), 'haxby2001')
dataset = load_tutorial_data(roi='gray')

# do chunkswise linear detrending on dataset
poly_detrend(dataset, chunks_attr='chunks')
Expand Down
29 changes: 14 additions & 15 deletions doc/examples/rsa_fmri.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@

import numpy as np
import pylab as pl
from os.path import join as pjoin
from mvpa2 import cfg

"""
Expand All @@ -39,9 +40,9 @@
"""

# load dataset -- ventral and occipital ROIs
from mvpa2.misc.data_generators import load_datadb_tutorial_data
from mvpa2 import pymvpa_datadbroot
ds = load_datadb_tutorial_data(roi=(15, 16, 23, 24, 36, 38, 39, 40, 48))
from mvpa2.datasets.sources.native import load_tutorial_data
datapath = pjoin(cfg.get('location', 'tutorial data'))
ds = load_tutorial_data(roi=(15, 16, 23, 24, 36, 38, 39, 40, 48))

"""
We only do minimal pre-processing: linear trend removal and Z-scoring all voxel
Expand Down Expand Up @@ -204,25 +205,23 @@ def plot_mtx(mtx, labels, title):
# plot the spatial distribution using NiPy
vol = ds.a.mapper.reverse1(slres_tdsm.samples[0])
import nibabel as nb
from os.path import join as pjoin
anat = nb.load(pjoin(pymvpa_datadbroot, 'tutorial_data', 'tutorial_data',
'data', 'sub001', 'anatomy', 'highres001.nii.gz'))
anat = nb.load(pjoin(datapath, 'sub001', 'anatomy', 'highres001.nii.gz'))

from nipy.labs.viz_tools.activation_maps import plot_map
pl.figure(figsize=(15,4))
sp = pl.subplot(121)
pl.title('Distribution of target similarity structure correlation')
slices = plot_map(
vol,
ds.a.imghdr.get_best_affine(),
cut_coords=np.array((12,-42,-20)),
threshold=.5,
cmap="bwr",
vmin=0,
vmax=1.,
axes=sp,
anat=anat.get_data(),
anat_affine=anat.get_affine(),
ds.a.imgaffine,
cut_coords=np.array((12,-42,-20)),
threshold=.5,
cmap="bwr",
vmin=0,
vmax=1.,
axes=sp,
anat=anat.get_data(),
anat_affine=anat.get_affine(),
)
img = pl.gca().get_images()[1]
cax = pl.axes([.05, .05, .05, .9])
Expand Down
4 changes: 2 additions & 2 deletions doc/examples/searchlight.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@
selectivity contrast for voxels in ventral temporal cortex."""

# data path
datapath = os.path.join(mvpa2.cfg.get('location', 'tutorial data'), 'data')
dataset = load_datadb_tutorial_data(
datapath = os.path.join(mvpa2.cfg.get('location', 'tutorial data'), 'haxby2001')
dataset = load_tutorial_data(
roi='brain',
add_fa={'vt_thr_glm': os.path.join(datapath, 'sub001', 'masks',
'orig', 'vt.nii.gz')})
Expand Down
3 changes: 3 additions & 0 deletions doc/source/cmdline.rst
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ Create, modify and convert datasets
generated/cmd_select
generated/cmd_preproc
generated/cmd_dump
generated/cmd_describe

Perform analyses
----------------
Expand All @@ -86,6 +87,8 @@ Auxiliary command
generated/cmd_info
generated/cmd_exec
generated/cmd_atlaslabeler
generated/cmd_ofmotionqc
generated/cmd_ttest

.. _cmdline_example_scripts:

Expand Down
6 changes: 3 additions & 3 deletions doc/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,10 +73,10 @@

# the following doesn't work with sphinx < 1.0, but will make a separate
# sphinx-autogen run obsolete in the future
#autosummary_generate = True
autosummary_generate = True

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ['../templates']

# The suffix of source filenames.
source_suffix = '.rst'
Expand All @@ -89,7 +89,7 @@

# General substitutions.
project = 'PyMVPA'
copyright = '2006-2013, PyMVPA Authors'
copyright = '2006-2015, PyMVPA Authors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
Expand Down
2 changes: 1 addition & 1 deletion doc/source/datadb/haxby2001.rst
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ Instructions
... targets=attrs.labels, chunks=attrs.chunks,
... mask=os.path.join(subjpath, 'mask4_vt.nii.gz'))
>>> print ds
<Dataset: 1452x577@int16, <sa: chunks,targets,time_coords,time_indices>, <fa: voxel_indices>, <a: imghdr,imgtype,mapper,voxel_dim,voxel_eldim>>
<Dataset: 1452x577@int16, <sa: chunks,targets,time_coords,time_indices>, <fa: voxel_indices>, <a: imgaffine,imghdr,imgtype,mapper,voxel_dim,voxel_eldim>>


References
Expand Down
4 changes: 2 additions & 2 deletions doc/source/datadb/tutorial_data.rst
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ Tarballs are available at:
Tarball Content
===============

data/
haxby2001/
Contains data files:

bold.nii.gz
Expand Down Expand Up @@ -169,7 +169,7 @@ Instructions

>>> from mvpa2.suite import *
>>> datapath = os.path.join(pymvpa_datadbroot, 'tutorial_data',
... 'tutorial_data', 'data')
... 'tutorial_data', 'haxby2001')
>>> attrs = SampleAttributes(os.path.join(datapath, 'attributes.txt'))
>>> ds = fmri_dataset(samples=os.path.join(datapath, 'bold.nii.gz'),
... targets=attrs.targets, chunks=attrs.chunks,
Expand Down
8 changes: 7 additions & 1 deletion doc/source/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ PyMVPA is a Python_ package intended to ease statistical learning analyses of
large datasets. It offers an extensible framework with a high-level interface
to a broad range of algorithms for classification, regression, feature
selection, data import and export. It is designed to integrate well with
related software packages, such as scikit-learn_, and MDP_. While it is not
related software packages, such as scikit-learn_, shogun_, MDP_, etc. While it is not
limited to the neuroimaging domain, it is eminently suited for such datasets.
PyMVPA is free software and requires nothing but free-software to run.

Expand Down Expand Up @@ -111,6 +111,9 @@ here.

* 3dsvm_: AFNI_ plugin to apply support vector machine classifiers to fMRI data.

* CoSMoMVPA_: Matlab/Octave toolbox designed after PyMVPA and with good
interoperability with PyMVPA.

* Elefant_: Efficient Learning, Large-scale Inference, and Optimization
Toolkit. Multi-purpose open source library for machine learning.

Expand All @@ -121,6 +124,9 @@ here.
* `MVPA Toolbox`_: Matlab-based toolbox to facilitate multi-voxel pattern
analysis of fMRI neuroimaging data.

* nilearn_: `scikit-learn`_ based Python module for fast and easy statistical
learning on NeuroImaging data.

* NiPy_: Project with growing functionality to analyze brain imaging data. NiPy_
is heavily connected to SciPy and lots of functionality developed within
NiPy becomes part of SciPy.
Expand Down
17 changes: 16 additions & 1 deletion doc/source/link_names.txt
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@

.. Not so python imaging projects
.. _Caret: http://brainvis.wustl.edu/wiki/index.php/Caret:About
.. _CoSMoMVPA: http://www.github.com/CoSMoMVPA
.. _matlab: http://www.mathworks.com
.. _spm: http://www.fil.ion.ucl.ac.uk/spm
.. _spm8: http://www.fil.ion.ucl.ac.uk/spm/software/spm8
Expand All @@ -116,7 +117,7 @@
.. _PyML: http://pyml.sourceforge.net
.. _MVPA Toolbox: http://www.csbmb.princeton.edu/mvpa/
.. _scikit-learn: http://scikit-learn.org

.. _nilearn: http://nilearn.github.io

.. File formats
.. _DICOM: http://medical.nema.org/
Expand All @@ -142,6 +143,7 @@
.. _Debian: http://www.debian.org
.. _`Debian project`: http://www.debian.org
.. _Ubuntu: http://www.ubuntu.com
.. _Xubuntu: http://www.xubuntu.org
.. _exppsy: http://alioth.debian.org/projects/pkg-exppsy
.. _`Debian Med`: http://debian-med.alioth.debian.org
.. _`Debian Science`: http://wiki.debian.org/DebianScience
Expand All @@ -151,6 +153,18 @@
.. _Lin4Neuro: http://www.nemotos.net/lin4neuro
.. _fail2ban: http://www.fail2ban.org

.. Debian releases
.. _Debian 6.0 (squeeze): http://www.debian.org/releases/squeeze
.. _Debian 7.0 (wheezy): http://www.debian.org/releases/wheezy
.. _Debian 7.8 (wheezy): http://www.debian.org/releases/wheezy
.. _Debian 8.0 (jessie): http://www.debian.org/releases/jessie

.. Desktop environments
.. _GNOME: http://www.gnome.org/
.. _GNOME3: http://www.gnome.org/gnome-3/
.. _XFCE: http://www.xfce.org/
.. _XFCE4: http://www.xfce.org/

.. Functional imaging labs
.. _`functional imaging laboratory`: http://www.fil.ion.ucl.ac.uk
.. _FMRIB: http://www.fmrib.ox.ac.uk
Expand Down Expand Up @@ -199,6 +213,7 @@
.. |ITP| replace:: :abbr:`ITP (Intent to Package)`
.. |RFP| replace:: :abbr:`RFP (Request for packaging)`
.. |DBTS| replace:: :abbr:`DBTS (Debian Bug Tracking System)`
.. _debian-installer: http://www.debian.org/devel/debian-installer/

.. mailing lists
.. _neurodebian-users: http://lists.alioth.debian.org/mailman/listinfo/neurodebian-users
Expand Down
6 changes: 5 additions & 1 deletion doc/source/modref.rst
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,8 @@ Datasets: Input, Output, Storage and Preprocessing
datasets.cosmo
datasets.eeglab
datasets.miscfx
datasets.sources.native
datasets.sources.openfmri
datasets.sources.skl_data


Expand Down Expand Up @@ -287,4 +289,6 @@ Basic Plotting Utilities
support.nibabel.surf_caret
support.nibabel.surf_gifti
support.nibabel.surf



.. include:: link_names.txt
Loading

0 comments on commit a0cb050

Please sign in to comment.