Skip to content
Browse files

DOC more cleaning here and there

  • Loading branch information...
1 parent 94038f9 commit a571e50745a7fdd0b5451ff7e8b1393a745043ec @iannimmosmith iannimmosmith committed Jan 16, 2011
View
2 dipy/tracking/distances.pyx
@@ -615,7 +615,7 @@ cdef inline void min_distances(size_t t1_len,
def mam_distances(xyz1,xyz2,metric='all'):
- ''' Min/Max/Mean Average Minimume Distance between tracks xyz1 and xyz2
+ ''' Min/Max/Mean Average Minimum Distance between tracks xyz1 and xyz2
Based on the metrics in Zhang, Correia, Laidlaw 2008
http://ieeexplore.ieee.org/xpl/freeabs_all.jsp?arnumber=4479455
View
20 dipy/tracking/metrics.py
@@ -182,15 +182,25 @@ def frenet_serret(xyz):
In the following equations the prime ($'$) indicates differentiation
with respect to the parameter $s$ of a parametrised curve $\mathbf{r}(s)$.
- $\mathbf{T}=\mathbf{r'}/|\mathbf{r'}|$ (Tangent vector)
+.. math::
+
+ \mathbf{T}=\mathbf{r'}/|\mathbf{r'}|\qquad \mathrm{(Tangent vector)}
- $\mathbf{N}=\mathbf{T'}/|\mathbf{T'}|$ (Normal vector)
+.. math::
+
+ \mathbf{N}=\mathbf{T'}/|\mathbf{T'}|\qquad \mathrm{(Normal vector)}
- $\mathbf{B}=\mathbf{T}\wedge\mathbf{N}$ (Binormal vector)
+.. math::
+
+ \mathbf{B}=\mathbf{T}\times\mathbf{N}\qquad \mathrm{(Binormal vector)}
- $\kappa=|\mathbf{T'}|$ (Curvature)
+.. math::
+
+ \kappa=|\mathbf{T'}|\qquad \mathrm{(Curvature)}
- $\mathrm{tau}=-\mathbf{B'}\cdot\mathbf{N}$ (Torsion)
+.. math::
+
+ \tau=-\mathbf{B'}\cdot\mathbf{N}\qquad \mathrm{(Torsion)}
Parameters
------------
View
42 doc/examples/find_correspondence.py
@@ -32,6 +32,7 @@
``get_skeleton`` provides two skeletons 'C1' and 'C3'
previously generated from Local Skeleton Clustering (LSC)
"""
+
C1=get_skeleton('C1')
C3=get_skeleton('C3')
@@ -56,42 +57,58 @@
fvtk.add(r,fvtk.line(T3s,fvtk.gray))
-#fvtk.show(r)
+#fvtk.show(r)
+
+"""
+For each track in T1 find the minimum average distance to all the
+tracks in T3 and put information about it in ``track2track``.
+"""
indices=range(len(T1))
track2track=[]
+mam_threshold=6.
for i in indices:
rt=[mam_distances(T1[i],t,'avg') for t in T3]
rt=np.array(rt)
- if rt.min()< 5:
+ if rt.min()< mam_threshold:
track2track.append(np.array([i,rt.argmin(),rt.min()]))
track2track=np.array(track2track)
np.set_printoptions(2)
-print track2track
-toberemoved=[]
+"""
+When a track in T3 is simultaneously the nearest track to more than one track in T1 we identify the track
+in T1 that has the best correspondence and remove the others.
+"""
+
+good_correspondence=[]
for i in track2track[:,1]:
check= np.where(track2track[:,1]==i)[0]
- if len(check)>=2:
+ if len(check) == 1:
+ good_correspondence.append(check[0])
+ elif len(check)>=2:
#print check,check[np.argmin(track2track[check][:,2])]
- toberemoved.append(check[np.argmin(track2track[check][:,2])])
- #toberemoved.append())
+ good_correspondence.append(check[np.argmin(track2track[check][:,2])])
+ #good_correspondence.append())
-#print toberemoved
-toberemoved=list(set(toberemoved))
+#print good_correspondence
+good_correspondence=list(set(good_correspondence))
-track2track=np.delete(track2track,toberemoved,0)
+track2track=track2track[good_correspondence,:]
-print track2track
+print 'With mam_threshold %f we find %d correspondence pairs' % (mam_threshold, np.size(track2track,0))
#fvtk.clear(r)
+"""
+Now plot the corresponding tracks in the same colours
+"""
+
for row in track2track:
-
+
color=np.random.rand(3)
T=[T1[int(row[0])],T3s[int(row[1])]]
fvtk.add(r,fvtk.line(T,color,linewidth=10))
@@ -110,3 +127,4 @@
+
View
31 doc/examples/nii_2_tracks.py
@@ -14,35 +14,35 @@
First import the necessary modules
----------------------------------
-* ``numpy`` is for numerical computation
+``numpy`` is for numerical computation
"""
import numpy as np
"""
-* ``nibabel`` is for data formats
+``nibabel`` is for data formats
"""
import nibabel as nib
"""
-* ``dipy.reconst`` is for the reconstruction algorithms which we use to create directionality models
+``dipy.reconst`` is for the reconstruction algorithms which we use to create directionality models
for a voxel from the raw data.
"""
import dipy.reconst.gqi as gqi
import dipy.reconst.dti as dti
"""
-* ``dipy.tracking`` is for tractography algorithms which create sets of tracks by integrating
+``dipy.tracking`` is for tractography algorithms which create sets of tracks by integrating
directionality models across voxels.
"""
from dipy.tracking.propagation import EuDX
"""
-* ``dipy.data`` is for small datasets we use in tests and examples.
+``dipy.data`` is for small datasets we use in tests and examples.
"""
from dipy.data import get_data
@@ -71,13 +71,13 @@
fimg,fbvals,fbvecs=get_data('small_101D')
"""
-* **Load the nifti file found at path fimg as an Nifti1Image.**
+**Load the nifti file found at path fimg as an Nifti1Image.**
"""
img=nib.load(fimg)
"""
-* **Read the datasets from the Nifti1Image.**
+**Read the datasets from the Nifti1Image.**
"""
data=img.get_data()
@@ -91,21 +91,21 @@
As you would expect, the raw diffusion weighted MR data is 4-dimensional as
we have one 3-d volume (6 by 10 by 10) for each gradient direction.
-* **Read the affine matrix**
+**Read the affine matrix**
which gives the mapping between volume indices (voxel coordinates) and world coordinates.
"""
affine=img.get_affine()
"""
-* **Read the b-values** which are a function of the strength, duration, temporal spacing and timing parameters of the
- specific paradigm used in the scanner, one per gradient direction.
+**Read the b-values** which are a function of the strength, duration, temporal spacing
+and timing parameters of the specific paradigm used in the scanner, one per gradient direction.
"""
bvals=np.loadtxt(fbvals)
"""
-* **Read the b-vectors**, the unit gradient directions.
+**Read the b-vectors**, the unit gradient directions.
"""
gradients=np.loadtxt(fbvecs).T
@@ -116,13 +116,13 @@
We are now set up with all the data and parameters to start calculating directional models
for voxels and their associated parameters, e.g. anisotropy.
-* **Calculate the Single Tensor Model (STM).**
+**Calculate the Single Tensor Model (STM).**
"""
ten=dti.Tensor(data,bvals,gradients,thresh=50)
"""
-* **Calculate Fractional Anisotropy (FA) from STM**
+**Calculate Fractional Anisotropy (FA) from STM**
"""
FA=ten.fa()
@@ -279,12 +279,13 @@
fvtk.add(r,fvtk.line(ten_tracks,fvtk.red,opacity=0.05))
gqs_tracks2=[t+np.array([10,0,0]) for t in gqs_tracks]
fvtk.add(r,fvtk.line(gqs_tracks2,fvtk.green,opacity=0.05))
-#fvtk.show(r,png_magnify=1)
"""
-Press 's' to save this beautiful screenshot.
+Press 's' to save this beautiful screenshot when you have displayed it with ``fvtk.show``.
**Thank you!**
--------------
"""
+#fvtk.show(r,png_magnify=1)
+
View
30 doc/examples/tractography_clustering.py
@@ -13,7 +13,7 @@
First import the necessary modules
----------------------------------
-* ``numpy`` is for numerical computation
+``numpy`` is for numerical computation
"""
@@ -37,33 +37,33 @@
print(fname)
"""
-Loading Trackvis file
+Load Trackvis file for *Fornix*:
"""
streams,hdr=tv.read(fname)
"""
-Copying tracks...
+Copy tracks:
"""
T=[i[0] for i in streams]
#T=T[:1000]
"""
-Representing tracks using only 3 pts
+Downsample tracks to just 3 points:
"""
tracks=[tm.downsample(t,3) for t in T]
"""
-Deleting unnecessary data...
+Delete unnecessary data:
"""
del streams,hdr
"""
-Local Skeleton Clustering
+Perform Local Skeleton Clustering (LSC) with a 5mm threshold:
"""
now=time.clock()
@@ -72,21 +72,21 @@
"""
-Reducing the number of points for faster visualization
+Reduce the number of points for faster visualization using the ``approx_polygon_track`` algorithm which retains points depending on how much they are needed to define the shape of the track:
"""
T=[td.approx_polygon_track(t) for t in T]
"""
-Show initial dataset.
+Show the initial *Fornix* dataset:
"""
r=fvtk.ren()
fvtk.add(r,fvtk.line(T,fvtk.white,opacity=1))
fvtk.show(r)
"""
-Showing dataset after clustering. (with random bundle colors)
+Show the *Fornix* after clustering (with random bundle colors):
"""
fvtk.clear(r)
@@ -109,7 +109,7 @@
print('tripletons %d' % lens.count(3))
"""
-Finding most representative tracks in bundle (cluster)
+Find and display the skeleton of most representative tracks in each cluster:
"""
skeleton=[]
@@ -127,13 +127,14 @@
fvtk.show(r)
"""
-Lets save in the dictionary the skeleton information
+Save the skeleton information in the dictionary. Now try to play with different LSC thresholds and check the different results.
+Try it with your datasets and give us some feedback.
+
"""
for (i,c) in enumerate(C):
C[c]['most']=skeleton[i]
-
for c in C:
print('Keys in bundle %d' % c)
print(C[c].keys())
@@ -142,10 +143,5 @@
pkl.save_pickle('skeleton_fornix.pkl',C)
-"""
-Try to play with different thresholds LSC and check the different results.
-Try it with your datasets and gives us some feedback.
-"""
-
View
47 doc/examples/visualize_crossings.py
@@ -9,45 +9,44 @@
Overview
========
-**This example visualizes the crossings struture of a few voxels.**
+**This example visualizes the crossings structure of a few voxels.**
First import the necessary modules
----------------------------------
-* ``numpy`` is for numerical computation
+``numpy`` is for numerical computation
"""
import numpy as np
"""
-* ``nibabel`` is for data formats
+``nibabel`` is for data formats
"""
import nibabel as nib
"""
-* ``dipy.reconst`` is for the reconstruction algorithms which we use to create directionality models
+``dipy.reconst`` is for the reconstruction algorithms which we use to create directionality models
for a voxel from the raw data.
"""
import dipy.reconst.gqi as gqi
import dipy.reconst.dti as dti
"""
-* ``dipy.tracking`` is for tractography algorithms which create sets of tracks by integrating
+``dipy.tracking`` is for tractography algorithms which create sets of tracks by integrating
directionality models across voxels.
"""
from dipy.tracking.propagation import EuDX
"""
-* ``dipy.data`` is for small datasets we use in tests and examples.
+``dipy.data`` is for small datasets we use in tests and examples.
"""
from dipy.data import get_data
-
"""
Isotropic voxel sizes required
------------------------------
@@ -71,13 +70,13 @@
fimg,fbvals,fbvecs=get_data('small_101D')
"""
-* **Load the nifti file found at path fimg as an Nifti1Image.**
+**Load the nifti file found at path fimg as an Nifti1Image.**
"""
img=nib.load(fimg)
"""
-* **Read the datasets from the Nifti1Image.**
+**Read the datasets from the Nifti1Image.**
"""
data=img.get_data()
@@ -91,21 +90,21 @@
As you would expect, the raw diffusion weighted MR data is 4-dimensional as
we have one 3-d volume (6 by 10 by 10) for each gradient direction.
-* **Read the affine matrix**
+**Read the affine matrix**
which gives the mapping between volume indices (voxel coordinates) and world coordinates.
"""
affine=img.get_affine()
"""
-* **Read the b-values** which are a function of the strength, duration, temporal spacing and timing parameters of the
- specific paradigm used in the scanner, one per gradient direction.
+**Read the b-values** which are a function of the strength, duration, temporal spacing and timing parameters of the
+specific paradigm used in the scanner, one per gradient direction.
"""
bvals=np.loadtxt(fbvals)
"""
-* **Read the b-vectors**, the unit gradient directions.
+**Read the b-vectors**, the unit gradient directions.
"""
gradients=np.loadtxt(fbvecs).T
@@ -144,14 +143,14 @@
qa=QA[0,0,0]
"""
-- qa is the quantitative anisotropy metric
+``qa`` is the quantitative anisotropy metric
"""
IN=gqs.ind()
ind=IN[0,0,0]
"""
-- ind holds the indices of the vertices of (up to 5) gqi odf local maxima
+``ind`` holds the indices of the vertices of (up to 5) gqi odf local maxima
"""
print 'quantitative anisotropy metric =', qa
@@ -210,8 +209,13 @@
1, 2, and 3 crossings.
``fvtk.crossing`` is a helper function which we use to graph the orientations of the maxima
-for these 3 voxels. We use 3 different colours and offset the graphs to display them
-in one diagram.
+of all the voxels in our dataset. We use 3 different colourings and offset the graphs to display them
+in one diagram. The colourings are:
+
+- all blue, with the 3 voxels used above ([3,8,4], [3,8,5], and [3,8,6]) marked in blue, indigo, and red.
+- the Boys' colour map (see ``colormap.boys2rgb.py``)
+- the orientation colour map (see ``colormap.orient2rgb.py``) with red: left-right; green: anteroposterior; blue: superior-inferior.
+
"""
#3,8,4 no crossing
@@ -242,18 +246,11 @@
colors=np.zeros((len(all),3))
colors2=np.zeros((len(all),3))
for (i,a) in enumerate(all):
- print a[0]
+ #print a[0]
colors[i]=cm.boys2rgb(a[0])
colors2[i]=cm.orient2rgb(a[0])
fvtk.add(r,fvtk.line(all_shift,colors,linewidth=1.))
fvtk.add(r,fvtk.line(all_shift2,colors2,linewidth=2.))
fvtk.show(r)
-
-
-"""
-**Hope that helps!**
----------------------
-"""
-

0 comments on commit a571e50

Please sign in to comment.
Something went wrong with that request. Please try again.