diff --git a/dipy/align/bundlemin.pyx b/dipy/align/bundlemin.pyx
index 12df31c4e1..bd1b5967f4 100644
--- a/dipy/align/bundlemin.pyx
+++ b/dipy/align/bundlemin.pyx
@@ -132,8 +132,8 @@ def _bundle_minimum_distance_matrix(double [:, ::1] static,
     return np.asarray(D)
 
 
-def _bundle_minimum_distance(double [:, ::1] stat,
-                             double [:, ::1] mov,
+def _bundle_minimum_distance(double [:, ::1] static,
+                             double [:, ::1] moving,
                              cnp.npy_intp static_size,
                              cnp.npy_intp moving_size,
                              cnp.npy_intp rows,
@@ -207,8 +207,8 @@ def _bundle_minimum_distance(double [:, ::1] stat,
 
         for j in range(moving_size):
 
-            tmp = min_direct_flip_dist(&stat[i * rows, 0],
-                                       &mov[j * rows, 0], rows)
+            tmp = min_direct_flip_dist(&static[i * rows, 0],
+                                       &moving[j * rows, 0], rows)
 
             if have_openmp:
                 openmp.omp_set_lock(&lock)
@@ -242,6 +242,98 @@ def _bundle_minimum_distance(double [:, ::1] stat,
     return dist
 
 
+def _bundle_minimum_distance_asymmetric(double [:, ::1] static,
+                                        double [:, ::1] moving,
+                                        cnp.npy_intp static_size,
+                                        cnp.npy_intp moving_size,
+                                        cnp.npy_intp rows):
+    """ MDF-based pairwise distance optimization function
+
+    We minimize the distance between moving streamlines of the same number
+    of points as they align with the static streamlines.
+
+    Parameters
+    ----------
+    static : array
+        Static streamlines
+    moving : array
+        Moving streamlines
+    static_size : int
+        Number of static streamlines
+    moving_size : int
+        Number of moving streamlines
+    rows : int
+        Number of points per streamline
+
+    Returns
+    -------
+    cost : double
+
+    Notes
+    -----
+    The difference from ``_bundle_minimum_distance`` is that we sum the
+    minimum values only over the static streamlines. Therefore, this is an
+    asymmetric distance metric: only one direction of the registration is
+    weighted, not both. This can be very useful when we want to register a
+    big set of bundles to a small set of bundles. See [Wanyan17]_.
+
+    References
+    ----------
+    .. [Wanyan17] Wanyan and Garyfallidis, Important new insights for the
+       reduction of false positives in tractograms emerge from
+       streamline-based registration and pruning, International Society for
+       Magnetic Resonance in Medicine, Honolulu, Hawaii, 2017.
+ + """ + + cdef: + cnp.npy_intp i=0, j=0 + double sum_i=0, sum_j=0, tmp=0 + double inf = np.finfo('f8').max + double dist=0 + double * min_j + openmp.omp_lock_t lock + + with nogil: + + if have_openmp: + openmp.omp_init_lock(&lock) + + min_j = malloc(static_size * sizeof(double)) + + for i in range(static_size): + min_j[i] = inf + + for i in prange(static_size): + + for j in range(moving_size): + + tmp = min_direct_flip_dist(&static[i * rows, 0], + &moving[j * rows, 0], rows) + + if have_openmp: + openmp.omp_set_lock(&lock) + if tmp < min_j[i]: + min_j[i] = tmp + + if have_openmp: + openmp.omp_unset_lock(&lock) + + if have_openmp: + openmp.omp_destroy_lock(&lock) + + for i in range(static_size): + sum_i += min_j[i] + + free(min_j) + + dist = sum_i / static_size + + return dist + + def distance_matrix_mdf(streamlines_a, streamlines_b): r""" Minimum direct flipped distance matrix between two streamline sets diff --git a/dipy/align/streamlinear.py b/dipy/align/streamlinear.py index eac62f0ba7..165d787dfa 100644 --- a/dipy/align/streamlinear.py +++ b/dipy/align/streamlinear.py @@ -3,18 +3,29 @@ from dipy.utils.six import with_metaclass from dipy.core.optimize import Optimizer from dipy.align.bundlemin import (_bundle_minimum_distance, + _bundle_minimum_distance_asymmetric, distance_matrix_mdf) from dipy.tracking.streamline import (transform_streamlines, unlist_streamlines, - center_streamlines) + center_streamlines, + set_number_of_points, + select_random_set_of_streamlines, + length) +from dipy.segment.clustering import QuickBundles from dipy.core.geometry import (compose_transformations, compose_matrix, decompose_matrix) from dipy.utils.six import string_types +from time import time MAX_DIST = 1e10 LOG_MAX_DIST = np.log(MAX_DIST) +DEFAULT_BOUNDS = [(-35, 35), (-35, 35), (-35, 35), + (-45, 45), (-45, 45), (-45, 45), + (0.6, 1.4), (0.6, 1.4), (0.6, 1.4), + (-10, 10), (-10, 10), (-10, 10)] + class StreamlineDistanceMetric(with_metaclass(abc.ABCMeta, object)): @@ -156,6 +167,18 @@ def distance(self, xopt): return bundle_min_distance(xopt, self.static, self.moving) +class BundleMinDistanceAsymmetricMetric(BundleMinDistanceMetric): + """ Asymmetric Bundle-based Minimum distance + """ + + def distance(self, xopt): + + return bundle_min_distance_asymmetric_fast(xopt, + self.static_centered_pts, + self.moving_centered_pts, + self.block_size) + + class BundleSumDistanceMatrixMetric(BundleMinDistanceMatrixMetric): """ Bundle-based Sum Distance aka BMD @@ -189,7 +212,7 @@ class StreamlineLinearRegistration(object): def __init__(self, metric=None, x0="rigid", method='L-BFGS-B', bounds=None, verbose=False, options=None, evolution=False, num_threads=None): - r""" Linear registration of 2 sets of streamlines [Garyfallidis14]_. + r""" Linear registration of 2 sets of streamlines [Garyfallidis15]_. Parameters ---------- @@ -258,10 +281,15 @@ def __init__(self, metric=None, x0="rigid", method='L-BFGS-B', References ---------- + .. [Garyfallidis15] Garyfallidis et al. "Robust and efficient linear + registration of white-matter fascicles in the space of + streamlines", NeuroImage, 117, 124--140, 2015 .. [Garyfallidis14] Garyfallidis et al., "Direct native-space fiber bundle alignment for group comparisons", ISMRM, 2014. - + .. [Garyfallidis17] Garyfallidis et al. Recognition of white matter + bundles using local and global streamline-based registration and + clustering, Neuroimage, 2017. 
""" self.x0 = self._set_x0(x0) @@ -376,32 +404,43 @@ def _set_x0(self, x0): if hasattr(x0, 'ndim'): - if len(x0) not in [6, 7, 12]: - msg = 'Only 1D arrays of 6, 7 and 12 elements are allowed' - raise ValueError(msg) + if len(x0) not in [3, 6, 7, 9, 12]: + m_ = 'Only 1D arrays of 3, 6, 7, 9 and 12 elements are allowed' + raise ValueError(m_) if x0.ndim != 1: raise ValueError("Array should have only one dimension") return x0 if isinstance(x0, string_types): + + if x0.lower() == 'translation': + return np.zeros(3) + if x0.lower() == 'rigid': return np.zeros(6) if x0.lower() == 'similarity': return np.array([0, 0, 0, 0, 0, 0, 1.]) + if x0.lower() == 'scaling': + return np.array([0, 0, 0, 0, 0, 0, 1., 1., 1.]) + if x0.lower() == 'affine': return np.array([0, 0, 0, 0, 0, 0, 1., 1., 1., 0, 0, 0]) if isinstance(x0, int): - if x0 not in [6, 7, 12]: - msg = 'Only 6, 7 and 12 are accepted as integers' + if x0 not in [3, 6, 7, 9, 12]: + msg = 'Only 3, 6, 7, 9 and 12 are accepted as integers' raise ValueError(msg) else: + if x0 == 3: + return np.zeros(3) if x0 == 6: return np.zeros(6) if x0 == 7: return np.array([0, 0, 0, 0, 0, 0, 1.]) + if x0 == 9: + return np.array([0, 0, 0, 0, 0, 0, 1., 1., 1.]) if x0 == 12: return np.array([0, 0, 0, 0, 0, 0, 1., 1., 1., 0, 0, 0]) @@ -604,6 +643,333 @@ def bundle_min_distance_fast(t, static, moving, block_size, num_threads): num_threads) +def bundle_min_distance_asymmetric_fast(t, static, moving, block_size): + """ MDF-based pairwise distance optimization function (MIN) + + We minimize the distance between moving streamlines as they align + with the static streamlines. + + Parameters + ----------- + t : array + 1D array. t is a vector of of affine transformation parameters with + size at least 6. + If size is 6, t is interpreted as translation + rotation. + If size is 7, t is interpreted as translation + rotation + + isotropic scaling. + If size is 12, t is interpreted as translation + rotation + + scaling + shearing. + + static : array + N*M x 3 array. All the points of the static streamlines. With order of + streamlines intact. Where N is the number of streamlines and M + is the number of points per streamline. + + moving : array + K*M x 3 array. All the points of the moving streamlines. With order of + streamlines intact. Where K is the number of streamlines and M + is the number of points per streamline. + + block_size : int + Number of points per streamline. All streamlines in static and moving + should have the same number of points M. + + Returns + ------- + cost: float + """ + + aff = compose_matrix44(t) + moving = np.dot(aff[:3, :3], moving.T).T + aff[:3, 3] + moving = np.ascontiguousarray(moving, dtype=np.float64) + + rows = static.shape[0] / block_size + cols = moving.shape[0] / block_size + + return _bundle_minimum_distance_asymmetric(static, moving, + rows, + cols, + block_size) + + +def remove_clusters_by_size(clusters, min_size=0): + by_size = lambda c: len(c) >= min_size + return filter(by_size, clusters) + + +def progressive_slr(static, moving, metric, x0, bounds, + method='L-BFGS-B', verbose=True, num_threads=None): + """ Progressive SLR + + This is an utility function that allows for example to do affine + registration using Streamline-based Linear Registration (SLR) + [Garyfallidis15]_ by starting with translation first, then rigid, + then similarity, scaling and finally affine. + + Similarly, if for example you want to perform rigid then you start with + translation first. 
+def remove_clusters_by_size(clusters, min_size=0):
+    by_size = lambda c: len(c) >= min_size
+    return filter(by_size, clusters)
+
+
+def progressive_slr(static, moving, metric, x0, bounds,
+                    method='L-BFGS-B', verbose=True, num_threads=None):
+    """ Progressive SLR
+
+    This is a utility function that allows, for example, affine
+    registration using Streamline-based Linear Registration (SLR)
+    [Garyfallidis15]_ by starting with translation first, then rigid,
+    then similarity, scaling and finally affine.
+
+    Similarly, if for example you want to perform a rigid registration,
+    you start with translation first. This progressive strategy can help
+    with finding the optimal parameters of the final transformation.
+
+    Parameters
+    ----------
+    static : Streamlines
+    moving : Streamlines
+    metric : StreamlineDistanceMetric
+    x0 : string
+        Could be any of 'translation', 'rigid', 'similarity', 'scaling',
+        'affine'
+    bounds : array
+        Boundaries of registration parameters. See variable
+        `DEFAULT_BOUNDS` for an example.
+    method : string
+        'L-BFGS-B' or 'Powell' optimizers can be used. Default is
+        'L-BFGS-B'.
+    verbose : bool
+        If True, show messages in stdout (default True).
+    num_threads : int
+        Number of threads. If None (default) then all available threads
+        will be used. Only metrics using OpenMP will use this variable.
+
+    References
+    ----------
+    .. [Garyfallidis15] Garyfallidis et al. "Robust and efficient linear
+       registration of white-matter fascicles in the space of streamlines",
+       NeuroImage, 117, 124--140, 2015
+    """
+
+    if verbose:
+        print('Progressive Registration is Enabled')
+
+    if x0 == 'translation' or x0 == 'rigid' or \
+       x0 == 'similarity' or x0 == 'scaling' or x0 == 'affine':
+
+        if verbose:
+            print(' Translation (3 parameters)...')
+        slr_t = StreamlineLinearRegistration(metric=metric,
+                                             x0='translation',
+                                             bounds=bounds[:3],
+                                             method=method)
+
+        slm_t = slr_t.optimize(static, moving)
+
+    if x0 == 'rigid' or x0 == 'similarity' or \
+       x0 == 'scaling' or x0 == 'affine':
+
+        x_translation = slm_t.xopt
+        x = np.zeros(6)
+        x[:3] = x_translation
+
+        if verbose:
+            print(' Rigid (6 parameters) ...')
+        slr_r = StreamlineLinearRegistration(metric=metric,
+                                             x0=x,
+                                             bounds=bounds[:6],
+                                             method=method)
+        slm_r = slr_r.optimize(static, moving)
+
+    if x0 == 'similarity' or x0 == 'scaling' or x0 == 'affine':
+
+        x_rigid = slm_r.xopt
+        x = np.zeros(7)
+        x[:6] = x_rigid
+        x[6] = 1.
+
+        if verbose:
+            print(' Similarity (7 parameters) ...')
+        slr_s = StreamlineLinearRegistration(metric=metric,
+                                             x0=x,
+                                             bounds=bounds[:7],
+                                             method=method)
+        slm_s = slr_s.optimize(static, moving)
+
+    if x0 == 'scaling' or x0 == 'affine':
+
+        x_similarity = slm_s.xopt
+        x = np.zeros(9)
+        x[:6] = x_similarity[:6]
+        x[6:] = np.array((x_similarity[6],) * 3)
+
+        if verbose:
+            print(' Scaling (9 parameters) ...')
+
+        slr_c = StreamlineLinearRegistration(metric=metric,
+                                             x0=x,
+                                             bounds=bounds[:9],
+                                             method=method)
+        slm_c = slr_c.optimize(static, moving)
+
+    if x0 == 'affine':
+
+        x_scaling = slm_c.xopt
+        x = np.zeros(12)
+        x[:9] = x_scaling[:9]
+        x[9:] = np.zeros(3)
+
+        if verbose:
+            print(' Affine (12 parameters) ...')
+
+        slr_a = StreamlineLinearRegistration(metric=metric,
+                                             x0=x,
+                                             bounds=bounds[:12],
+                                             method=method)
+        slm_a = slr_a.optimize(static, moving)
+
+    if x0 == 'translation':
+        slm = slm_t
+    elif x0 == 'rigid':
+        slm = slm_r
+    elif x0 == 'similarity':
+        slm = slm_s
+    elif x0 == 'scaling':
+        slm = slm_c
+    elif x0 == 'affine':
+        slm = slm_a
+    else:
+        raise ValueError('Incorrect SLR transform')
+
+    return slm
+
+
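A minimal usage sketch for the cascade above, assuming `static_centroids`
and `moving_centroids` are lists of streamlines already resampled to the
same number of points (variable names are illustrative):

    from dipy.align.streamlinear import progressive_slr, DEFAULT_BOUNDS

    # metric=None lets StreamlineLinearRegistration fall back to its
    # default BundleMinDistanceMetric at every stage of the cascade
    slm = progressive_slr(static_centroids, moving_centroids, metric=None,
                          x0='affine', bounds=DEFAULT_BOUNDS)
    moved_centroids = slm.transform(moving_centroids)

Each stage seeds the next with its optimum, so the 12-parameter affine
search starts close to a good solution instead of from the identity.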
+def slr_with_qb(static, moving,
+                x0='affine',
+                rm_small_clusters=50,
+                maxiter=100,
+                select_random=None,
+                verbose=False,
+                greater_than=50,
+                less_than=250,
+                qb_thr=15,
+                nb_pts=20,
+                progressive=True, num_threads=None):
+    """ Utility function for registering large tractograms.
+
+    For efficiency, we apply the registration on cluster centroids and
+    remove small clusters.
+
+    Parameters
+    ----------
+    static : Streamlines
+    moving : Streamlines
+    x0 : str
+        'rigid', 'similarity' or 'affine' transformation model
+        (default 'affine')
+    rm_small_clusters : int
+        Remove clusters that have fewer than `rm_small_clusters`
+        streamlines (default 50).
+    maxiter : int
+        Maximum number of iterations for the optimizer (default 100).
+        Used only when `progressive` is False.
+    select_random : int
+        If not None, select a random number of streamlines to apply
+        clustering. Default None.
+    verbose : bool
+        If True, information about the optimization is shown.
+    greater_than : float
+        Keep streamlines longer than `greater_than` mm (default 50).
+    less_than : float
+        Keep streamlines shorter than `less_than` mm (default 250).
+    qb_thr : float
+        QuickBundles distance threshold in mm (default 15).
+    nb_pts : int
+        Number of points per streamline after resampling (default 20).
+    progressive : bool
+        If True (default), use `progressive_slr` instead of a single SLR.
+    num_threads : int
+        Number of threads. If None (default) then all available threads
+        will be used. Only metrics using OpenMP will use this variable.
+
+    Notes
+    -----
+    The order of operations is the following. First, short or long
+    streamlines are removed. Second, the tractogram, or a random selection
+    of the tractogram, is clustered with QuickBundles. Then SLR
+    [Garyfallidis15]_ is applied.
+
+    References
+    ----------
+    .. [Garyfallidis15] Garyfallidis et al. "Robust and efficient linear
+       registration of white-matter fascicles in the space of streamlines",
+       NeuroImage, 117, 124--140, 2015
+    .. [Garyfallidis14] Garyfallidis et al., "Direct native-space fiber
+       bundle alignment for group comparisons", ISMRM, 2014.
+    .. [Garyfallidis17] Garyfallidis et al. Recognition of white matter
+       bundles using local and global streamline-based registration and
+       clustering, Neuroimage, 2017.
+    """
+    if verbose:
+        print('Static streamlines size {}'.format(len(static)))
+        print('Moving streamlines size {}'.format(len(moving)))
+
+    def check_range(streamline, gt=greater_than, lt=less_than):
+        length_s = length(streamline)
+        return (length_s > gt) and (length_s < lt)
+
+    # TODO change this to the new Streamlines API
+    streamlines1 = [s for s in static if check_range(s)]
+    streamlines2 = [s for s in moving if check_range(s)]
+
+    if verbose:
+
+        print('Static streamlines after length reduction {}'
+              .format(len(streamlines1)))
+        print('Moving streamlines after length reduction {}'
+              .format(len(streamlines2)))
+
+    if select_random is not None:
+        rstreamlines1 = select_random_set_of_streamlines(streamlines1,
+                                                         select_random)
+    else:
+        rstreamlines1 = streamlines1
+
+    rstreamlines1 = set_number_of_points(rstreamlines1, nb_pts)
+    qb1 = QuickBundles(threshold=qb_thr)
+    rstreamlines1 = [s.astype('f4') for s in rstreamlines1]
+    cluster_map1 = qb1.cluster(rstreamlines1)
+    clusters1 = remove_clusters_by_size(cluster_map1, rm_small_clusters)
+    qb_centroids1 = [cluster.centroid for cluster in clusters1]
+
+    if select_random is not None:
+        rstreamlines2 = select_random_set_of_streamlines(streamlines2,
+                                                         select_random)
+    else:
+        rstreamlines2 = streamlines2
+
+    rstreamlines2 = set_number_of_points(rstreamlines2, nb_pts)
+    qb2 = QuickBundles(threshold=qb_thr)
+    rstreamlines2 = [s.astype('f4') for s in rstreamlines2]
+    cluster_map2 = qb2.cluster(rstreamlines2)
+    clusters2 = remove_clusters_by_size(cluster_map2, rm_small_clusters)
+    qb_centroids2 = [cluster.centroid for cluster in clusters2]
+
+    if verbose:
+        t = time()
+
+    if not progressive:
+        slr = StreamlineLinearRegistration(x0=x0,
+                                           options={'maxiter': maxiter},
+                                           num_threads=num_threads)
+        slm = slr.optimize(qb_centroids1, qb_centroids2)
+    else:
+        bounds = DEFAULT_BOUNDS
+
+        slm = progressive_slr(qb_centroids1, qb_centroids2,
+                              x0=x0, metric=None,
+                              bounds=bounds, num_threads=num_threads)
+
+    if verbose:
+        print('QB static centroids size %d' % len(qb_centroids1))
+        print('QB moving centroids size %d' % len(qb_centroids2))
+        duration = time() - t
+        print('SLR finished in %0.3f seconds.' % (duration,))
+        if slm.iterations is not None:
+            print('SLR iterations: %d ' % (slm.iterations,))
+
+    moved = slm.transform(moving)
+
+    return moved, slm.matrix, qb_centroids1, qb_centroids2
+
+
+# In essence whole_brain_slr can be thought of as a combination of
+# SLR on QuickBundles centroids and some thresholding; see
+# Garyfallidis et al. Recognition of white matter
+# bundles using local and global streamline-based registration and
+# clustering, Neuroimage, 2017.
+whole_brain_slr = slr_with_qb
+
+
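A short usage sketch, mirroring the new test further below: register a
tractogram to a translated copy of itself, assuming `f1` and `f2` are
`Streamlines` objects in the same space.

    from dipy.align.streamlinear import whole_brain_slr

    moved, transform, centroids1, centroids2 = whole_brain_slr(
        f1, f2, x0='affine', rm_small_clusters=2, qb_thr=5,
        progressive=True)
    # `transform` is the 4x4 matrix mapping the moving set into the
    # static space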
 def _threshold(x, th):
     return np.maximum(np.minimum(x, th), -th)
 
@@ -615,10 +981,13 @@ def compose_matrix44(t, dtype=np.double):
     ----------
     t : ndarray
         This is a 1D vector of affine transformation parameters with
-        size at least 6.
+        size at least 3.
+        If size is 3, t is interpreted as translation.
         If size is 6, t is interpreted as translation + rotation.
         If size is 7, t is interpreted as translation + rotation +
         isotropic scaling.
+        If size is 9, t is interpreted as translation + rotation +
+        anisotropic scaling.
         If size is 12, t is interpreted as translation + rotation +
         scaling + shearing.
 
@@ -632,17 +1001,18 @@ def compose_matrix44(t, dtype=np.double):
     t = np.array(t)
     size = t.size
 
-    if size not in [6, 7, 12]:
-        raise ValueError('Accepted number of parameters is 6, 7 and 12')
+    if size not in [3, 6, 7, 9, 12]:
+        raise ValueError('Accepted number of parameters is 3, 6, 7, 9 and 12')
 
     scale, shear, angles, translate = (None, ) * 4
-    if size in [6, 7, 12]:
-        translate = _threshold(t[0:3], MAX_DIST)
+    translate = _threshold(t[0:3], MAX_DIST)
+    if size in [6, 7, 9, 12]:
         angles = np.deg2rad(t[3:6])
     if size == 7:
         scale = np.array((t[6],) * 3)
-    if size == 12:
+    if size in [9, 12]:
         scale = t[6: 9]
+    if size == 12:
         shear = t[9: 12]
 
     return compose_matrix(scale=scale, shear=shear, angles=angles,
@@ -657,28 +1027,33 @@ def decompose_matrix44(mat, size=12):
     mat : array
         Homogeneous 4x4 transformation matrix
     size : int
-        Size of output vector. 6 for rigid, 7 for similarity and 12
-        for affine. Default is 12.
+        Size of output vector. 3 for translation, 6 for rigid,
+        7 for similarity, 9 for scaling and 12 for affine. Default is 12.
 
     Returns
     -------
     t : ndarray
-        One dimensional ndarray of 6, 7 or 12 affine parameters.
+        One dimensional ndarray of 3, 6, 7, 9 or 12 affine parameters.
""" scale, shear, angles, translate, _ = decompose_matrix(mat) t = np.zeros(12) t[:3] = translate + if size == 3: + return t[:3] t[3: 6] = np.rad2deg(angles) if size == 6: return t[:6] if size == 7: t[6] = np.mean(scale) return t[:7] + if size == 9: + t[6:9] = scale + return t[:9] if size == 12: t[6: 9] = scale t[9: 12] = shear return t - raise ValueError('Size can be 6, 7 or 12') + raise ValueError('Size can be 3, 6, 7, 9 or 12') diff --git a/dipy/align/tests/test_streamlinear.py b/dipy/align/tests/test_streamlinear.py index 99b63602fe..2991b71dcf 100644 --- a/dipy/align/tests/test_streamlinear.py +++ b/dipy/align/tests/test_streamlinear.py @@ -303,7 +303,7 @@ def test_from_to_rigid(): def test_matrix44(): assert_raises(ValueError, compose_matrix44, np.ones(5)) - assert_raises(ValueError, compose_matrix44, np.ones(9)) + assert_raises(ValueError, compose_matrix44, np.ones(13)) assert_raises(ValueError, compose_matrix44, np.ones(16)) diff --git a/dipy/align/tests/test_whole_brain_slr.py b/dipy/align/tests/test_whole_brain_slr.py new file mode 100644 index 0000000000..6d8d4fceda --- /dev/null +++ b/dipy/align/tests/test_whole_brain_slr.py @@ -0,0 +1,63 @@ +import numpy as np +import nibabel as nib +from numpy.testing import (assert_equal, run_module_suite, + assert_array_almost_equal) +from dipy.data import get_data +from dipy.tracking.streamline import Streamlines +from dipy.align.streamlinear import whole_brain_slr, slr_with_qb +from dipy.tracking.distances import bundles_distances_mam +from dipy.align.streamlinear import transform_streamlines +from dipy.align.streamlinear import compose_matrix44, decompose_matrix44 + + +def test_whole_brain_slr(): + streams, hdr = nib.trackvis.read(get_data('fornix')) + fornix = [s[0] for s in streams] + + f = Streamlines(fornix) + f1 = f.copy() + f2 = f.copy() + + # check translation + f2._data += np.array([50, 0, 0]) + + moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr( + f1, f2, verbose=True, rm_small_clusters=2, greater_than=0, + less_than=np.inf, qb_thr=5, progressive=True) + + # we can check the quality of registration by comparing the matrices + # MAM streamline distances before and after SLR + D12 = bundles_distances_mam(f1, f2) + D1M = bundles_distances_mam(f1, moved) + + d12_minsum = np.sum(np.min(D12, axis=0)) + d1m_minsum = np.sum(np.min(D1M, axis=0)) + + assert_equal(d1m_minsum < d12_minsum, True) + + assert_array_almost_equal(transform[:3, 3], [-50, -0, -0], 3) + + # check rotation + mat = compose_matrix44([0, 0, 0, 15, 0, 0]) + + f3 = f.copy() + f3 = transform_streamlines(f3, mat) + + moved, transform, qb_centroids1, qb_centroids2 = slr_with_qb( + f1, f3, verbose=False, rm_small_clusters=1, greater_than=20, + less_than=np.inf, qb_thr=2, progressive=False) + + # we can also check the quality by looking at the decomposed transform + assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2) + + moved, transform, qb_centroids1, qb_centroids2 = slr_with_qb( + f1, f3, verbose=False, rm_small_clusters=1, select_random=400, + greater_than=20, + less_than=np.inf, qb_thr=2, progressive=False) + + # we can also check the quality by looking at the decomposed transform + assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2) + + +if __name__ == '__main__': + run_module_suite() diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py new file mode 100644 index 0000000000..de92eeffed --- /dev/null +++ b/dipy/segment/bundles.py @@ -0,0 +1,404 @@ +import numpy as np +from dipy.tracking.streamline import 
diff --git a/dipy/align/tests/test_whole_brain_slr.py b/dipy/align/tests/test_whole_brain_slr.py
new file mode 100644
index 0000000000..6d8d4fceda
--- /dev/null
+++ b/dipy/align/tests/test_whole_brain_slr.py
@@ -0,0 +1,63 @@
+import numpy as np
+import nibabel as nib
+from numpy.testing import (assert_equal, run_module_suite,
+                           assert_array_almost_equal)
+from dipy.data import get_data
+from dipy.tracking.streamline import Streamlines
+from dipy.align.streamlinear import whole_brain_slr, slr_with_qb
+from dipy.tracking.distances import bundles_distances_mam
+from dipy.align.streamlinear import transform_streamlines
+from dipy.align.streamlinear import compose_matrix44, decompose_matrix44
+
+
+def test_whole_brain_slr():
+    streams, hdr = nib.trackvis.read(get_data('fornix'))
+    fornix = [s[0] for s in streams]
+
+    f = Streamlines(fornix)
+    f1 = f.copy()
+    f2 = f.copy()
+
+    # check translation
+    f2._data += np.array([50, 0, 0])
+
+    moved, transform, qb_centroids1, qb_centroids2 = whole_brain_slr(
+        f1, f2, verbose=True, rm_small_clusters=2, greater_than=0,
+        less_than=np.inf, qb_thr=5, progressive=True)
+
+    # we can check the quality of registration by comparing the MAM
+    # streamline distances before and after SLR
+    D12 = bundles_distances_mam(f1, f2)
+    D1M = bundles_distances_mam(f1, moved)
+
+    d12_minsum = np.sum(np.min(D12, axis=0))
+    d1m_minsum = np.sum(np.min(D1M, axis=0))
+
+    assert_equal(d1m_minsum < d12_minsum, True)
+
+    assert_array_almost_equal(transform[:3, 3], [-50, -0, -0], 3)
+
+    # check rotation
+    mat = compose_matrix44([0, 0, 0, 15, 0, 0])
+
+    f3 = f.copy()
+    f3 = transform_streamlines(f3, mat)
+
+    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qb(
+        f1, f3, verbose=False, rm_small_clusters=1, greater_than=20,
+        less_than=np.inf, qb_thr=2, progressive=False)
+
+    # we can also check the quality by looking at the decomposed transform
+    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)
+
+    moved, transform, qb_centroids1, qb_centroids2 = slr_with_qb(
+        f1, f3, verbose=False, rm_small_clusters=1, select_random=400,
+        greater_than=20,
+        less_than=np.inf, qb_thr=2, progressive=False)
+
+    # we can also check the quality by looking at the decomposed transform
+    assert_array_almost_equal(decompose_matrix44(transform)[3], -15, 2)
+
+
+if __name__ == '__main__':
+    run_module_suite()
diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py
new file mode 100644
index 0000000000..de92eeffed
--- /dev/null
+++ b/dipy/segment/bundles.py
@@ -0,0 +1,404 @@
+import numpy as np
+from dipy.tracking.streamline import (set_number_of_points, nbytes,
+                                      select_random_set_of_streamlines)
+from dipy.segment.clustering import qbx_and_merge
+from dipy.tracking.distances import (bundles_distances_mdf,
+                                     bundles_distances_mam)
+from dipy.align.streamlinear import (StreamlineLinearRegistration,
+                                     BundleMinDistanceMetric,
+                                     BundleSumDistanceMatrixMetric,
+                                     BundleMinDistanceAsymmetricMetric)
+from time import time
+from itertools import chain
+
+from dipy.tracking.streamline import Streamlines
+from nibabel.affines import apply_affine
+
+
+class RecoBundles(object):
+
+    def __init__(self, streamlines, cluster_map=None, clust_thr=15,
+                 nb_pts=20, seed=42, verbose=True):
+        """ Recognition of bundles
+
+        Extract bundles from a participant's tractogram using model bundles
+        segmented from a different subject or an atlas of bundles.
+        See [Garyfallidis17]_ for the details.
+
+        Parameters
+        ----------
+        streamlines : Streamlines
+            The tractogram in which you want to recognize bundles.
+        cluster_map : QB map
+            Provide existing clustering to start RB faster (default None).
+        clust_thr : float
+            Distance threshold in mm for clustering `streamlines`.
+        nb_pts : int
+            Number of points per streamline (default 20).
+        seed : int
+            Seed for the random number generator (default 42).
+
+        Notes
+        -----
+        Before creating this class, make sure that the streamlines and the
+        model bundles are roughly in the same space.
+        Also, the default thresholds assume RAS 1mm^3 space. You may want
+        to adjust them if your streamlines are not in world coordinates.
+
+        References
+        ----------
+        .. [Garyfallidis17] Garyfallidis et al. Recognition of white matter
+           bundles using local and global streamline-based registration and
+           clustering, Neuroimage, 2017.
+        """
+        self.streamlines = streamlines
+
+        self.nb_streamlines = len(self.streamlines)
+        self.verbose = verbose
+
+        self.start_thr = [40, 25, 20]
+
+        if cluster_map is None:
+            self._cluster_streamlines(clust_thr=clust_thr, nb_pts=nb_pts,
+                                      seed=seed)
+        else:
+            if self.verbose:
+                t = time()
+
+            self.cluster_map = cluster_map
+            self.cluster_map.refdata = self.streamlines
+            self.centroids = self.cluster_map.centroids
+            self.nb_centroids = len(self.centroids)
+            self.indices = [cluster.indices for cluster in self.cluster_map]
+
+            if self.verbose:
+                print(' Streamlines have %d centroids'
+                      % (self.nb_centroids,))
+                print(' Total loading duration %0.3f sec. \n'
+                      % (time() - t,))
+
+    def _cluster_streamlines(self, clust_thr, nb_pts, seed):
+
+        rng = np.random.RandomState(seed=seed)
+
+        if self.verbose:
+            t = time()
+            print('# Cluster streamlines using QBx')
+            print(' Tractogram has %d streamlines'
+                  % (len(self.streamlines), ))
+            print(' Size is %0.3f MB' % (nbytes(self.streamlines),))
+            print(' Distance threshold %0.3f' % (clust_thr,))
+
+        # TODO this needs to become a default parameter
+        thresholds = self.start_thr + [clust_thr]
+
+        merged_cluster_map = qbx_and_merge(self.streamlines, thresholds,
+                                           nb_pts, None, rng, self.verbose)
+
+        self.cluster_map = merged_cluster_map
+        self.centroids = merged_cluster_map.centroids
+        self.nb_centroids = len(self.centroids)
+        self.indices = [cluster.indices for cluster in self.cluster_map]
+
+        if self.verbose:
+            print(' Streamlines have %d centroids'
+                  % (self.nb_centroids,))
+            print(' Total duration %0.3f sec. \n' % (time() - t,))
+
+    def recognize(self, model_bundle, model_clust_thr,
+                  reduction_thr=20,
+                  reduction_distance='mdf',
+                  slr=True,
+                  slr_metric=None,
+                  slr_x0=None,
+                  slr_bounds=None,
+                  slr_select=(400, 600),
+                  slr_method='L-BFGS-B',
+                  pruning_thr=10,
+                  pruning_distance='mdf'):
+        """ Recognize the model_bundle in self.streamlines
+
+        Parameters
+        ----------
+        model_bundle : Streamlines
+        model_clust_thr : float
+        reduction_thr : float
+        reduction_distance : string
+            'mdf' or 'mam' (default 'mdf')
+        slr : bool
+            Use Streamline-based Linear Registration (SLR) locally
+            (default True)
+        slr_metric : BundleMinDistanceMetric
+        slr_x0 : array
+            (default None)
+        slr_bounds : array
+            (default None)
+        slr_select : tuple
+            Number of streamlines to select from the model and from the
+            neighborhood of the model to perform the local SLR.
+        slr_method : string
+            Optimization method (default 'L-BFGS-B')
+        pruning_thr : float
+        pruning_distance : string
+            MDF ('mdf') or MAM ('mam')
+
+        Returns
+        -------
+        recognized_transf : Streamlines
+            Recognized bundle in the space of the model tractogram
+        recognized_labels : array
+            Indices of recognized bundle in the original tractogram
+        recognized_bundle : Streamlines
+            Recognized bundle in the space of the original tractogram
+
+        References
+        ----------
+        .. [Garyfallidis17] Garyfallidis et al. Recognition of white matter
+           bundles using local and global streamline-based registration and
+           clustering, Neuroimage, 2017.
+        """
+        if self.verbose:
+            t = time()
+            print('## Recognize given bundle ## \n')
+
+        model_centroids = self._cluster_model_bundle(
+            model_bundle,
+            model_clust_thr=model_clust_thr)
+        neighb_streamlines, neighb_indices = self._reduce_search_space(
+            model_centroids,
+            reduction_thr=reduction_thr,
+            reduction_distance=reduction_distance)
+        if len(neighb_streamlines) == 0:
+            return Streamlines([]), [], Streamlines([])
+        if slr:
+            transf_streamlines = self._register_neighb_to_model(
+                model_bundle,
+                neighb_streamlines,
+                metric=slr_metric,
+                x0=slr_x0,
+                bounds=slr_bounds,
+                select_model=slr_select[0],
+                select_target=slr_select[1],
+                method=slr_method)
+        else:
+            transf_streamlines = neighb_streamlines
+
+        pruned_streamlines, labels = self._prune_what_not_in_model(
+            model_centroids,
+            transf_streamlines,
+            neighb_indices,
+            pruning_thr=pruning_thr,
+            pruning_distance=pruning_distance)
+
+        if self.verbose:
+            print('Total duration of recognition time is %0.3f sec.\n'
+                  % (time()-t,))
+        # return recognized bundle in original streamlines, labels of
+        # recognized bundle and transformed recognized bundle
+        return pruned_streamlines, labels, self.streamlines[labels]
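A compact usage sketch for the class so far, based on the new tests further
below; `tractogram` and `model_bundle` stand for `Streamlines` that are
already roughly aligned.

    from dipy.segment.bundles import RecoBundles

    rb = RecoBundles(tractogram, clust_thr=10)
    pruned, labels, bundle_in_orig = rb.recognize(model_bundle=model_bundle,
                                                  model_clust_thr=5.,
                                                  reduction_thr=10)
    # `labels` indexes the recognized streamlines in the original tractogram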
+
+    def _cluster_model_bundle(self, model_bundle, model_clust_thr, nb_pts=20,
+                              select_randomly=500000):
+
+        if self.verbose:
+            t = time()
+            print('# Cluster model bundle using QBX')
+            print(' Model bundle has %d streamlines'
+                  % (len(model_bundle), ))
+            print(' Distance threshold %0.3f' % (model_clust_thr,))
+        thresholds = self.start_thr + [model_clust_thr]
+
+        model_cluster_map = qbx_and_merge(model_bundle, thresholds,
+                                          nb_pts=nb_pts,
+                                          select_randomly=select_randomly,
+                                          rng=None,
+                                          verbose=self.verbose)
+        model_centroids = model_cluster_map.centroids
+        nb_model_centroids = len(model_centroids)
+
+        if self.verbose:
+            print(' Model bundle has %d centroids'
+                  % (nb_model_centroids,))
+            print(' Duration %0.3f sec. \n' % (time() - t, ))
+        return model_centroids
+
+    def _reduce_search_space(self, model_centroids,
+                             reduction_thr=20, reduction_distance='mdf'):
+        if self.verbose:
+            t = time()
+            print('# Reduce search space')
+            print(' Reduction threshold %0.3f' % (reduction_thr,))
+            print(' Reduction distance {}'.format(reduction_distance))
+
+        if reduction_distance.lower() == 'mdf':
+            if self.verbose:
+                print(' Using MDF')
+            centroid_matrix = bundles_distances_mdf(model_centroids,
+                                                    self.centroids)
+        elif reduction_distance.lower() == 'mam':
+            if self.verbose:
+                print(' Using MAM')
+            centroid_matrix = bundles_distances_mam(model_centroids,
+                                                    self.centroids)
+        else:
+            raise ValueError('Given reduction distance not known')
+
+        centroid_matrix[centroid_matrix > reduction_thr] = np.inf
+
+        mins = np.min(centroid_matrix, axis=0)
+        close_clusters_indices = list(np.where(mins != np.inf)[0])
+
+        close_clusters = self.cluster_map[close_clusters_indices]
+
+        neighb_indices = [cluster.indices for cluster in close_clusters]
+
+        neighb_streamlines = Streamlines(chain(*close_clusters))
+
+        nb_neighb_streamlines = len(neighb_streamlines)
+
+        if nb_neighb_streamlines == 0:
+            print(' You have no neighbor streamlines... '
+                  'No bundle recognition')
+            return Streamlines([]), []
+
+        if self.verbose:
+            print(' Number of neighbor streamlines %d' %
+                  (nb_neighb_streamlines,))
+            print(' Duration %0.3f sec. \n' % (time() - t,))
+
+        return neighb_streamlines, neighb_indices
+
+    def _register_neighb_to_model(self, model_bundle, neighb_streamlines,
+                                  metric=None, x0=None, bounds=None,
+                                  select_model=400, select_target=600,
+                                  method='L-BFGS-B',
+                                  nb_pts=20):
+
+        if self.verbose:
+            print('# Local SLR of neighb_streamlines to model')
+            t = time()
+
+        if metric is None or metric == 'symmetric':
+            metric = BundleMinDistanceMetric()
+        if metric == 'asymmetric':
+            metric = BundleMinDistanceAsymmetricMetric()
+        if metric == 'diagonal':
+            metric = BundleSumDistanceMatrixMetric()
+
+        if x0 is None:
+            x0 = 'similarity'
+
+        if bounds is None:
+            bounds = [(-30, 30), (-30, 30), (-30, 30),
+                      (-45, 45), (-45, 45), (-45, 45), (0.8, 1.2)]
+
+        # TODO this can be sped up by using the centroids directly
+        static = select_random_set_of_streamlines(model_bundle,
+                                                  select_model)
+        moving = select_random_set_of_streamlines(neighb_streamlines,
+                                                  select_target)
+
+        static = set_number_of_points(static, nb_pts)
+        moving = set_number_of_points(moving, nb_pts)
+
+        slr = StreamlineLinearRegistration(metric=metric, x0=x0,
+                                           bounds=bounds,
+                                           method=method)
+        slm = slr.optimize(static, moving)
+
+        transf_streamlines = neighb_streamlines.copy()
+        transf_streamlines._data = apply_affine(
+            slm.matrix, transf_streamlines._data)
+
+        transf_matrix = slm.matrix
+        slr_bmd = slm.fopt
+        slr_iterations = slm.iterations
+
+        if self.verbose:
+            print(' Square-root of BMD is %.3f' % (np.sqrt(slr_bmd),))
+            if slr_iterations is not None:
+                print(' Number of iterations %d' % (slr_iterations,))
+            print(' Matrix size {}'.format(slm.matrix.shape))
+            original = np.get_printoptions()
+            np.set_printoptions(3, suppress=True)
+            print(transf_matrix)
+            print(slm.xopt)
+            np.set_printoptions(**original)
+
+            print(' Duration %0.3f sec. \n' % (time() - t,))
+
+        return transf_streamlines
+
+    def _prune_what_not_in_model(self, model_centroids,
+                                 transf_streamlines,
+                                 neighb_indices,
+                                 mdf_thr=5,
+                                 pruning_thr=10,
+                                 pruning_distance='mdf'):
+
+        if pruning_thr < 0:
+            print('pruning_thr has to be greater than or equal to 0')
+
+        if self.verbose:
+            print('# Prune streamlines using the MDF distance')
+            print(' Pruning threshold %0.3f' % (pruning_thr,))
+            print(' Pruning distance {}'.format(pruning_distance))
+            t = time()
+
+        thresholds = [40, 30, 20, 10, mdf_thr]
+        rtransf_cluster_map = qbx_and_merge(transf_streamlines,
+                                            thresholds, nb_pts=20,
+                                            select_randomly=500000,
+                                            rng=None,
+                                            verbose=self.verbose)
+
+        if self.verbose:
+            print(' QB Duration %0.3f sec. \n' % (time() - t, ))
+
+        rtransf_centroids = rtransf_cluster_map.centroids
+
+        if pruning_distance.lower() == 'mdf':
+            if self.verbose:
+                print(' Using MDF')
+            dist_matrix = bundles_distances_mdf(model_centroids,
+                                                rtransf_centroids)
+        elif pruning_distance.lower() == 'mam':
+            if self.verbose:
+                print(' Using MAM')
+            dist_matrix = bundles_distances_mam(model_centroids,
+                                                rtransf_centroids)
+        else:
+            raise ValueError('Given pruning distance is not available')
+        dist_matrix[np.isnan(dist_matrix)] = np.inf
+        dist_matrix[dist_matrix > pruning_thr] = np.inf
+
+        pruning_matrix = dist_matrix.copy()
+
+        if self.verbose:
+            print(' Pruning matrix size is (%d, %d)'
+                  % pruning_matrix.shape)
+
+        mins = np.min(pruning_matrix, axis=0)
+        pruned_indices = [rtransf_cluster_map[i].indices
+                          for i in np.where(mins != np.inf)[0]]
+        pruned_indices = list(chain(*pruned_indices))
+        pruned_streamlines = [transf_streamlines[i]
+                              for i in pruned_indices]
+
+        initial_indices = list(chain(*neighb_indices))
+        final_indices = [initial_indices[i] for i in pruned_indices]
+        labels = final_indices
+
+        if self.verbose:
+            msg = ' Number of centroids: %d'
+            print(msg % (len(rtransf_centroids),))
+            msg = ' Number of streamlines after pruning: %d'
+            print(msg % (len(pruned_streamlines),))
+
+        if len(pruned_streamlines) == 0:
+            print(' You have removed all streamlines')
+            return Streamlines([]), []
+
+        if self.verbose:
+            print(' Duration %0.3f sec. \n' % (time() - t, ))
+
+        return pruned_streamlines, labels
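The reduce-and-prune steps above share one idiom: threshold a model-to-target
centroid distance matrix and keep the columns that still have a finite
minimum. A toy sketch of that idiom, with made-up shapes:

    import numpy as np

    rng = np.random.RandomState(0)
    D = 20 * rng.rand(4, 10)      # 4 model centroids x 10 target centroids
    D[D > 10] = np.inf            # threshold, e.g. pruning_thr = 10
    keep = np.where(np.min(D, axis=0) != np.inf)[0]
    # `keep` lists the target centroids within 10 mm of some model centroid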
diff --git a/dipy/segment/clustering.py b/dipy/segment/clustering.py
index d00c18dcae..a595401dad 100644
--- a/dipy/segment/clustering.py
+++ b/dipy/segment/clustering.py
@@ -1,12 +1,13 @@
 import operator
 import numpy as np
-
+from time import time
 from abc import ABCMeta, abstractmethod
 
 from dipy.segment.metric import Metric
 from dipy.segment.metric import ResampleFeature
 from dipy.segment.metric import AveragePointwiseEuclideanMetric
 from dipy.segment.metric import MinimumAverageDirectFlipMetric
+from dipy.tracking.streamline import set_number_of_points, nbytes
 
 
 class Identity:
@@ -526,7 +527,8 @@ class QuickBundlesX(Clustering):
     metric : str or `Metric` object (optional)
         The distance metric to use when comparing two streamlines. By
         default, the Minimum average Direct-Flip (MDF) distance
         [Garyfallidis12]_ is
-        used and streamlines are automatically resampled so they have 12 points.
+        used and streamlines are automatically resampled so they have 12
+        points.
 
     References
     ----------
@@ -542,7 +544,7 @@ class QuickBundlesX(Clustering):
     """
     def __init__(self, thresholds, metric="MDF_12points"):
         self.thresholds = thresholds
-
+
         if isinstance(metric, MinimumAverageDirectFlipMetric):
             raise ValueError("Use AveragePointwiseEuclideanMetric instead")
 
@@ -657,3 +659,104 @@ def _traverse(node, level=0):
 
         _traverse(self.root)
 
         return clusters
+
+
+def qbx_and_merge(streamlines, thresholds,
+                  nb_pts=20, select_randomly=None, rng=None, verbose=True):
+    """ Run QuickBundlesX and then run again on the centroids of the last
+    layer.
+
+    Running QuickBundles again on the centroids of the last layer merges
+    clusters that may have originally been divided because of branching.
+    This function helps obtain a result of QuickBundles quality at
+    QuickBundlesX speed. The merging phase has low cost because it is
+    applied only to the centroids rather than the entire dataset.
+
+    Parameters
+    ----------
+    streamlines : Streamlines
+    thresholds : sequence
+        List of distance thresholds for QuickBundlesX.
+    nb_pts : int
+        Number of points for discretizing each streamline.
+    select_randomly : int
+        Randomly select a specific number of streamlines. If None, all the
+        streamlines are used.
+    rng : RandomState
+        If None then RandomState is initialized internally.
+    verbose : bool
+        If True, print information in stdout.
+
+    Returns
+    -------
+    clusters : obj
+        Contains the clusters of the last layer of QuickBundlesX after
+        merging.
+
+    References
+    ----------
+    .. [Garyfallidis12] Garyfallidis E. et al., QuickBundles a method for
+       tractography simplification, Frontiers in Neuroscience,
+       vol 6, no 175, 2012.
+    .. [Garyfallidis16] Garyfallidis E. et al. QuickBundlesX: Sequential
+       clustering of millions of streamlines in multiple
+       levels of detail at record execution time. Proceedings
+       of the International Society of Magnetic Resonance
+       in Medicine (ISMRM). Singapore, 4187, 2016.
+    """
+    if verbose:
+        t = time()
+    len_s = len(streamlines)
+    if select_randomly is None:
+        select_randomly = len_s
+
+    if rng is None:
+        rng = np.random.RandomState()
+    indices = rng.choice(len_s, min(select_randomly, len_s),
+                         replace=False)
+    sample_streamlines = set_number_of_points(streamlines, nb_pts)
+
+    if verbose:
+        print(' Resampled to {} points'.format(nb_pts))
+        print(' Size is %0.3f MB' % (nbytes(sample_streamlines),))
+        print(' Duration of resampling is %0.3f sec.' % (time() - t,))
+        print(' QBX phase starting...')
+
+    qbx = QuickBundlesX(thresholds,
+                        metric=AveragePointwiseEuclideanMetric())
+
+    if verbose:
+        t1 = time()
+    qbx_clusters = qbx.cluster(sample_streamlines, ordering=indices)
+
+    if verbose:
+        print(' Merging phase starting ...')
+
+    qbx_merge = QuickBundlesX([thresholds[-1]],
+                              metric=AveragePointwiseEuclideanMetric())
+
+    final_level = len(thresholds)
+    len_qbx_fl = len(qbx_clusters.get_clusters(final_level))
+    qbx_ordering_final = rng.choice(len_qbx_fl, len_qbx_fl, replace=False)
+
+    qbx_merged_cluster_map = qbx_merge.cluster(
+        qbx_clusters.get_clusters(final_level).centroids,
+        ordering=qbx_ordering_final).get_clusters(1)
+
+    qbx_cluster_map = qbx_clusters.get_clusters(final_level)
+
+    merged_cluster_map = ClusterMapCentroid()
+    for cluster in qbx_merged_cluster_map:
+        merged_cluster = ClusterCentroid(centroid=cluster.centroid)
+        for i in cluster.indices:
+            merged_cluster.indices.extend(qbx_cluster_map[i].indices)
+        merged_cluster_map.add_cluster(merged_cluster)
+
+    merged_cluster_map.refdata = streamlines
+
+    if verbose:
+        print(' QuickBundlesX time for %d random streamlines'
+              % (select_randomly,))
+        print(' Duration %0.3f sec. \n' % (time() - t1,))
+
+    return merged_cluster_map
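A quick usage sketch, in the spirit of the new `test_qbx_and_merge` below;
`streamlines` stands for any `Streamlines` object:

    import numpy as np
    from dipy.segment.clustering import qbx_and_merge

    rng = np.random.RandomState(42)
    merged = qbx_and_merge(streamlines, thresholds=[30, 20, 10],
                           nb_pts=20, rng=rng, verbose=False)
    print(len(merged.centroids))  # typically fewer centroids than plain QBX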
diff --git a/dipy/segment/tests/test_qbx.py b/dipy/segment/tests/test_qbx.py
index 2c3e25acd2..0288857b72 100644
--- a/dipy/segment/tests/test_qbx.py
+++ b/dipy/segment/tests/test_qbx.py
@@ -1,15 +1,14 @@
 import itertools
 import numpy as np
 from numpy.testing import (assert_array_equal, assert_equal, assert_raises,
-                           assert_array_almost_equal, run_module_suite)
+                           run_module_suite)
 
-from dipy.segment.clustering import QuickBundlesX, QuickBundles
+from dipy.segment.clustering import QuickBundlesX, QuickBundles, qbx_and_merge
 from dipy.segment.featurespeed import ResampleFeature
 from dipy.segment.metric import AveragePointwiseEuclideanMetric
 from dipy.segment.metric import MinimumAverageDirectFlipMetric
 from dipy.tracking.streamline import set_number_of_points
-from dipy.data import get_data
-import nibabel.trackvis as tv
+from dipy.tracking.streamline import Streamlines
 
 
 def straight_bundle(nb_streamlines=1, nb_pts=30, step_size=1,
@@ -171,49 +170,69 @@ def test_with_simulated_bundles2():
     tree = qbx_class.cluster(streamlines)
 
     # By default `refdata` refers to data being clustered.
     assert_equal(tree.refdata, streamlines)
-
+
 
 def test_circle_parallel_fornix():
-
+
     circle = streamlines_in_circle(100, step_size=2)
-
+
     parallel = streamlines_parallel(100)
 
     thresholds = [1, 0.1]
     qbx_class = QuickBundlesX(thresholds)
     tree = qbx_class.cluster(circle)
-
-    clusters = tree.get_clusters(0)
+
+    clusters = tree.get_clusters(0)
     assert_equal(len(clusters), 1)
-
+
     clusters = tree.get_clusters(1)
     assert_equal(len(clusters), 3)
-
+
     clusters = tree.get_clusters(2)
     assert_equal(len(clusters), 34)
-
+
     thresholds = [.5]
-
+
     qbx_class = QuickBundlesX(thresholds)
     tree = qbx_class.cluster(parallel)
-
-    clusters = tree.get_clusters(0)
+
+    clusters = tree.get_clusters(0)
     assert_equal(len(clusters), 1)
-
+
     clusters = tree.get_clusters(1)
     assert_equal(len(clusters), 100)
 
 
 def test_raise_mdf():
-
+
     thresholds = [1, 0.1]
-
+
     metric = MinimumAverageDirectFlipMetric()
 
     assert_raises(ValueError, QuickBundlesX, thresholds, metric=metric)
     assert_raises(ValueError, QuickBundles, thresholds[1], metric=metric)
 
 
+def test_qbx_and_merge():
+
+    # Generate synthetic streamlines
+    bundles = bearing_bundles(4, 2)
+    bundles.append(straight_bundle(1))
+
+    streamlines = Streamlines(list(itertools.chain(*bundles)))
+
+    thresholds = [10, 2, 1]
+
+    rng = np.random.RandomState(seed=42)
+    qbxm_centroids = qbx_and_merge(streamlines, thresholds,
+                                   rng=rng).centroids
+
+    qbx = QuickBundlesX(thresholds)
+    tree = qbx.cluster(streamlines)
+    qbx_centroids = tree.get_clusters(3).centroids
+
+    assert_equal(len(qbx_centroids) > len(qbxm_centroids), True)
+
+
 if __name__ == '__main__':
     run_module_suite()
diff --git a/dipy/segment/tests/test_rb.py b/dipy/segment/tests/test_rb.py
new file mode 100644
index 0000000000..8010f714af
--- /dev/null
+++ b/dipy/segment/tests/test_rb.py
@@ -0,0 +1,133 @@
+import numpy as np
+import nibabel as nib
+from numpy.testing import assert_equal, run_module_suite
+from dipy.data import get_data
+from dipy.segment.bundles import RecoBundles
+from dipy.tracking.distances import bundles_distances_mam
+from dipy.tracking.streamline import Streamlines
+from dipy.segment.clustering import qbx_and_merge
+
+
+streams, hdr = nib.trackvis.read(get_data('fornix'))
+fornix = [s[0] for s in streams]
+
+f = Streamlines(fornix)
+f1 = f.copy()
+
+f2 = f1[:20].copy()
+f2._data += np.array([50, 0, 0])
+
+f3 = f1[200:].copy()
+f3._data += np.array([100, 0, 0])
+
+f.extend(f2)
+f.extend(f3)
+
+
+def test_rb_check_defaults():
+
+    rb = RecoBundles(f, clust_thr=10)
+    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
+                                                     model_clust_thr=5.,
+                                                     reduction_thr=10)
+    D = bundles_distances_mam(f2, recognized)
+
+    # check if the bundle is recognized correctly
+    for row in D:
+        assert_equal(row.min(), 0)
+
+
+def test_rb_disable_slr():
+
+    rb = RecoBundles(f, clust_thr=10)
+
+    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
+                                                     model_clust_thr=5.,
+                                                     reduction_thr=10,
+                                                     slr=False)
+
+    D = bundles_distances_mam(f2, recognized)
+
+    # check if the bundle is recognized correctly
+    for row in D:
+        assert_equal(row.min(), 0)
+
+
+def test_rb_no_verbose_and_mam():
+
+    rb = RecoBundles(f, clust_thr=10, verbose=False)
+
+    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
+                                                     model_clust_thr=5.,
+                                                     reduction_thr=10,
+                                                     slr=True,
+                                                     pruning_distance='mam')
+
+    D = bundles_distances_mam(f2, recognized)
+
+    # check if the bundle is recognized correctly
+    for row in D:
+        assert_equal(row.min(), 0)
+
+
+def test_rb_clustermap():
+
+    cluster_map = qbx_and_merge(f, thresholds=[40, 25, 20, 10])
+
+    rb = RecoBundles(f, cluster_map=cluster_map, clust_thr=10)
+    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
+                                                     model_clust_thr=5.,
+                                                     reduction_thr=10)
+    D = bundles_distances_mam(f2, recognized)
+
+    # check if the bundle is recognized correctly
+    for row in D:
+        assert_equal(row.min(), 0)
+
+
+def test_rb_no_neighb():
+    # what if no neighbors are found? No recognition
+
+    b = Streamlines(fornix)
+    b1 = b.copy()
+
+    b2 = b1[:20].copy()
+    b2._data += np.array([100, 0, 0])
+
+    b3 = b1[:20].copy()
+    b3._data += np.array([300, 0, 0])
+
+    b.extend(b3)
+
+    rb = RecoBundles(b, clust_thr=10)
+    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=b2,
+                                                     model_clust_thr=5.,
+                                                     reduction_thr=10)
+
+    assert_equal(len(recognized), 0)
+    assert_equal(len(rec_labels), 0)
+    assert_equal(len(rec_trans), 0)
+
+
+def test_rb_reduction_mam():
+
+    rb = RecoBundles(f, clust_thr=10, verbose=True)
+
+    rec_trans, rec_labels, recognized = rb.recognize(model_bundle=f2,
+                                                     model_clust_thr=5.,
+                                                     reduction_thr=10,
+                                                     reduction_distance='mam',
+                                                     slr=True,
+                                                     slr_metric='asymmetric',
+                                                     pruning_distance='mam')
+
+    D = bundles_distances_mam(f2, recognized)
+
+    # check if the bundle is recognized correctly
+    for row in D:
+        assert_equal(row.min(), 0)
+
+
+if __name__ == '__main__':
+
+    run_module_suite()
diff --git a/dipy/tracking/streamline.py b/dipy/tracking/streamline.py
index 239f9aeb41..8c67e2c0f4 100644
--- a/dipy/tracking/streamline.py
+++ b/dipy/tracking/streamline.py
@@ -698,3 +698,7 @@ def values_from_volume(data, streamlines, affine=None):
         return _extract_vals(data, streamlines, affine=affine)
     else:
         raise ValueError("Data needs to have 3 or 4 dimensions")
+
+
+def nbytes(streamlines):
+    return streamlines._data.nbytes / 1024. ** 2
diff --git a/dipy/tracking/utils.py b/dipy/tracking/utils.py
index 467ee1b1c4..f32b32cf68 100644
--- a/dipy/tracking/utils.py
+++ b/dipy/tracking/utils.py
@@ -361,7 +361,6 @@ def seeds_from_mask(mask, density=[1, 1, 1], voxel_size=None, affine=None):
     >>> mask[0,0,0] = 1
     >>> seeds_from_mask(mask, [1,1,1], [1,1,1])
     array([[ 0.5,  0.5,  0.5]])
-
     >>> seeds_from_mask(mask, [1,2,3], [1,1,1])
     array([[ 0.5       ,  0.25      ,  0.16666667],
            [ 0.5       ,  0.75      ,  0.16666667],
@@ -375,7 +374,6 @@ def seeds_from_mask(mask, density=[1, 1, 1], voxel_size=None, affine=None):
            [ 0.55 ,  0.55 ,  1.875],
            [ 0.55 ,  1.65 ,  5.625],
            [ 0.55 ,  1.65 ,  6.875]])
-
     """
     mask = np.array(mask, dtype=bool, copy=False, ndmin=3)
     if mask.ndim != 3:
@@ -455,11 +453,9 @@ def random_seeds_from_mask(mask, seeds_count=1, seed_count_per_voxel=True,
     --------
     >>> mask = np.zeros((3,3,3), 'bool')
     >>> mask[0,0,0] = 1
-
     >>> np.random.seed(1)
     >>> random_seeds_from_mask(mask, seeds_count=1, seed_count_per_voxel=True)
     array([[-0.082978  ,  0.22032449, -0.49988563]])
-
    >>> random_seeds_from_mask(mask, seeds_count=6, seed_count_per_voxel=True)
     array([[-0.19766743, -0.35324411, -0.40766141],
            [-0.31373979, -0.15443927, -0.10323253],
@@ -473,7 +469,6 @@ def random_seeds_from_mask(mask, seeds_count=1, seed_count_per_voxel=True,
            [ 0.37638915,  0.39460666, -0.41495579],
            [-0.46094522,  0.66983042,  2.3781425 ],
            [-0.40165317,  0.92110763,  2.45788953]])
-
     """
     mask = np.array(mask, dtype=bool, copy=False, ndmin=3)
     if mask.ndim != 3: