From ea37be81f3fd88f6b12a3bce3d797b1edbfe361b Mon Sep 17 00:00:00 2001
From: Francois Rheault
Date: Thu, 24 Mar 2016 18:57:55 -0400
Subject: [PATCH 01/12] First experimentation with centroids weighted by their
 size, new distance metric

---
 dipy/align/bundlemin.pyx   |  92 +++++++++++++++++++++
 dipy/align/streamlinear.py | 158 ++++++++++++++++++++++++++++++++++---
 dipy/segment/bundles.py    |  83 +++++++++++++------
 dipy/segment/clustering.py |  40 +++++++++-
 4 files changed, 336 insertions(+), 37 deletions(-)

diff --git a/dipy/align/bundlemin.pyx b/dipy/align/bundlemin.pyx
index 38faaee282..28aeb876bd 100644
--- a/dipy/align/bundlemin.pyx
+++ b/dipy/align/bundlemin.pyx
@@ -206,6 +206,98 @@ def _bundle_minimum_distance(double [:, ::1] stat,
 
     return dist
 
+
+def _bundle_centroids_minimum_distance(double [:, ::1] stat,
+                                       double [:, ::1] mov,
+                                       int [:] stat_clus_size,
+                                       int [:] mov_clus_size,
+                                       cnp.npy_intp static_size,
+                                       cnp.npy_intp moving_size,
+                                       cnp.npy_intp rows):
+    """ MDF-based pairwise distance optimization function
+
+    We minimize the distance between moving streamlines of the same number of
+    points as they align with the static streamlines.
+
+    Parameters
+    -----------
+    static : array
+        Static streamlines
+    moving : array
+        Moving streamlines
+    static_size : int
+        Number of static streamlines
+    moving_size : int
+        Number of moving streamlines
+    rows : int
+        Number of points per streamline
+
+    Returns
+    -------
+    cost : double
+
+    Notes
+    -----
+    The difference with ``_bundle_minimum_distance_matrix`` is that it does not
+    save the full distance matrix and therefore needs much less memory.
+    """
+
+    cdef:
+        cnp.npy_intp i=0, j=0
+        double sum_i=0, sum_j=0, tmp=0
+        double inf = np.finfo('f8').max
+        double dist=0
+        double * min_j
+        double * min_i
+        openmp.omp_lock_t lock
+
+    with nogil:
+
+        if have_openmp:
+            openmp.omp_init_lock(&lock)
+
+        min_j = <double *> malloc(static_size * sizeof(double))
+        min_i = <double *> malloc(moving_size * sizeof(double))
+
+        for i in range(static_size):
+            min_j[i] = inf
+
+        for j in range(moving_size):
+            min_i[j] = inf
+
+        for i in prange(static_size):
+
+            for j in range(moving_size):
+
+                tmp = min_direct_flip_dist(&stat[i * rows, 0],
+                                           &mov[j * rows, 0], rows)
+
+                if have_openmp:
+                    openmp.omp_set_lock(&lock)
+                if tmp < min_j[i]:
+                    min_j[i] = tmp
+
+                if tmp < min_i[j]:
+                    min_i[j] = tmp
+                if have_openmp:
+                    openmp.omp_unset_lock(&lock)
+
+        if have_openmp:
+            openmp.omp_destroy_lock(&lock)
+
+        for i in range(static_size):
+            sum_i += stat_clus_size[i]*min_j[i]
+
+        for j in range(moving_size):
+            sum_j += mov_clus_size[j]*min_i[j]
+
+        free(min_j)
+        free(min_i)
+
+        dist = (sum_i / static_size + sum_j / moving_size)
+
+        dist = 0.25 * dist * dist
+
+    return dist
+
 
 def _bundle_minimum_distance_static(double [:, ::1] stat,
                                     double [:, ::1] mov,
diff --git a/dipy/align/streamlinear.py b/dipy/align/streamlinear.py
index 99994d54bd..4a68c14b72 100644
--- a/dipy/align/streamlinear.py
+++ b/dipy/align/streamlinear.py
@@ -3,6 +3,7 @@
 from dipy.utils.six import with_metaclass
 from dipy.core.optimize import Optimizer
 from dipy.align.bundlemin import (_bundle_minimum_distance,
+                                  _bundle_centroids_minimum_distance,
                                   _bundle_minimum_distance_static,
                                   distance_matrix_mdf)
 from dipy.tracking.streamline import (transform_streamlines,
@@ -11,7 +12,7 @@
                                       set_number_of_points,
                                       select_random_set_of_streamlines,
                                       length)
-from dipy.segment.clustering import QuickBundles
+from dipy.segment.clustering import QuickBundles, qbx_with_merge
 from dipy.core.geometry import (compose_transformations,
                                 compose_matrix,
                                 decompose_matrix)
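As a reading aid, the weighted cost that the new `_bundle_centroids_minimum_distance`
kernel computes can be sketched in plain NumPy. This is an illustrative
re-implementation under the patch's definitions, not code from the patch;
`min_direct_flip_dist` and `weighted_bmd` below are hypothetical names:

    import numpy as np

    def min_direct_flip_dist(s, t):
        # MDF distance between two streamlines with the same number of
        # points: mean point-wise distance, direct vs. flipped, keep min.
        direct = np.linalg.norm(s - t, axis=1).mean()
        flipped = np.linalg.norm(s - t[::-1], axis=1).mean()
        return min(direct, flipped)

    def weighted_bmd(static, moving, static_sizes, moving_sizes):
        # static, moving: lists of (M, 3) centroids;
        # static_sizes, moving_sizes: cluster sizes used as weights.
        D = np.array([[min_direct_flip_dist(s, m) for m in moving]
                      for s in static])
        sum_i = np.sum(np.asarray(static_sizes) * D.min(axis=1))
        sum_j = np.sum(np.asarray(moving_sizes) * D.min(axis=0))
        dist = sum_i / len(static) + sum_j / len(moving)
        return 0.25 * dist * dist

Each centroid's nearest-neighbour distance is scaled by the size of the
cluster it represents, so centroids of large clusters dominate the cost.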
@@ -102,6 +103,69 @@ def distance(self, xopt):
                                          self.moving_centered_pts,
                                          self.block_size)
 
+
+class BundleCentroidsMinDistanceMetric(StreamlineDistanceMetric):
+    """ Bundle-based Minimum Distance aka BMD
+
+    This is the cost function used by the StreamlineLinearRegistration
+
+    Methods
+    -------
+    setup(static, moving)
+    distance(xopt)
+
+    References
+    ----------
+    .. [Garyfallidis14] Garyfallidis et al., "Direct native-space fiber
+                        bundle alignment for group comparisons", ISMRM,
+                        2014.
+    """
+
+    def setup(self, static, moving):
+        """ Setup static and moving sets of streamlines
+
+        Parameters
+        ----------
+        static : streamlines
+            Fixed or reference set of streamlines.
+        moving : streamlines
+            Moving streamlines.
+
+        Notes
+        -----
+        Call this after the object is initialized and before distance.
+        """
+
+        self._set_static_centroids(static)
+        self._set_moving_centroids(moving)
+
+    def _set_static_centroids(self, static):
+        static_centered_pts, st_idx = unlist_streamlines(static[0])
+        self.static_centered_pts = np.ascontiguousarray(static_centered_pts,
+                                                        dtype=np.float64)
+        self.block_size = st_idx[0]
+        self.static_clusters_size = static[1]
+
+    def _set_moving_centroids(self, moving):
+        self.moving_centered_pts, _ = unlist_streamlines(moving[0])
+        self.moving_clusters_size = moving[1]
+
+    def distance(self, xopt):
+        """ Distance calculated from this Metric
+
+        Parameters
+        ----------
+        xopt : sequence
+            List of affine parameters as a 1D vector.
+        """
+        return bundle_min_distance_centroids_fast(xopt,
+                                                  self.static_centered_pts,
+                                                  self.moving_centered_pts,
+                                                  self.static_clusters_size,
+                                                  self.moving_clusters_size,
+                                                  self.block_size)
+
+
 class BundleMinDistanceStaticMetric(BundleMinDistanceMetric):
 
     def distance(self, xopt):
@@ -298,34 +362,46 @@ def optimize(self, static, moving, mat=None):
         map : StreamlineRegistrationMap
 
         """
+        if type(static) is tuple and type(moving) is tuple:
+            static_centroids = static[0]
+            static_cluster_sizes = static[1]
+            moving_centroids = moving[0]
+            moving_cluster_sizes = moving[1]
+        else:
+            static_centroids = static
+            moving_centroids = moving
 
         msg = 'need to have the same number of points. Use '
         msg += 'set_number_of_points from dipy.tracking.streamline'
 
-        if not np.all(np.array(list(map(len, static))) == static[0].shape[0]):
+        if not np.all(np.array(list(map(len, static_centroids))) ==
+                      static_centroids[0].shape[0]):
             raise ValueError('Static streamlines ' + msg)
 
-        if not np.all(np.array(list(map(len, moving))) == moving[0].shape[0]):
+        if not np.all(np.array(list(map(len, moving_centroids))) ==
+                      moving_centroids[0].shape[0]):
             raise ValueError('Moving streamlines ' + msg)
 
-        if not np.all(np.array(list(map(len, moving))) == static[0].shape[0]):
+        if not np.all(np.array(list(map(len, moving_centroids))) ==
+                      static_centroids[0].shape[0]):
             raise ValueError('Static and moving streamlines ' + msg)
 
         if mat is None:
-            static_centered, static_shift = center_streamlines(static)
-            moving_centered, moving_shift = center_streamlines(moving)
+            static_centered, static_shift = center_streamlines(static_centroids)
+            moving_centered, moving_shift = center_streamlines(moving_centroids)
+
             static_mat = compose_matrix44([static_shift[0], static_shift[1],
                                            static_shift[2], 0, 0, 0])
 
             moving_mat = compose_matrix44([-moving_shift[0], -moving_shift[1],
                                            -moving_shift[2], 0, 0, 0])
 
         else:
-            static_centered = static
-            moving_centered = transform_streamlines(moving, mat)
+            static_centered = static_centroids
+            moving_centered = transform_streamlines(moving_centroids, mat)
             static_mat = np.eye(4)
             moving_mat = mat
 
-        self.metric.setup(static_centered, moving_centered)
+        if type(static) is tuple and type(moving) is tuple:
+            self.metric.setup((static_centered, static_cluster_sizes),
+                              (moving_centered, moving_cluster_sizes))
+        else:
+            self.metric.setup(static_centered, moving_centered)
 
         distance = self.metric.distance
 
@@ -338,7 +414,7 @@ def optimize(self, static, moving, mat=None):
                              method=self.method,
                              options=self.options,
                              evolution=self.evolution)
-        if self.method == 'L-BFGS-B':
+        if self.method == 'L-BFGS-B' or self.method == 'centroids':
             if self.options is None:
                 self.options = {'maxcor': 10, 'ftol': 1e-7,
@@ -613,6 +689,68 @@ def bundle_min_distance_fast(t, static, moving, block_size):
                                      block_size)
 
 
+def bundle_min_distance_centroids_fast(t, static, moving,
+                                       static_clusters_size,
+                                       moving_clusters_size,
+                                       block_size):
+    """ MDF-based pairwise distance optimization function (MIN)
+
+    We minimize the distance between moving streamlines as they align
+    with the static streamlines.
+
+    Parameters
+    -----------
+    t : array
+        1D array. t is a vector of affine transformation parameters with
+        size at least 6.
+        If size is 6, t is interpreted as translation + rotation.
+        If size is 7, t is interpreted as translation + rotation +
+        isotropic scaling.
+        If size is 12, t is interpreted as translation + rotation +
+        scaling + shearing.
+
+    static : array
+        N*M x 3 array. All the points of the static streamlines. With order of
+        streamlines intact. Where N is the number of streamlines and M
+        is the number of points per streamline.
+
+    moving : array
+        K*M x 3 array. All the points of the moving streamlines. With order of
+        streamlines intact. Where K is the number of streamlines and M
+        is the number of points per streamline.
+
+    block_size : int
+        Number of points per streamline. All streamlines in static and moving
+        should have the same number of points M.
+
+    Returns
+    -------
+    cost: float
+
+    Notes
+    -----
+    This is a faster implementation of ``bundle_min_distance``, which requires
+    that all the points of each streamline are allocated into an ndarray
+    (of shape N*M by 3, with N the number of streamlines and M the
+    number of points per streamline). This can be done by calling
+    `dipy.tracking.streamline.unlist_streamlines`.
+
+    """
+
+    aff = compose_matrix44(t)
+    moving = np.dot(aff[:3, :3], moving.T).T + aff[:3, 3]
+    moving = np.ascontiguousarray(moving, dtype=np.float64)
+
+    rows = static.shape[0] // block_size
+    cols = moving.shape[0] // block_size
+
+    static_clusters_size = np.asarray(static_clusters_size, dtype=np.int32)
+    moving_clusters_size = np.asarray(moving_clusters_size, dtype=np.int32)
+
+    return _bundle_centroids_minimum_distance(static, moving,
+                                              static_clusters_size,
+                                              moving_clusters_size,
+                                              rows, cols,
+                                              block_size)
+
+
 def bundle_min_distance_static_fast(t, static, moving, block_size):
     """ MDF-based pairwise distance optimization function (MIN)
 
diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py
index 3e2a6ee349..b5a1e277cc 100644
--- a/dipy/segment/bundles.py
+++ b/dipy/segment/bundles.py
@@ -10,7 +10,8 @@
 from dipy.align.streamlinear import (StreamlineLinearRegistration,
                                      BundleMinDistanceMetric,
                                      BundleSumDistanceMatrixMetric,
-                                     BundleMinDistanceStaticMetric)
+                                     BundleMinDistanceStaticMetric,
+                                     BundleCentroidsMinDistanceMetric)
 from dipy.align.bundlemin import distance_matrix_mdf
 from time import time
 from itertools import chain
@@ -134,7 +135,7 @@ def cluster_streamlines(self, clust_thr=15, nb_pts=20):
         thresholds = [40, 25, 20, clust_thr]
 
         merged_cluster_map = qbx_with_merge(self.streamlines, thresholds,
-                                            nb_pts, None, self.verbose)
+                                            nb_pts, 500000, self.verbose)
 
         self.cluster_map = merged_cluster_map
         self.centroids = merged_cluster_map.centroids
@@ -207,7 +208,7 @@ def cluster_model_bundle(self, model_clust_thr, nb_pts=20):
         thresholds = [40, 25, 20, model_clust_thr]
 
         self.model_cluster_map = qbx_with_merge(self.model_bundle, thresholds,
                                                 nb_pts=nb_pts,
-                                                select_randomly=500000, verbose=self.verbose)
+                                                select_randomly=50000, verbose=self.verbose)
         self.model_centroids = self.model_cluster_map.centroids
         self.nb_model_centroids = len(self.model_centroids)
 
@@ -242,12 +243,21 @@ def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf'):
         close_clusters_indices = list(np.where(mins != np.inf)[0])
 
         # set_trace()
-
+        close_clusters_indices_tuple = []
         # TODO overflow with the next line
         close_clusters = self.cluster_map[close_clusters_indices]
+        for i in close_clusters_indices:
+            close_clusters_indices_tuple.append((i, self.cluster_map.clusters_sizes()[i]))
+        sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1])
+
+        close_clusters_indices = []
+        for j in range(len(sorted_tuple)):
+            if j < 600 and sorted_tuple[-(j+1)][1] > 10:
+                close_clusters_indices.append(sorted_tuple[-(j+1)][0])
 
         close_centroids = [self.centroids[i]
                            for i in close_clusters_indices]
+
         close_indices = [cluster.indices for cluster in close_clusters]
 
         close_streamlines = ArraySequence(chain(*close_clusters))
@@ -255,6 +265,8 @@ def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf'):
 
         self.neighb_streamlines = close_streamlines
         self.neighb_clusters = close_clusters
+
+        self.neighb_clusters_size = np.array([len(c) for c in self.neighb_clusters], dtype=np.int16)
         self.neighb_centroids = close_centroids
         self.neighb_indices = close_indices
 
@@ -289,37 +301,49 @@ def register_neighb_to_model(self, metric=None,
x0=None, bounds=None, metric = BundleMinDistanceStaticMetric() if metric == 'diagonal': metric = BundleSumDistanceMatrixMetric() - - if x0 is None: - x0 = 'similarity' - - if bounds is None: - bounds = [(-30, 30), (-30, 30), (-30, 30), - (-45, 45), (-45, 45), (-45, 45), (0.8, 1.2)] - - if not use_centroids: + if metric == 'centroids': + metric = BundleCentroidsMinDistanceMetric() + moving = (self.neighb_centroids, np.asarray(self.neighb_clusters_size, dtype=np.int32)) + static = (self.model_centroids, np.asarray(self.model_cluster_map.clusters_sizes(), dtype=np.int32)) + else: static = select_random_set_of_streamlines(self.model_bundle, select_model) moving = select_random_set_of_streamlines(self.neighb_streamlines, select_target) - static = set_number_of_points(static, nb_pts) moving = set_number_of_points(moving, nb_pts) - else: - static = self.model_centroids + if x0 is None: + x0 = 'similarity' - thresholds = [40, 30, 20, 10, 5] - cluster_map = qbx_with_merge(moving_all, thresholds, nb_pts=nb_pts, - select_randomly=500000, verbose=self.verbose) + if bounds is None: + bounds = [(-30, 30), (-30, 30), (-30, 30), + (-45, 45), (-45, 45), (-45, 45), (0.8, 1.2)] - moving = cluster_map.centroids + #if not use_centroids: + # static = select_random_set_of_streamlines(self.model_bundle, + # select_model) + # moving = select_random_set_of_streamlines(self.neighb_streamlines, + # select_target) +# + # static = set_number_of_points(static, nb_pts) + # moving = set_number_of_points(moving, nb_pts) +# + #else: + # static = self.model_centroids +# + # thresholds = [40, 30, 20, 10, 5] + # cluster_map = qbx_with_merge(moving_all, thresholds, nb_pts=nb_pts, + # select_randomly=500000, verbose=self.verbose) +# + # moving = cluster_map.centroids if progressive == False: slr = StreamlineLinearRegistration(metric=metric, x0=x0, bounds=bounds, method=method) + #slm = slr.optimize(self.model_cluster_map, self.neighb_centroids) slm = slr.optimize(static, moving) if progressive == True: @@ -396,12 +420,21 @@ def register_neighb_to_model(self, metric=None, x0=None, bounds=None, self.slr_bmd = slm.fopt self.slr_iterations = slm.iterations - self.slr_initial_matrix = distance_matrix_mdf( - static, moving) + if type(static) is tuple and type(moving) is tuple: + self.slr_initial_matrix = distance_matrix_mdf( + static[0], moving[0]) + + self.slr_final_matrix = distance_matrix_mdf( + static[0], transform_streamlines(moving[0], slm.matrix)) + self.slr_xopt = slm.xopt + else: + self.slr_initial_matrix = distance_matrix_mdf( + static, moving) + + self.slr_final_matrix = distance_matrix_mdf( + static, transform_streamlines(moving, slm.matrix)) + self.slr_xopt = slm.xopt - self.slr_final_matrix = distance_matrix_mdf( - static, transform_streamlines(moving, slm.matrix)) - self.slr_xopt = slm.xopt if self.verbose: print(' Square-root of BMD is %.3f' % (np.sqrt(self.slr_bmd),)) diff --git a/dipy/segment/clustering.py b/dipy/segment/clustering.py index 04065f3986..9072ee382d 100644 --- a/dipy/segment/clustering.py +++ b/dipy/segment/clustering.py @@ -6,6 +6,7 @@ from dipy.segment.metric import Metric from dipy.segment.metric import ResampleFeature from dipy.segment.metric import AveragePointwiseEuclideanMetric +from dipy.tracking.streamline import set_number_of_points class Identity: @@ -580,7 +581,6 @@ def cluster(self, streamlines, ordering=None): cluster_map.refdata = streamlines return cluster_map - class QuickBundlesOnline(Clustering): r""" Clusters streamlines using QuickBundles [Garyfallidis12]_. 
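When the 'centroids' metric is selected above, static and moving stop being
plain streamline sequences and become (centroids, cluster_sizes) tuples. A toy
sketch of that layout, with made-up shapes and counts (not data from the
patch):

    import numpy as np

    # 12 model centroids and 30 neighbourhood centroids, 20 points each
    model_centroids = [np.random.rand(20, 3) for _ in range(12)]
    model_sizes = np.random.randint(11, 200, size=12)
    neighb_centroids = [np.random.rand(20, 3) for _ in range(30)]
    neighb_sizes = np.random.randint(11, 500, size=30)

    static = (model_centroids, np.asarray(model_sizes, dtype=np.int32))
    moving = (neighb_centroids, np.asarray(neighb_sizes, dtype=np.int32))
    # slr.optimize(static, moving) unpacks the tuples and weights each
    # centroid's nearest-neighbour term by its cluster size.

The int32 cast matches what the Cython kernel expects for the cluster-size
arrays.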
@@ -692,7 +692,6 @@ def cluster(self, streamline, idx=None): self._dirty = True return cluster_id - class QuickBundlesX(Clustering): r""" Clusters streamlines using QuickBundlesX. @@ -751,6 +750,7 @@ def cluster(self, streamlines, ordering=None): #cluster_map.refdata = streamlines return qbx +<<<<<<< HEAD class QuickBundlesXOnline(Clustering): r""" Clusters streamlines using QuickBundlesX. @@ -835,3 +835,39 @@ def cluster(self, streamline, idx=None): self._qbx_state, path = self._cluster_fct(streamline, self._idx) self._dirty = True return path +======= +def qbx_with_merge(streamlines, thresholds, nb_pts=20): + + sample_streamlines = set_number_of_points(streamlines, nb_pts) + + qbx = QuickBundlesX(thresholds, + metric=AveragePointwiseEuclideanMetric()) + + qbx_clusters = qbx.cluster(sample_streamlines) + + qbx_merge = QuickBundlesX([thresholds[-1]], + metric=AveragePointwiseEuclideanMetric()) + + final_level = len(thresholds) + + qbx_ordering_final = np.random.choice( + len(qbx_clusters.get_clusters(final_level)), + len(qbx_clusters.get_clusters(final_level)), replace=False) + + qbx_merged_cluster_map = qbx_merge.cluster( + qbx_clusters.get_clusters(final_level).centroids, + ordering=qbx_ordering_final).get_clusters(1) + + qbx_cluster_map = qbx_clusters.get_clusters(final_level) + + merged_cluster_map = ClusterMapCentroid() + for cluster in qbx_merged_cluster_map: + merged_cluster = ClusterCentroid(centroid=cluster.centroid) + for i in cluster.indices: + merged_cluster.indices.extend(qbx_cluster_map[i].indices) + merged_cluster_map.add_cluster(merged_cluster) + + merged_cluster_map.refdata = streamlines + + return merged_cluster_map +>>>>>>> First experimentation with centroids weigthed by their size, new distance metric From 6f51faca0ec0e3477fe8713aab05bf0ad7db815c Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Thu, 24 Mar 2016 19:16:35 -0400 Subject: [PATCH 02/12] Change function descriptions/variables/return for new metric --- dipy/align/bundlemin.pyx | 22 +++++++++++++--------- dipy/align/streamlinear.py | 34 +++++++++++++++++++++++----------- dipy/segment/bundles.py | 25 ++++--------------------- 3 files changed, 40 insertions(+), 41 deletions(-) diff --git a/dipy/align/bundlemin.pyx b/dipy/align/bundlemin.pyx index 28aeb876bd..de70809d02 100644 --- a/dipy/align/bundlemin.pyx +++ b/dipy/align/bundlemin.pyx @@ -215,21 +215,25 @@ def _bundle_centroids_minimum_distance(double [:, ::1] stat, cnp.npy_intp rows): """ MDF-based pairwise distance optimization function - We minimize the distance between moving streamlines of the same number of - points as they align with the static streamlines. + We minimize the distance between moving centroids of the same number of + points as they align with the static centroids. 
     Parameters
     -----------
-    static : array
-        Static streamlines
-    moving : array
-        Moving streamlines
+    stat : array
+        Static centroids
+    mov : array
+        Moving centroids
+    stat_clus_size : array
+        Size of each static cluster
+    mov_clus_size : array
+        Size of each moving cluster
     static_size : int
-        Number of static streamlines
+        Number of static centroids
     moving_size : int
-        Number of moving streamlines
+        Number of moving centroids
     rows : int
-        Number of points per streamline
+        Number of points per centroid
 
     Returns
     -------
diff --git a/dipy/align/streamlinear.py b/dipy/align/streamlinear.py
index 4a68c14b72..0ca67789cd 100644
--- a/dipy/align/streamlinear.py
+++ b/dipy/align/streamlinear.py
@@ -12,7 +12,7 @@
                                       set_number_of_points,
                                       select_random_set_of_streamlines,
                                       length)
-from dipy.segment.clustering import QuickBundles, qbx_with_merge
+from dipy.segment.clustering import QuickBundles
 from dipy.core.geometry import (compose_transformations,
                                 compose_matrix,
                                 decompose_matrix)
@@ -105,6 +105,7 @@ def distance(self, xopt):
 
 class BundleCentroidsMinDistanceMetric(StreamlineDistanceMetric):
     """ Bundle-based Minimum Distance aka BMD
+        Uses centroids and their cluster sizes as weights
 
     This is the cost function used by the StreamlineLinearRegistration
 
@@ -125,9 +126,9 @@ def setup(self, static, moving):
 
         Parameters
         ----------
-        static : streamlines
+        static : tuple - (centroids, cluster sizes)
             Fixed or reference set of streamlines.
-        moving : streamlines
+        moving : tuple - (centroids, cluster sizes)
             Moving streamlines.
 
         Notes
@@ -347,9 +348,9 @@ def optimize(self, static, moving, mat=None):
 
         Parameters
         ----------
-        static : streamlines
+        static : streamlines OR tuple - (centroids, weights)
             Reference or fixed set of streamlines.
-        moving : streamlines
+        moving : streamlines OR tuple - (centroids, weights)
             Moving set of streamlines.
         mat : array
             Transformation (4, 4) matrix to start the registration. ``mat``
@@ -362,6 +363,8 @@ def optimize(self, static, moving, mat=None):
         map : StreamlineRegistrationMap
 
         """
+        # If we are using centroids and their weights, static/moving are
+        # tuples; this function only needs the centroids.
         if type(static) is tuple and type(moving) is tuple:
             static_centroids = static[0]
             static_cluster_sizes = static[1]
@@ -709,14 +712,23 @@ def bundle_min_distance_centroids_fast(t, static, moving,
         scaling + shearing.
 
     static : array
-        N*M x 3 array. All the points of the static streamlines. With order of
-        streamlines intact. Where N is the number of streamlines and M
-        is the number of points per streamline.
+        N*M x 3 array. All the points of the static centroids. With order of
+        centroids intact. Where N is the number of centroids and M
+        is the number of points per centroid.
 
     moving : array
-        K*M x 3 array. All the points of the moving streamlines. With order of
-        streamlines intact. Where K is the number of streamlines and M
-        is the number of points per streamline.
+        K*M x 3 array. All the points of the moving centroids. With order of
+        centroids intact. Where K is the number of centroids and M
+        is the number of points per centroid.
+
+    static_clusters_size : array
+        An array of size N. Each int is the size of the cluster associated
+        with the respective static centroid.
+
+    moving_clusters_size : array
+        An array of size K. Each int is the size of the cluster associated
+        with the respective moving centroid.
 
     block_size : int
         Number of points per streamline. All streamlines in static and moving
diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py
index b5a1e277cc..c12097d910 100644
--- a/dipy/segment/bundles.py
+++ b/dipy/segment/bundles.py
@@ -251,6 +251,7 @@ def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf'):
         sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1])
 
         close_clusters_indices = []
+        #Only keep the cluster first 600 clusters that are big enough
         for j in range(len(sorted_tuple)):
             if j < 600 and sorted_tuple[-(j+1)][1] > 10:
                 close_clusters_indices.append(sorted_tuple[-(j+1)][0])
@@ -320,30 +321,12 @@ def register_neighb_to_model(self, metric=None, x0=None, bounds=None,
             bounds = [(-30, 30), (-30, 30), (-30, 30),
                       (-45, 45), (-45, 45), (-45, 45), (0.8, 1.2)]
 
-        #if not use_centroids:
-        #    static = select_random_set_of_streamlines(self.model_bundle,
-        #                                              select_model)
-        #    moving = select_random_set_of_streamlines(self.neighb_streamlines,
-        #                                              select_target)
-#
-        #    static = set_number_of_points(static, nb_pts)
-        #    moving = set_number_of_points(moving, nb_pts)
-#
-        #else:
-        #    static = self.model_centroids
-#
-        #    thresholds = [40, 30, 20, 10, 5]
-        #    cluster_map = qbx_with_merge(moving_all, thresholds, nb_pts=nb_pts,
-        #                                 select_randomly=500000, verbose=self.verbose)
-#
-        #    moving = cluster_map.centroids
-
         if progressive == False:
 
             slr = StreamlineLinearRegistration(metric=metric, x0=x0,
                                                bounds=bounds,
                                                method=method)
-            #slm = slr.optimize(self.model_cluster_map, self.neighb_centroids)
+
             slm = slr.optimize(static, moving)
 
         if progressive == True:
@@ -413,13 +396,13 @@ def register_neighb_to_model(self, metric=None, x0=None, bounds=None,
             raise ValueError('Incorrect SLR transform')
 
         self.transf_streamlines = self.neighb_streamlines.copy()
         self.transf_streamlines._data = apply_affine(slm.matrix, self.transf_streamlines._data)
-        #self.transf_streamlines = transform_streamlines(
-        #    self.neighb_streamlines, slm.matrix)
 
         self.transf_matrix = slm.matrix
         self.slr_bmd = slm.fopt
         self.slr_iterations = slm.iterations
 
+        # If we are using centroids and their weights, static/moving are
+        # tuples; this function only needs the centroids.
         if type(static) is tuple and type(moving) is tuple:
             self.slr_initial_matrix = distance_matrix_mdf(
                 static[0], moving[0])

From 72718304cff92e560faa5ef65772a4205b438c99 Mon Sep 17 00:00:00 2001
From: Francois Rheault
Date: Thu, 24 Mar 2016 23:17:35 -0400
Subject: [PATCH 03/12] Create structures according to metric

---
 dipy/segment/bundles.py | 108 ++++++++++++++++++++++++++++------------
 1 file changed, 76 insertions(+), 32 deletions(-)

diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py
index c12097d910..e5b4a39649 100644
--- a/dipy/segment/bundles.py
+++ b/dipy/segment/bundles.py
@@ -167,10 +167,15 @@ def recognize(self, model_bundle, model_clust_thr,
             print('## Recognize given bundle ## \n')
 
         self.model_bundle = model_bundle
-        self.cluster_model_bundle(model_clust_thr=model_clust_thr)
+        self.cluster_model_bundle(model_clust_thr=model_clust_thr, metric=slr_metric)
+
         success = self.reduce_search_space(
             reduction_thr=reduction_thr,
-            reduction_distance=reduction_distance)
+            reduction_distance=reduction_distance,
+            metric=slr_metric)
+
         if not success:
             self.pruned_streamlines = None
             self.transf_streamlines = None
@@ -197,7 +202,7 @@ def recognize(self, model_bundle, model_clust_thr,
               % (time()-t,))
         return self.pruned_streamlines
 
-    def cluster_model_bundle(self, model_clust_thr, nb_pts=20):
+    def cluster_model_bundle(self, model_clust_thr, nb_pts=20, metric=None):
self.model_clust_thr = model_clust_thr t = time() if self.verbose: @@ -209,15 +214,37 @@ def cluster_model_bundle(self, model_clust_thr, nb_pts=20): self.model_cluster_map = qbx_with_merge(self.model_bundle, thresholds, nb_pts=nb_pts, select_randomly=50000, verbose=self.verbose) - self.model_centroids = self.model_cluster_map.centroids - self.nb_model_centroids = len(self.model_centroids) + + # TODO overflow with the next line + if metric == 'centroids': + close_clusters_indices_tuple = [] + cluster_sizes = self.model_cluster_map.clusters_sizes() + for i in range(len(self.model_cluster_map.centroids)): + close_clusters_indices_tuple.append((i, cluster_sizes[i])) + sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1]) + + valid_centroids = [] + #Only keep the cluster first 600 clusters that are big enough + for j in range(len(sorted_tuple)): + if j < 400 and sorted_tuple[-(j+1)][1] > 10: + valid_centroids.append(sorted_tuple[-(j+1)][0]) + + self.model_centroids = [self.model_cluster_map.centroids[i] + for i in valid_centroids] + self.nb_model_centroids = len(self.model_centroids) + self.model_clusters_sizes = [cluster_sizes[i] + for i in valid_centroids] + else: + self.model_centroids = self.model_cluster_map.centroids + self.nb_model_centroids = len(self.model_centroids) + self.model_clusters_sizes = self.model_cluster_map.clusters_sizes() if self.verbose: print(' Model bundle has %d centroids' % (self.nb_model_centroids,)) print(' Duration %0.3f sec. \n' % (time() - t, )) - def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf'): + def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf', metric=None): t = time() if self.verbose: print('# Reduce search space') @@ -241,37 +268,56 @@ def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf'): mins = np.min(centroid_matrix, axis=0) close_clusters_indices = list(np.where(mins != np.inf)[0]) - # set_trace() - close_clusters_indices_tuple = [] - # TODO overflow with the next line - close_clusters = self.cluster_map[close_clusters_indices] - for i in close_clusters_indices: - close_clusters_indices_tuple.append((i, self.cluster_map.clusters_sizes()[i])) - sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1]) + if metric == 'centroids': + close_clusters_indices_tuple = [] + # TODO overflow with the next line + close_clusters = self.cluster_map[close_clusters_indices] + for i in close_clusters_indices: + close_clusters_indices_tuple.append((i, self.cluster_map.clusters_sizes()[i])) + sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1]) + + close_clusters_indices = [] + #Only keep the cluster first 600 clusters that are big enough + for j in range(len(sorted_tuple)): + if j < 600 and sorted_tuple[-(j+1)][1] > 10: + close_clusters_indices.append(sorted_tuple[-(j+1)][0]) + + close_clusters=self.cluster_map[close_clusters_indices] + close_centroids = [self.centroids[i] + for i in close_clusters_indices] - close_clusters_indices = [] - #Only keep the cluster first 600 clusters that are big enough - for j in range(len(sorted_tuple)): - if j < 600 and sorted_tuple[-(j+1)][1] > 10: - close_clusters_indices.append(sorted_tuple[-(j+1)][0]) + close_indices = [cluster.indices for cluster in close_clusters] - close_centroids = [self.centroids[i] - for i in close_clusters_indices] + close_streamlines = ArraySequence(chain(*close_clusters)) + self.centroid_matrix = centroid_matrix.copy() - close_indices = [cluster.indices for cluster in 
close_clusters] + self.neighb_streamlines = close_streamlines + self.neighb_clusters = close_clusters - close_streamlines = ArraySequence(chain(*close_clusters)) - self.centroid_matrix = centroid_matrix.copy() + self.neighb_clusters_size = np.array([len(c) for c in self.neighb_clusters], dtype=np.int16) + self.neighb_centroids = close_centroids + self.neighb_indices = close_indices + + self.nb_neighb_streamlines = len(self.neighb_streamlines) + else: + close_clusters=self.cluster_map[close_clusters_indices] + close_centroids = [self.centroids[i] + for i in close_clusters_indices] - self.neighb_streamlines = close_streamlines - self.neighb_clusters = close_clusters + close_indices = [cluster.indices for cluster in close_clusters] - self.neighb_clusters_size = np.array([len(c) for c in self.neighb_clusters], dtype=np.int16) - self.neighb_centroids = close_centroids - self.neighb_indices = close_indices + close_streamlines = ArraySequence(chain(*close_clusters)) + self.centroid_matrix = centroid_matrix.copy() - self.nb_neighb_streamlines = len(self.neighb_streamlines) + self.neighb_streamlines = close_streamlines + self.neighb_clusters = close_clusters + + self.neighb_clusters_size = np.array([len(c) for c in self.neighb_clusters], dtype=np.int16) + self.neighb_centroids = close_centroids + self.neighb_indices = close_indices + + self.nb_neighb_streamlines = len(self.neighb_streamlines) if self.nb_neighb_streamlines == 0: print(' You have no neighbor streamlines... No bundle recognition') @@ -295,7 +341,6 @@ def register_neighb_to_model(self, metric=None, x0=None, bounds=None, print('# Local SLR of neighb_streamlines to model') t = time() - if metric is None or metric == 'symmetric': metric = BundleMinDistanceMetric() if metric == 'asymmetric': @@ -305,7 +350,7 @@ def register_neighb_to_model(self, metric=None, x0=None, bounds=None, if metric == 'centroids': metric = BundleCentroidsMinDistanceMetric() moving = (self.neighb_centroids, np.asarray(self.neighb_clusters_size, dtype=np.int32)) - static = (self.model_centroids, np.asarray(self.model_cluster_map.clusters_sizes(), dtype=np.int32)) + static = (self.model_centroids, np.asarray(self.model_clusters_sizes, dtype=np.int32)) else: static = select_random_set_of_streamlines(self.model_bundle, select_model) @@ -322,7 +367,6 @@ def register_neighb_to_model(self, metric=None, x0=None, bounds=None, (-45, 45), (-45, 45), (-45, 45), (0.8, 1.2)] if progressive == False: - slr = StreamlineLinearRegistration(metric=metric, x0=x0, bounds=bounds, method=method) From d9197895e6e528c0b6a5e94ef45645488e09bdac Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Sat, 2 Apr 2016 08:56:40 -0400 Subject: [PATCH 04/12] Testing and refactoring the metric 'centroids' --- dipy/segment/bundles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py index e5b4a39649..ee60ed76e2 100644 --- a/dipy/segment/bundles.py +++ b/dipy/segment/bundles.py @@ -132,7 +132,7 @@ def cluster_streamlines(self, clust_thr=15, nb_pts=20): print(' Size is %0.3f MB' % (nbytes(self.streamlines),)) print(' Distance threshold %0.3f' % (clust_thr,)) - thresholds = [40, 25, 20, clust_thr] + thresholds = [40, 30, 20, clust_thr] merged_cluster_map = qbx_with_merge(self.streamlines, thresholds, nb_pts, 500000, self.verbose) From 8fa389e1085078aed0fc437181fbee38a9e00300 Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Wed, 6 Apr 2016 13:54:29 -0400 Subject: [PATCH 05/12] Removed duplicated lines of code, lines that are common for 
all metrics --- dipy/segment/bundles.py | 43 ++++++++++---------------------------- dipy/segment/clustering.py | 5 +---- 2 files changed, 12 insertions(+), 36 deletions(-) diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py index ee60ed76e2..e137e02dbc 100644 --- a/dipy/segment/bundles.py +++ b/dipy/segment/bundles.py @@ -215,7 +215,6 @@ def cluster_model_bundle(self, model_clust_thr, nb_pts=20, metric=None): self.model_cluster_map = qbx_with_merge(self.model_bundle, thresholds, nb_pts=nb_pts, select_randomly=50000, verbose=self.verbose) - # TODO overflow with the next line if metric == 'centroids': close_clusters_indices_tuple = [] cluster_sizes = self.model_cluster_map.clusters_sizes() @@ -224,7 +223,7 @@ def cluster_model_bundle(self, model_clust_thr, nb_pts=20, metric=None): sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1]) valid_centroids = [] - #Only keep the cluster first 600 clusters that are big enough + #Only keep the cluster first 400 clusters that are big enough for j in range(len(sorted_tuple)): if j < 400 and sorted_tuple[-(j+1)][1] > 10: valid_centroids.append(sorted_tuple[-(j+1)][0]) @@ -268,11 +267,9 @@ def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf', metric mins = np.min(centroid_matrix, axis=0) close_clusters_indices = list(np.where(mins != np.inf)[0]) - # set_trace() + if metric == 'centroids': close_clusters_indices_tuple = [] - # TODO overflow with the next line - close_clusters = self.cluster_map[close_clusters_indices] for i in close_clusters_indices: close_clusters_indices_tuple.append((i, self.cluster_map.clusters_sizes()[i])) sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1]) @@ -287,37 +284,19 @@ def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf', metric close_centroids = [self.centroids[i] for i in close_clusters_indices] - close_indices = [cluster.indices for cluster in close_clusters] - - close_streamlines = ArraySequence(chain(*close_clusters)) - self.centroid_matrix = centroid_matrix.copy() - - self.neighb_streamlines = close_streamlines - self.neighb_clusters = close_clusters - - self.neighb_clusters_size = np.array([len(c) for c in self.neighb_clusters], dtype=np.int16) - self.neighb_centroids = close_centroids - self.neighb_indices = close_indices - - self.nb_neighb_streamlines = len(self.neighb_streamlines) - else: - close_clusters=self.cluster_map[close_clusters_indices] - close_centroids = [self.centroids[i] - for i in close_clusters_indices] - - close_indices = [cluster.indices for cluster in close_clusters] + close_indices = [cluster.indices for cluster in close_clusters] - close_streamlines = ArraySequence(chain(*close_clusters)) - self.centroid_matrix = centroid_matrix.copy() + close_streamlines = ArraySequence(chain(*close_clusters)) + self.centroid_matrix = centroid_matrix.copy() - self.neighb_streamlines = close_streamlines - self.neighb_clusters = close_clusters + self.neighb_streamlines = close_streamlines + self.neighb_clusters = close_clusters - self.neighb_clusters_size = np.array([len(c) for c in self.neighb_clusters], dtype=np.int16) - self.neighb_centroids = close_centroids - self.neighb_indices = close_indices + self.neighb_clusters_size = np.array([len(c) for c in self.neighb_clusters], dtype=np.int16) + self.neighb_centroids = close_centroids + self.neighb_indices = close_indices - self.nb_neighb_streamlines = len(self.neighb_streamlines) + self.nb_neighb_streamlines = len(self.neighb_streamlines) if 
self.nb_neighb_streamlines == 0: print(' You have no neighbor streamlines... No bundle recognition') diff --git a/dipy/segment/clustering.py b/dipy/segment/clustering.py index 9072ee382d..4e4388b16d 100644 --- a/dipy/segment/clustering.py +++ b/dipy/segment/clustering.py @@ -750,8 +750,6 @@ def cluster(self, streamlines, ordering=None): #cluster_map.refdata = streamlines return qbx -<<<<<<< HEAD - class QuickBundlesXOnline(Clustering): r""" Clusters streamlines using QuickBundlesX. @@ -835,7 +833,7 @@ def cluster(self, streamline, idx=None): self._qbx_state, path = self._cluster_fct(streamline, self._idx) self._dirty = True return path -======= + def qbx_with_merge(streamlines, thresholds, nb_pts=20): sample_streamlines = set_number_of_points(streamlines, nb_pts) @@ -870,4 +868,3 @@ def qbx_with_merge(streamlines, thresholds, nb_pts=20): merged_cluster_map.refdata = streamlines return merged_cluster_map ->>>>>>> First experimentation with centroids weigthed by their size, new distance metric From e857aaaecd2b0ccff63096744b62e03c85ddbe5d Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Thu, 7 Apr 2016 16:02:44 -0400 Subject: [PATCH 06/12] Fix indentation for the Symmetric Metric --- dipy/segment/bundles.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py index e137e02dbc..143ca9b66e 100644 --- a/dipy/segment/bundles.py +++ b/dipy/segment/bundles.py @@ -280,8 +280,8 @@ def reduce_search_space(self, reduction_thr=20, reduction_distance='mdf', metric if j < 600 and sorted_tuple[-(j+1)][1] > 10: close_clusters_indices.append(sorted_tuple[-(j+1)][0]) - close_clusters=self.cluster_map[close_clusters_indices] - close_centroids = [self.centroids[i] + close_clusters=self.cluster_map[close_clusters_indices] + close_centroids = [self.centroids[i] for i in close_clusters_indices] close_indices = [cluster.indices for cluster in close_clusters] From 67bc367b16018634d834b40cf13fbae247a5f4ec Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Wed, 3 Aug 2016 08:33:16 -0400 Subject: [PATCH 07/12] Deterministic QBx --- dipy/segment/bundles.py | 110 +++++++++---------------------------- dipy/segment/clustering.py | 2 +- 2 files changed, 27 insertions(+), 85 deletions(-) diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py index 143ca9b66e..da575443dd 100644 --- a/dipy/segment/bundles.py +++ b/dipy/segment/bundles.py @@ -2,7 +2,7 @@ from dipy.tracking.streamline import (transform_streamlines, set_number_of_points, length, select_random_set_of_streamlines) -from dipy.segment.clustering import (QuickBundlesX, +from dipy.segment.clustering import (qbx_with_merge, ClusterMapCentroid, ClusterCentroid, AveragePointwiseEuclideanMetric) from dipy.tracking.distances import (bundles_distances_mdf, @@ -32,67 +32,6 @@ def nbytes(streamlines): return streamlines._data.nbytes / 1024. ** 2 -def qbx_with_merge(streamlines, thresholds, - nb_pts=20, select_randomly=None, verbose=True): - - t = time() - len_s = len(streamlines) - if select_randomly is None: - select_randomly = len_s - indices = np.random.choice(len_s, min(select_randomly, len_s), - replace=False) - sample_streamlines = set_number_of_points(streamlines, nb_pts) - - if verbose: - print(' Resampled to {} points'.format(nb_pts)) - print(' Size is %0.3f MB' % (nbytes(sample_streamlines),)) - print(' Duration of resampling is %0.3f sec.' 
% (time() - t,)) - - if verbose: - print(' QBX phase starting...') - - qbx = QuickBundlesX(thresholds, - metric=AveragePointwiseEuclideanMetric()) - - t1 = time() - qbx_clusters = qbx.cluster(sample_streamlines, ordering=indices) - - if verbose: - print(' Merging phase starting ...') - - qbx_merge = QuickBundlesX([thresholds[-1]], - metric=AveragePointwiseEuclideanMetric()) - - final_level = len(thresholds) - - qbx_ordering_final = np.random.choice( - len(qbx_clusters.get_clusters(final_level)), - len(qbx_clusters.get_clusters(final_level)), replace=False) - - qbx_merged_cluster_map = qbx_merge.cluster( - qbx_clusters.get_clusters(final_level).centroids, - ordering=qbx_ordering_final).get_clusters(1) - - qbx_cluster_map = qbx_clusters.get_clusters(final_level) - - merged_cluster_map = ClusterMapCentroid() - for cluster in qbx_merged_cluster_map: - merged_cluster = ClusterCentroid(centroid=cluster.centroid) - for i in cluster.indices: - merged_cluster.indices.extend(qbx_cluster_map[i].indices) - merged_cluster_map.add_cluster(merged_cluster) - - merged_cluster_map.refdata = streamlines - - if verbose: - print(' QuickBundlesX time for %d random streamlines' - % (select_randomly,)) - - print(' Duration %0.3f sec. \n' % (time() - t1, )) - - return merged_cluster_map - - class RecoBundles(object): def __init__(self, streamlines, cluster_map=None, clust_thr=15, @@ -167,7 +106,7 @@ def recognize(self, model_bundle, model_clust_thr, print('## Recognize given bundle ## \n') self.model_bundle = model_bundle - self.cluster_model_bundle(model_clust_thr=model_clust_thr, metric=slr_metric) + self.cluster_model_bundle(model_clust_thr=model_clust_thr, metric=slr_metric, min_cluster_size=5) success = self.reduce_search_space( reduction_thr=reduction_thr, @@ -202,7 +141,7 @@ def recognize(self, model_bundle, model_clust_thr, % (time()-t,)) return self.pruned_streamlines - def cluster_model_bundle(self, model_clust_thr, nb_pts=20, metric=None): + def cluster_model_bundle(self, model_clust_thr, nb_pts=20, metric=None, min_cluster_size=10): self.model_clust_thr = model_clust_thr t = time() if self.verbose: @@ -210,29 +149,32 @@ def cluster_model_bundle(self, model_clust_thr, nb_pts=20, metric=None): print(' Model bundle has %d streamlines' % (len(self.model_bundle), )) print(' Distance threshold %0.3f' % (model_clust_thr,)) - thresholds = [40, 25, 20, model_clust_thr] + thresholds = [20, 15, 10, model_clust_thr] self.model_cluster_map = qbx_with_merge(self.model_bundle, thresholds, nb_pts=nb_pts, - select_randomly=50000, verbose=self.verbose) + select_randomly=None, verbose=self.verbose) if metric == 'centroids': - close_clusters_indices_tuple = [] - cluster_sizes = self.model_cluster_map.clusters_sizes() - for i in range(len(self.model_cluster_map.centroids)): - close_clusters_indices_tuple.append((i, cluster_sizes[i])) - sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1]) - - valid_centroids = [] - #Only keep the cluster first 400 clusters that are big enough - for j in range(len(sorted_tuple)): - if j < 400 and sorted_tuple[-(j+1)][1] > 10: - valid_centroids.append(sorted_tuple[-(j+1)][0]) - - self.model_centroids = [self.model_cluster_map.centroids[i] - for i in valid_centroids] - self.nb_model_centroids = len(self.model_centroids) - self.model_clusters_sizes = [cluster_sizes[i] - for i in valid_centroids] + self.model_centroids = set_number_of_points(self.model_bundle, nb_pts) + self.nb_model_centroids = len(self.model_bundle) + self.model_clusters_sizes = [1 for i in 
range(len(self.model_bundle))] + #close_clusters_indices_tuple = [] + #cluster_sizes = self.model_cluster_map.clusters_sizes() + #for i in range(len(self.model_cluster_map.centroids)): + # close_clusters_indices_tuple.append((i, cluster_sizes[i])) + #sorted_tuple = sorted(close_clusters_indices_tuple, key=lambda size: size[1]) +# + #valid_centroids = [] + ##Only keep the cluster first 400 clusters that are big enough + #for j in range(len(sorted_tuple)): + # if j < 400 and sorted_tuple[-(j+1)][1] > min_cluster_size: + # valid_centroids.append(sorted_tuple[-(j+1)][0]) +# + #self.model_centroids = [self.model_cluster_map.centroids[i] + # for i in valid_centroids] + #self.nb_model_centroids = len(self.model_centroids) + #self.model_clusters_sizes = [cluster_sizes[i] + # for i in valid_centroids] else: self.model_centroids = self.model_cluster_map.centroids self.nb_model_centroids = len(self.model_centroids) @@ -468,7 +410,7 @@ def prune_what_not_in_model(self, mdf_thr=5, pruning_thr=10, thresholds = [40, 30, 20, 10, mdf_thr] self.rtransf_cluster_map = qbx_with_merge(self.transf_streamlines, thresholds, nb_pts=20, - select_randomly=500000, verbose=self.verbose) + select_randomly=None, verbose=self.verbose) if self.verbose: print(' QB Duration %0.3f sec. \n' % (time() - t, )) diff --git a/dipy/segment/clustering.py b/dipy/segment/clustering.py index 4e4388b16d..444998f733 100644 --- a/dipy/segment/clustering.py +++ b/dipy/segment/clustering.py @@ -847,7 +847,7 @@ def qbx_with_merge(streamlines, thresholds, nb_pts=20): metric=AveragePointwiseEuclideanMetric()) final_level = len(thresholds) - + np.random.seed(0) qbx_ordering_final = np.random.choice( len(qbx_clusters.get_clusters(final_level)), len(qbx_clusters.get_clusters(final_level)), replace=False) From 19932e3162ae82576145a845d70f0f3d9b9cf7dc Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Thu, 4 Aug 2016 05:24:25 -0400 Subject: [PATCH 08/12] Removed old QBx call --- dipy/segment/bundles.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/dipy/segment/bundles.py b/dipy/segment/bundles.py index da575443dd..c2a6ce800e 100644 --- a/dipy/segment/bundles.py +++ b/dipy/segment/bundles.py @@ -73,8 +73,7 @@ def cluster_streamlines(self, clust_thr=15, nb_pts=20): thresholds = [40, 30, 20, clust_thr] - merged_cluster_map = qbx_with_merge(self.streamlines, thresholds, - nb_pts, 500000, self.verbose) + merged_cluster_map = qbx_with_merge(self.streamlines, thresholds) self.cluster_map = merged_cluster_map self.centroids = merged_cluster_map.centroids @@ -151,8 +150,7 @@ def cluster_model_bundle(self, model_clust_thr, nb_pts=20, metric=None, min_clus print(' Distance threshold %0.3f' % (model_clust_thr,)) thresholds = [20, 15, 10, model_clust_thr] - self.model_cluster_map = qbx_with_merge(self.model_bundle, thresholds, nb_pts=nb_pts, - select_randomly=None, verbose=self.verbose) + self.model_cluster_map = qbx_with_merge(self.model_bundle, thresholds, nb_pts=nb_pts) if metric == 'centroids': self.model_centroids = set_number_of_points(self.model_bundle, nb_pts) @@ -409,8 +407,7 @@ def prune_what_not_in_model(self, mdf_thr=5, pruning_thr=10, t = time() thresholds = [40, 30, 20, 10, mdf_thr] - self.rtransf_cluster_map = qbx_with_merge(self.transf_streamlines, thresholds, nb_pts=20, - select_randomly=None, verbose=self.verbose) + self.rtransf_cluster_map = qbx_with_merge(self.transf_streamlines, thresholds, nb_pts=20) if self.verbose: print(' QB Duration %0.3f sec. 
\n' % (time() - t, )) @@ -482,8 +479,7 @@ def __init__(self, streamlines, labels): def build_kdtree(self, nb_pts=20, mdf_thr=10, mam_metric='mdf', leaf_size=10): thresholds = [40, 30, 20, 10, mdf_thr] - cluster_map = qbx_with_merge(self.model_bundle, thresholds, nb_pts=nb_pts, - select_randomly=500000, verbose=self.verbose) + cluster_map = qbx_with_merge(self.model_bundle, thresholds, nb_pts=nb_pts) print('Number of centroids %d' % (len(cluster_map.centroids),)) From 2466efbd6373399a3f44f6f38449e84af24a7e37 Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Fri, 26 Aug 2016 03:56:13 -0400 Subject: [PATCH 09/12] Fix for FVTK --- .gitignore | 3 ++- dipy/segment/clustering.py | 2 +- dipy/viz/fvtk.py | 10 +++++++--- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index 6486dba2a3..12a3069227 100644 --- a/.gitignore +++ b/.gitignore @@ -26,4 +26,5 @@ nibabel*egg/ pyx-stamps __config__.py .DS_Store -.eggs \ No newline at end of file +.eggs +*.idea* diff --git a/dipy/segment/clustering.py b/dipy/segment/clustering.py index 444998f733..1acdc366a4 100644 --- a/dipy/segment/clustering.py +++ b/dipy/segment/clustering.py @@ -847,7 +847,7 @@ def qbx_with_merge(streamlines, thresholds, nb_pts=20): metric=AveragePointwiseEuclideanMetric()) final_level = len(thresholds) - np.random.seed(0) + #np.random.seed(0) qbx_ordering_final = np.random.choice( len(qbx_clusters.get_clusters(final_level)), len(qbx_clusters.get_clusters(final_level)), replace=False) diff --git a/dipy/viz/fvtk.py b/dipy/viz/fvtk.py index 30ebed9e90..5c8a5b15d6 100644 --- a/dipy/viz/fvtk.py +++ b/dipy/viz/fvtk.py @@ -163,10 +163,13 @@ def point(points, colors, opacity=1, point_radius=0.1, theta=8, phi=8): pts = vtk.vtkPoints() cnt_colors = 0 - + polys = vtk.vtkCellArray() for p in points: - pts.InsertNextPoint(p[0], p[1], p[2]) + id = pts.InsertNextPoint(p[0], p[1], p[2]) + polys.InsertNextCell(1) + polys.InsertCellPoint(id) + scalars.InsertNextTuple3( round(255 * colors[cnt_colors][0]), round(255 * colors[cnt_colors][1]), round(255 * colors[cnt_colors][2])) cnt_colors += 1 @@ -178,6 +181,7 @@ def point(points, colors, opacity=1, point_radius=0.1, theta=8, phi=8): polyData = vtk.vtkPolyData() polyData.SetPoints(pts) + polyData.SetPolys(polys) polyData.GetPointData().SetScalars(scalars) glyph = vtk.vtkGlyph3D() @@ -193,7 +197,7 @@ def point(points, colors, opacity=1, point_radius=0.1, theta=8, phi=8): if major_version <= 5: mapper.SetInput(glyph.GetOutput()) else: - mapper.SetInputData(glyph.GetOutput()) + mapper.SetInputConnection(glyph.GetOutputPort()) actor = vtk.vtkActor() actor.SetMapper(mapper) actor.GetProperty().SetOpacity(opacity) From 866602d0d213575f02dd777b7ad54db6f0cb3f98 Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Tue, 1 Nov 2016 17:56:58 -0400 Subject: [PATCH 10/12] Correction after rebase --- dipy/align/streamlinear.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/dipy/align/streamlinear.py b/dipy/align/streamlinear.py index 0ca67789cd..5fccbfc6ba 100644 --- a/dipy/align/streamlinear.py +++ b/dipy/align/streamlinear.py @@ -12,7 +12,7 @@ set_number_of_points, select_random_set_of_streamlines, length) -from dipy.segment.clustering import QuickBundles +from dipy.segment.clustering import QuickBundles, qbx_with_merge from dipy.core.geometry import (compose_transformations, compose_matrix, decompose_matrix) @@ -953,10 +953,9 @@ def check_range(streamline, gt=greater_than, lt=less_than): else: rstreamlines1 = streamlines1 - rstreamlines1 = 
set_number_of_points(rstreamlines1, nb_pts) - qb1 = QuickBundles(threshold=qb_thr) - rstreamlines1 = [s.astype('f4') for s in rstreamlines1] - cluster_map1 = qb1.cluster(rstreamlines1) + thresholds = [40, 30, 20, qb_thr] + + cluster_map1 = qbx_with_merge(rstreamlines1, thresholds, nb_pts=nb_pts) clusters1 = remove_clusters_by_size(cluster_map1, rm_small_clusters) qb_centroids1 = [cluster.centroid for cluster in clusters1] @@ -966,10 +965,7 @@ def check_range(streamline, gt=greater_than, lt=less_than): else: rstreamlines2 = streamlines2 - rstreamlines2 = set_number_of_points(rstreamlines2, nb_pts) - qb2 = QuickBundles(threshold=qb_thr) - rstreamlines2 = [s.astype('f4') for s in rstreamlines2] - cluster_map2 = qb2.cluster(rstreamlines2) + cluster_map2 = qbx_with_merge(rstreamlines2, thresholds, nb_pts=nb_pts) clusters2 = remove_clusters_by_size(cluster_map2, rm_small_clusters) qb_centroids2 = [cluster.centroid for cluster in clusters2] @@ -986,7 +982,7 @@ def check_range(streamline, gt=greater_than, lt=less_than): (-10, 10), (-10, 10), (-10, 10)] slm = progressive_slr(qb_centroids1, qb_centroids2, x0=x0, metric=None, - bounds=bounds) + bounds=bounds,verbose=verbose) if verbose: print('QB static centroids size %d' % len(qb_centroids1,)) From abc43f91cccbb2c97d4c1a884f534c94a1e6be89 Mon Sep 17 00:00:00 2001 From: Francois Rheault Date: Thu, 17 Nov 2016 10:54:09 -0500 Subject: [PATCH 11/12] Minor update for centroids metrics --- dipy/tracking/distances.pyx | 134 ++++++++++++++++++++++++++++++++++++ 1 file changed, 134 insertions(+) diff --git a/dipy/tracking/distances.pyx b/dipy/tracking/distances.pyx index 3a200df12d..b6492db2f3 100644 --- a/dipy/tracking/distances.pyx +++ b/dipy/tracking/distances.pyx @@ -606,6 +606,81 @@ def bundles_distances_mdf(tracksA, tracksB): return DM +@cython.boundscheck(False) +@cython.wraparound(False) +def bundles_weighted_distances_mdf(tracksA, tracksB, weight): + ''' Calculate distances between list of tracks A and list of tracks B + + All tracks need to have the same number of points + + Parameters + ---------- + tracksA : sequence + of tracks as arrays, [(N,3) .. (N,3)] + tracksB : sequence + of tracks as arrays, [(N,3) .. 
(N,3)]
+    weight : sequence
+       of arrays, [(N,) .. (N,)], one per-point weight array for each
+       track in tracksA
+
+    Returns
+    -------
+    DM : array, shape (len(tracksA), len(tracksB))
+        distances between tracksA and tracksB according to metric
+
+    See Also
+    --------
+    dipy.tracking.metrics.downsample
+
+    '''
+    cdef:
+        size_t i, j, lentA, lentB
+    # preprocess tracks
+    cdef:
+        size_t longest_track_len = 0, track_len
+        longest_track_lenA, longest_track_lenB
+        cnp.ndarray[object, ndim=1] tracksA32
+        cnp.ndarray[object, ndim=1] tracksB32
+        cnp.ndarray[object, ndim=1] weight32
+        cnp.ndarray[cnp.double_t, ndim=2] DM
+    lentA = len(tracksA)
+    lentB = len(tracksB)
+    tracksA32 = np.zeros((lentA,), dtype=object)
+    tracksB32 = np.zeros((lentB,), dtype=object)
+    weight32 = np.zeros((lentA,), dtype=object)
+    DM = np.zeros((lentA,lentB), dtype=np.double)
+    # process tracks to predictable memory layout
+    for i in range(lentA):
+        tracksA32[i] = np.ascontiguousarray(tracksA[i], dtype=f32_dt)
+    for i in range(lentB):
+        tracksB32[i] = np.ascontiguousarray(tracksB[i], dtype=f32_dt)
+    for i in range(lentA):
+        weight32[i] = np.ascontiguousarray(weight[i], dtype=f32_dt)
+    # preallocate buffer array for track distance calculations
+    cdef:
+        cnp.float32_t *t1_ptr, *t2_ptr, *weight_ptr, *min_buffer
+    # cycle over tracks
+    cdef:
+        cnp.ndarray [cnp.float32_t, ndim=2] t1, t2
+        cnp.ndarray [cnp.float32_t, ndim=1] weight_
+        size_t t1_len, t2_len
+        float d[2]
+    t_len = tracksA32[0].shape[0]
+
+    for i from 0 <= i < lentA:
+        t1 = tracksA32[i]
+        #t1_len = t1.shape[0]
+        t1_ptr = <cnp.float32_t *> t1.data
+        weight_ = weight32[i]
+        weight_ptr = <cnp.float32_t *> weight_.data
+        for j from 0 <= j < lentB:
+            t2 = tracksB32[j]
+            #t2_len = t2.shape[0]
+            t2_ptr = <cnp.float32_t *> t2.data
+            #DM[i,j] = czhang(t1_len, t1_ptr, t2_len, t2_ptr, min_buffer, metric_type)
+            track_direct_flip_weighted_dist(t1_ptr, t2_ptr, t_len, weight_ptr, d)
+            # keep the smaller of the direct and flipped weighted distances
+            if d[0] < d[1]:
+                DM[i, j] = d[0]
+            else:
+                DM[i, j] = d[1]
+
+    return DM
+
+
+@cython.boundscheck(False)
+@cython.wraparound(False)
+@cython.cdivision(True)
+cdef void track_direct_flip_weighted_dist(float *a, float *b, long rows,
+                                          float *w, float *out) nogil:
+    r''' Direct and flip average distance between two tracks
+
+    Parameters
+    ----------
+    a : first track
+    b : second track
+    rows : number of points of the track
+        both tracks need to have the same number of points
+    w : per-point weights, one value for each of the ``rows`` points
+
+    Returns
+    -------
+    out : direct (out[0]) and flipped (out[1]) weighted average distance
+
+    Notes
+    -----
+    The distance calculated between two tracks::
+
+        t_1       t_2
+
+        0*   a    *0
+          \       |
+           \      |
+           1*     |
+            |  b  *1
+            |      \
+           2*       \
+                c    *2
+
+    is equal to $(a+b+c)/3$ where $a$ the euclidean distance between t_1[0] and
+    t_2[0], $b$ between t_1[1] and t_2[1] and $c$ between t_1[2] and t_2[2].
+    Also the same with t2 flipped (so t_1[0] compared to t_2[2] etc).
+
+    See also
+    --------
+    dipy.tracking.distances.local_skeleton_clustering
+    '''
+    cdef:
+        long i=0,j=0
+        float sub=0,subf=0,distf=0,dist=0,tmprow=0, tmprowf=0
+
+    for i from 0 <= i < rows:
+        tmprow = 0
+        tmprowf = 0
+        for j from 0 <= j < 3:
+            sub = a[i*3+j] - b[i*3+j]
+            subf = a[i*3+j] - b[(rows-1-i)*3+j]
+            tmprow += sub*sub
+            tmprowf += subf*subf
+        # weight each point's contribution by w[i]
+        dist += w[i]*sqrt(tmprow)
+        distf += w[i]*sqrt(tmprowf)
+
+    out[0] = dist/<float>rows
+    out[1] = distf/<float>rows
+
+
 @cython.cdivision(True)
 cdef inline void track_direct_flip_3dist(float *a1, float *b1,float *c1,float *a2, float *b2, float *c2, float *out) nogil:
     ''' Calculate the euclidean distance between two 3pt tracks

From b615b0dbb97790d0c1bd8346e507989361e50fb2 Mon Sep 17 00:00:00 2001
From: Francois Rheault
Date: Mon, 28 Nov 2016 14:47:50 -0500
Subject: [PATCH 12/12] Multiple merge for QBx

---
 dipy/segment/clustering.py  | 38 +++++++++++++++++++++++++++++++++++++
 dipy/tracking/distances.pyx |  6 +++---
 2 files changed, 41 insertions(+), 3 deletions(-)

diff --git a/dipy/segment/clustering.py b/dipy/segment/clustering.py
index 1acdc366a4..7183fe270f 100644
--- a/dipy/segment/clustering.py
+++ b/dipy/segment/clustering.py
@@ -868,3 +868,41 @@ def qbx_with_merge(streamlines, thresholds, nb_pts=20):
     merged_cluster_map.refdata = streamlines
 
     return merged_cluster_map
+
+
+def qbx_with_multiple_merge(streamlines, thresholds, added_layer, nb_pts=20):
+
+    sample_streamlines = set_number_of_points(streamlines, nb_pts)
+    for i in added_layer:
+        thresholds.append(i)
+
+    qbx = QuickBundlesX(thresholds,
+                        metric=AveragePointwiseEuclideanMetric())
+
+    qbx_clusters = qbx.cluster(sample_streamlines)
+    merged_cluster_map_list = []
+    for i in range(len(added_layer)):
+        qbx_merge = QuickBundlesX([added_layer[i]],
+                                  metric=AveragePointwiseEuclideanMetric())
+
+        final_level = len(thresholds) - len(added_layer) + i + 1
+        #np.random.seed(0)
+        qbx_ordering_final = np.random.choice(
+            len(qbx_clusters.get_clusters(final_level)),
+            len(qbx_clusters.get_clusters(final_level)), replace=False)
+
+        qbx_merged_cluster_map = qbx_merge.cluster(
+            qbx_clusters.get_clusters(final_level).centroids,
+            ordering=qbx_ordering_final).get_clusters(1)
+
+        qbx_cluster_map = qbx_clusters.get_clusters(final_level)
+
+        merged_cluster_map = ClusterMapCentroid()
+        for cluster in qbx_merged_cluster_map:
+            merged_cluster = ClusterCentroid(centroid=cluster.centroid)
+            for idx in cluster.indices:
+                merged_cluster.indices.extend(qbx_cluster_map[idx].indices)
+            merged_cluster_map.add_cluster(merged_cluster)
+
+        merged_cluster_map.refdata = streamlines
+        merged_cluster_map_list.append(merged_cluster_map)
+    return merged_cluster_map_list
\ No newline at end of file
diff --git a/dipy/tracking/distances.pyx b/dipy/tracking/distances.pyx
index b6492db2f3..c6d3d7b8f1 100644
--- a/dipy/tracking/distances.pyx
+++ b/dipy/tracking/distances.pyx
@@ -535,7 +535,7 @@ def bundles_distances_mam(tracksA, tracksB, metric='avg'):
 
     return DM
 
-
+from time import time
 @cython.boundscheck(False)
 @cython.wraparound(False)
 def bundles_distances_mdf(tracksA, tracksB):
@@ -589,11 +589,11 @@ def bundles_distances_mdf(tracksA, tracksB):
         float d[2]
     t_len = tracksA32[0].shape[0]
 
-    for i from 0 <= i < lentA:
+    for i in range(lentA):
         t1 = tracksA32[i]
         #t1_len = t1.shape[0]
         t1_ptr = <cnp.float32_t *> t1.data
-        for j from 0 <= j < lentB:
+        for j in range(lentB):
             t2 = tracksB32[j]
             #t2_len = t2.shape[0]
             t2_ptr = <cnp.float32_t *> t2.data
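The per-point weighting performed by `track_direct_flip_weighted_dist` can be
summarized in NumPy as follows. This is a sketch under the assumption that the
weight of point i scales both the direct and the flipped term:

    import numpy as np

    def weighted_direct_flip(a, b, w):
        # a, b: (rows, 3) tracks; w: (rows,) per-point weights
        direct = np.mean(w * np.linalg.norm(a - b, axis=1))
        flipped = np.mean(w * np.linalg.norm(a - b[::-1], axis=1))
        return direct, flipped

    a = np.random.rand(20, 3).astype('f4')
    b = np.random.rand(20, 3).astype('f4')
    w = np.ones(20, dtype='f4')
    # bundles_weighted_distances_mdf keeps the smaller of the two values
    # for each pair of tracks; with unit weights this reduces to plain MDF.
    print(min(weighted_direct_flip(a, b, w)))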