diff --git a/.coveragerc b/.coveragerc
index ccb81b2..5a942bd 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,6 +2,7 @@
 omit =
     *test_package*
     *test_*
+    *generate_test_results*
     *auto*
     *segmentation_stitch*
     *stack_np*
diff --git a/.gitignore b/.gitignore
index 0ce0961..ade790f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,13 @@ build-*
 tmp/
 data/
 cluster
+
+# Cython
+*.c
+*.so
+*.html
+
+# Python bytecode
 *.pyc
 .*.swp
 ipython_log.py*
diff --git a/.travis.yml b/.travis.yml
index 9989f07..91985c8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,12 +1,12 @@
 language: python
+sudo: false
 python:
   - "2.7"
+  - "3.4"
 virtualenv:
-  system_site_packages: true
+  system_site_packages: false
 install:
   # all installing is now handled by conda as it is faster and more robust
-  - sudo apt-get update
-  - pip install -U setuptools
   - wget http://repo.continuum.io/miniconda/Miniconda-3.4.2-Linux-x86_64.sh -O miniconda.sh;
   - bash miniconda.sh -b -p $HOME/miniconda
   - export PATH="$HOME/miniconda/bin:$PATH"
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..6dc0d21
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,7 @@
+clean:
+	rm gala/features/*.{c,html,so}
+	rm gala/*.{c,html,so}
+	rm -rf build
+	rm -rf gala.egg-info
+	rm -rf dist
+	rm -rf *.pyc */*.pyc */*/*.pyc
diff --git a/bin/comparestacks b/bin/comparestacks
index a93db77..14b87ff 100755
--- a/bin/comparestacks
+++ b/bin/comparestacks
@@ -1,3 +1,5 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
+from __future__ import print_function
 
 import os
@@ -69,7 +71,7 @@ def main(argv):
 
     if args.relabel1:
        stack1_int, dummy1, dummy2 = ev.relabel_from_one(stack1_int)
-    print "Imported first stack"
+    print("Imported first stack")
 
     stackbase=args.stackbase
@@ -92,15 +94,15 @@ def main(argv):
 
     if args.relabelbase:
        stackbase_int, dummy1, dummy2 = ev.relabel_from_one(stackbase_int)
-    print "Imported base stack"
+    print("Imported base stack")
 
     if args.synapsejson != '':
         synaptic_vi, synaptic_comps = ev.make_synaptic_functions(args.synapsejson, [ev.split_vi_mem, ev.split_vi_mem])
         merge, split, stack1_bodies2, stack1_vis2, gt_bodies2, gt_vis2 = synaptic_vi(stack1_int, stackbase_int)
-        print "SynMergeSplit: " + str((merge, split))
+        print("SynMergeSplit: " + str((merge, split)))
     else:
         merge, split, stack1_bodies2, stack1_vis2, gt_bodies2, gt_vis2 = ev.split_vi_mem(stack1_int, stackbase_int)
-        print "MergeSplit: " + str((merge, split))
+        print("MergeSplit: " + str((merge, split)))
 
     data=[]
     database=[]
diff --git a/bin/gala-auto b/bin/gala-auto
index dffe6ed..131c3db 100755
--- a/bin/gala-auto
+++ b/bin/gala-auto
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/gala-evaluate b/bin/gala-evaluate
index e29ebfe..73b3af0 100755
--- a/bin/gala-evaluate
+++ b/bin/gala-evaluate
@@ -1,3 +1,5 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
+from __future__ import print_function
 
 # ensure imported package is local to this executable
@@ -10,7 +12,7 @@
 pkg_dir = os.path.abspath(os.path.join(this_dir, '..'))
 sys.path.insert(0, pkg_dir)
 
 # Python standard library
-import argparse, cPickle
+import argparse, six.moves.cPickle
 import subprocess as sp
 import logging
@@ -83,4 +85,4 @@ if __name__ == '__main__':
 
     vi = np.concatenate((np.array([args.threshold]), ev.split_vi(seg, gt)))[..., np.newaxis]
     imio.write_h5_stack(vi, segfn, group='vi')
-    print vi[:, :max(6, len(vi[0]))]
+    print(vi[:, :max(6, len(vi[0]))])
diff --git a/bin/gala-pixel b/bin/gala-pixel
index 11348d5..3858796 100755
--- a/bin/gala-pixel
+++ b/bin/gala-pixel
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/gala-remove-inclusions b/bin/gala-remove-inclusions
index 94ca0d0..86cc6ae 100755
--- a/bin/gala-remove-inclusions
+++ b/bin/gala-remove-inclusions
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/gala-segment b/bin/gala-segment
index f6229f6..81ccc16 100755
--- a/bin/gala-segment
+++ b/bin/gala-segment
@@ -1,16 +1,18 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
 
 import os
 import sys
+from six.moves import map
 this_dir = os.path.dirname(sys.argv[0])
 pkg_dir = os.path.abspath(os.path.join(this_dir, '..'))
 sys.path.insert(0, pkg_dir)
 
 # Python standard library
-import argparse, cPickle
+import argparse, six.moves.cPickle
 import subprocess as sp
 import logging
 import functools
diff --git a/bin/gala-segmentation-pipeline b/bin/gala-segmentation-pipeline
index 44fc450..402fe8c 100755
--- a/bin/gala-segmentation-pipeline
+++ b/bin/gala-segmentation-pipeline
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/gala-segmentation-stitch b/bin/gala-segmentation-stitch
index 470a959..0f07104 100755
--- a/bin/gala-segmentation-stitch
+++ b/bin/gala-segmentation-stitch
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/gala-test-package b/bin/gala-test-package
index 5ae74a3..bc77855 100755
--- a/bin/gala-test-package
+++ b/bin/gala-test-package
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/gala-train b/bin/gala-train
index 193c54c..a8272b2 100755
--- a/bin/gala-train
+++ b/bin/gala-train
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/gala-valprob b/bin/gala-valprob
index baab2b6..4b7ed7c 100755
--- a/bin/gala-valprob
+++ b/bin/gala-valprob
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 # ensure imported package is local to this executable
diff --git a/bin/h5cat b/bin/h5cat
index bdc49aa..e6bc352 100755
--- a/bin/h5cat
+++ b/bin/h5cat
@@ -1,3 +1,5 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
+from __future__ import print_function
 
 import os, sys, argparse
@@ -20,7 +22,7 @@ if __name__ == '__main__':
     args = parser.parse_args()
 
     for fin in args.fin:
-        print '>>>', fin
+        print('>>>', fin)
         f = h5py.File(fin, 'r')
         if args.group is not None:
             groups = [args.group]
@@ -28,10 +30,10 @@ if __name__ == '__main__':
             groups = []
             f.visit(groups.append)
         for g in groups:
-            print '\n ', g
+            print('\n ', g)
             if type(f[g]) == h5py.highlevel.Dataset:
                 a = f[g]
-                print ' shape: ', a.shape, '\n type: ', a.dtype
+                print(' shape: ', a.shape, '\n type: ', a.dtype)
                 if args.verbose:
                     a = array(f[g])
-                    print a
+                    print(a)
diff --git a/gala/__init__.py b/gala/__init__.py
index a108e92..89f07d4 100644
--- a/gala/__init__.py
+++ b/gala/__init__.py
@@ -4,6 +4,7 @@
 Gala is a Python package for nD image segmentation.
""" +from __future__ import absolute_import import sys, logging if sys.version_info[:2] < (2,6): diff --git a/gala/adaboost.py b/gala/adaboost.py deleted file mode 100644 index 623c4ef..0000000 --- a/gala/adaboost.py +++ /dev/null @@ -1,94 +0,0 @@ -# system modules -import sys, math, random, logging -import operator -import cPickle - -# libraries -import numpy - -# local modules -from decision_tree import DecisionTree -from iterprogress import with_progress, NoProgressBar, StandardProgressBar - -class AdaBoost(object): - """Class for an adaboost classifier, adapted from pyclassic. """ - def __init__(self, progress=False, **kwargs): - if progress: - self.progressbar = StandardProgressBar('AdaBoost training...') - else: - self.progressbar = NoProgressBar() - - def fit(self, X, Y, w=None, w_asymmetric=None, depth=1, T=100, **kwargs): - self.X = X.copy() - self.Y = Y.copy() - N = len(self.Y) - - if w is None: - w = (1.0/float(N))*numpy.ones(N) - if w_asymmetric is None: - w_asymmetric = (1.0/float(N))*numpy.ones(N) - self.weights = w.copy() - self.weights_asymmetric = numpy.array([i**(1.0/float(T)) - for i in w_asymmetric]) - self.weights /= float(sum(self.weights)) - self.weak_classifier_ensemble = [] - self.alpha = [] - - for t in with_progress(range(T), pbar=self.progressbar): - # Apply asymmetric weights - self.weights *= self.weights_asymmetric - weak_learner = DecisionTree().fit(self.X,self.Y,self.weights, depth=depth) - Y_pred = weak_learner.predict(self.X) - e = sum(0.5*self.weights*abs(self.Y-Y_pred))/sum(self.weights) - if e > 0.5: - logging.warning(' ending training, no good weak classifiers.') - break - ee = (1.0-e)/float(e) - alpha = 0.5*math.log(ee) - # increase weights for wrongly classified: - self.weights *= numpy.exp(-alpha*self.Y*Y_pred) - self.weights /= sum(self.weights) - self.weak_classifier_ensemble.append(weak_learner) - self.alpha.append(alpha) - return self - - def predict_score(self,X): - X = numpy.array(X) - Y = sum([alpha * weak_classifier.predict(X) for alpha, weak_classifier - in zip(self.alpha, self.weak_classifier_ensemble)]) - return Y - - def predict_proba(self, X): - p = 1.0/(1.0 + numpy.exp(-2.0*self.predict_score(X))) - return numpy.concatenate((numpy.array([1.0-p]), numpy.array([p])), axis=0).T - - def save_to_disk(self, fn): - o = open(fn, 'w') - data = [self.X, self.Y, self.weights, self.weights_asymmetric, - self.alpha, self.weak_classifier_ensemble] - cPickle.dump(data, o, protocol=-1) - o.close() - - def load_from_disk(self, fn): - o = open(fn, 'r') - data = cPickle.load(o) - o.close() - self.X = data[0] - self.Y = data[1] - self.weights = data[2] - self.weights_asymmetric = data[3] - self.alpha = data[4] - self.weak_classifier_ensemble = data[5] - - -def measure_accuracy(Y, o, threshold=0): - oo = o.copy() - oo[numpy.where(o>threshold)[0]] = 1 - oo[numpy.where(o= m: return features, labels - idxs = random.sample(range(m), num_samples) + idxs = random.sample(list(range(m)), num_samples) return features[idxs], labels[idxs] def save_training_data_to_disk(data, fn, names=None, info='N/A'): @@ -309,7 +313,7 @@ def loss(g, n1, n2, gt): def label_merges(g, merge_history, feature_map_function, gt, loss_function): """Replay an agglomeration history and label the loss of each merge.""" labels = np.zeros(len(merge_history)) - number_of_features = feature_map_function(g, *g.edges_iter().next()).size + number_of_features = feature_map_function(g, *next(g.edges_iter())).size features = np.zeros((len(merge_history), number_of_features)) labeled_image = 
     for i, nodes in enumerate(ip.with_progress(
@@ -338,8 +342,6 @@ def select_classifier(cname, features=None, labels=None, **kwargs):
         else:
             raise RuntimeError('tried to use random forest classifier, ' +
                                'but neither scikit-learn nor vigra are available.')
-    elif 'adaboost'.startswith(cname):
-        c = AdaBoost(**kwargs)
     if features is not None and labels is not None:
         c = c.fit(features, labels, **kwargs)
     return c
diff --git a/gala/decision_stump.py b/gala/decision_stump.py
deleted file mode 100644
index 8cc4291..0000000
--- a/gala/decision_stump.py
+++ /dev/null
@@ -1,68 +0,0 @@
-
-import math
-import numpy
-import operator
-
-
-class DecisionStump():
-    """ Class for a decision stump, adapted from pyclassic. """
-
-    def fit(self, X, Y, w):
-        feature_index, stump = train_decision_stump(X,Y,w)
-        self.feature_index = feature_index
-        self.stump = stump
-        return self
-
-    def predict(self,X):
-        if len(X.shape)==1:
-            X = numpy.array([X])
-        N, d = X.shape
-        feature_index = self.feature_index
-        threshold = self.stump.threshold
-        s = self.stump.s
-        return s*(2.0*(X[:,feature_index]>threshold).astype(numpy.uint8)-1)
-
-class Stump:
-    """1D stump"""
-    def __init__(self, score, threshold, s):
-        self.score = score
-        self.threshold = threshold
-        self.s = s
-
-    def __cmp__(self, other):
-        return cmp(self.err, other.err)
-
-
-def train_decision_stump(X,Y,w):
-    stumps = [build_stump_1d(x,Y,w) for x in X.T]
-    feature_index = numpy.argmax([s.score for s in stumps])
-    best_stump = stumps[feature_index]
-    best_threshold = best_stump.threshold
-    return feature_index, best_stump
-
-
-def build_stump_1d(x,y,w):
-    idx = x.argsort()
-    xsorted = x[idx]
-    wy = y[idx]*w[idx]
-    wy_pos = numpy.clip(wy, a_min=0, a_max=numpy.inf)
-    wy_neg = numpy.clip(wy, a_min=-numpy.inf, a_max=0)
-    score_left_pos = numpy.cumsum(wy_pos)
-    score_right_pos = numpy.cumsum(wy_pos[::-1])
-    score_left_neg = numpy.cumsum(wy_neg)
-    score_right_neg = numpy.cumsum(wy_neg[::-1])
-
-    score1 = -score_left_pos[0:-1:1] + score_right_neg[-2::-1]
-    score2 = -score_left_neg[0:-1:1] + score_right_pos[-2::-1]
-    # using idx will ensure that we don't split between nodes with identical x values
-    idx = numpy.nonzero((xsorted[:-1] < xsorted[1:]).astype(numpy.uint8))[0]
-    if len(idx)==0:
-        return Stump(-numpy.inf, 0, 0)
-
-    score = numpy.where(abs(score1)>abs(score2), score1, score2)
-    ind = idx[numpy.argmax(abs(score[idx]))]
-    maxscore = abs(score[ind])
-    threshold = (xsorted[ind] + xsorted[ind+1])/2.0
-    s = numpy.sign(score[ind]) # direction of -1 -> 1 change
-    return Stump(maxscore, threshold, s)
-
diff --git a/gala/decision_tree.py b/gala/decision_tree.py
deleted file mode 100644
index e04ddf3..0000000
--- a/gala/decision_tree.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from numpy import inf, unique, array, zeros
-from decision_stump import DecisionStump
-
-class DecisionTree():
-    """ Class for a decision tree.
-    The trees are grown until completion or up to a specified maximum depth.
-    The splits are based on the imlementation of DecisionStump, which currently
-    splits on weighted classification error.
- """ - def fit(self, X, Y, w, depth=inf, curr_depth=0, curr_node=None): - - self.head = self.build_tree(X.copy(),Y.copy(),w.copy(),depth,curr_depth) - self.weights = w.copy() - return self - - def build_tree(self, X, Y, w, depth, curr_depth): - # See if we can do any splitting at all - tree = Node() - yw = Y*w - if len(X)<2 or len(unique(Y)) < 2 or curr_depth >= depth: - tree.stump = 1.0 if abs(sum(yw[yw>=0]))>abs(sum(yw[yw<0])) else -1.0 - return tree - # TODO: check for inconsistent data - - # Learn the decision stump - stump = DecisionStump().fit(X,Y,w) - side1 = stump.predict(X)>=0 - side2 = stump.predict(X)<0 - - tree.stump = stump - tree.left = self.build_tree(X[side1], Y[side1], w[side1], depth, curr_depth+1) - tree.right = self.build_tree(X[side2], Y[side2], w[side2], depth, curr_depth+1) - - return tree - - def predict(self, X, curr_node=None): - if len(X.shape)==1: - X = array([X]) - - if curr_node is None: - curr_node = self.head - - pred = zeros(len(X)) - if not isinstance(curr_node.stump, DecisionStump): - return curr_node.stump - - side1 = curr_node.stump.predict(X)>=0 - side2 = curr_node.stump.predict(X)<0 - - if sum(side1)>0: - pred[side1] = self.predict(X[side1], curr_node.left) - if sum(side2)>0: - pred[side2] = self.predict(X[side2], curr_node.right) - - return pred - - -class Node(): - def __init__(self): - self.left = None - self.right = None - self.stump = None diff --git a/gala/evaluate.py b/gala/evaluate.py index 39784b1..dfab87e 100644 --- a/gala/evaluate.py +++ b/gala/evaluate.py @@ -1,4 +1,6 @@ # coding=utf-8 +from __future__ import absolute_import +from __future__ import print_function import numpy as np import multiprocessing @@ -12,6 +14,9 @@ from scipy.ndimage.measurements import label from scipy.spatial.distance import pdist, squareform from sklearn.metrics import precision_recall_curve +from six.moves import map +from six.moves import range +from six.moves import zip def sparse_min(mat, axis=None): @@ -186,7 +191,7 @@ def wiggle_room_precision_recall(pred, boundary, margin=2, connectivity=1): pred_dil.ravel()[np.flatnonzero(pred==m)[0]] = m prec, _, ts = precision_recall_curve(gtd.ravel(), pred.ravel()) _, rec, _ = precision_recall_curve(boundary.ravel(), pred_dil.ravel()) - return zip(ts, prec, rec) + return list(zip(ts, prec, rec)) def get_stratified_sample(ar, n): @@ -250,7 +255,7 @@ def edit_distance(aseg, gt, size_threshold=1000, sp=None): if sp is None: return raw_edit_distance(aseg, gt, size_threshold) else: - import agglo + from . 
         bps = agglo.best_possible_segmentation(sp, gt)
         return raw_edit_distance(aseg, bps, size_threshold)
@@ -488,7 +493,7 @@ def make_synaptic_functions(fn, fcts):
     if not isinstance(fcts, coll.Iterable):
         return make_function(fcts)
     else:
-        return map(make_function, fcts)
+        return list(map(make_function, fcts))
 
 
 def make_synaptic_vi(fn):
@@ -809,7 +814,7 @@ def split_vi_mem(x, y):
     y_flat = y.ravel()
 
     count = 0
-    print "Analyzing similarities"
+    print("Analyzing similarities")
     for pos in range(0,len(x_flat)):
         x_val = x_flat[pos]
         y_val = y_flat[pos]
@@ -820,7 +825,7 @@ def split_vi_mem(x, y):
         (x_map[x_val])[y_val] += 1
         (y_map[y_val])[x_val] += 1
         count += 1
-    print "Finished analyzing similarities"
+    print("Finished analyzing similarities")
 
     x_ents = {}
     y_ents = {}
@@ -973,7 +978,7 @@ def vi_tables(x, y=None, ignore_x=[0], ignore_y=[0]):
         lpxgy[nzy] = xlogx(divide_columns(nzpxy, nzpy)).sum(axis=0)
     hxgy = -(py*lpxgy)
 
-    return [pxy] + map(np.asarray, [px, py, hxgy, hygx, lpygx, lpxgy])
+    return [pxy] + list(map(np.asarray, [px, py, hxgy, hygx, lpygx, lpxgy]))
 
 
 def sorted_vi_components(s1, s2, ignore1=[0], ignore2=[0], compress=False):
@@ -1052,7 +1057,7 @@ def split_components(idx, cont, num_elems=4, axis=0):
     idxs = (-cc).argsort()[:num_elems]
     probs = cc[idxs]
     probst = cct[idxs]
-    return zip(idxs, probs, probst)
+    return list(zip(idxs, probs, probst))
 
 
 def rand_values(cont_table):
diff --git a/gala/features/__init__.py b/gala/features/__init__.py
index cc8db17..731a05b 100644
--- a/gala/features/__init__.py
+++ b/gala/features/__init__.py
@@ -4,6 +4,7 @@
 All the features used in agglomeration should be put here.
 
 """
+from __future__ import absolute_import
 
 from . import \
     base, \
diff --git a/gala/features/base.py b/gala/features/base.py
index ea469ec..629c17c 100644
--- a/gala/features/base.py
+++ b/gala/features/base.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import numpy as np
 
 class Null(object):
diff --git a/gala/features/convex_hull.py b/gala/features/convex_hull.py
index 2b8d894..430f312 100644
--- a/gala/features/convex_hull.py
+++ b/gala/features/convex_hull.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 # python standard library
 import logging
 import itertools as it
diff --git a/gala/features/graph.py b/gala/features/graph.py
index b5f2ba6..b298ea9 100644
--- a/gala/features/graph.py
+++ b/gala/features/graph.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 # external libraries
 import numpy as np
 import networkx as nx
diff --git a/gala/features/inclusion.py b/gala/features/inclusion.py
index 46aec5f..6495162 100644
--- a/gala/features/inclusion.py
+++ b/gala/features/inclusion.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import numpy as np
 
 from . import base
diff --git a/gala/features/io.py b/gala/features/io.py
index 1c1baa7..dd4298a 100644
--- a/gala/features/io.py
+++ b/gala/features/io.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 from . import base
 from . import inclusion, moments, histogram
diff --git a/gala/features/orientation.py b/gala/features/orientation.py
index 05a59f3..e82caef 100644
--- a/gala/features/orientation.py
+++ b/gala/features/orientation.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import numpy as np
 from numpy.linalg import eig, norm
 from . import base
diff --git a/gala/features/squiggliness.py b/gala/features/squiggliness.py
index 963333c..83d1d0b 100644
--- a/gala/features/squiggliness.py
+++ b/gala/features/squiggliness.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import numpy as np
 
 from . import base
diff --git a/gala/filter.py b/gala/filter.py
index 6b0d915..185c5af 100644
--- a/gala/filter.py
+++ b/gala/filter.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import numpy as np
 from scipy import ndimage as nd
diff --git a/gala/imio.py b/gala/imio.py
index fb10971..ab34779 100644
--- a/gala/imio.py
+++ b/gala/imio.py
@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+from __future__ import print_function
 # built-ins
 import os
 import json
@@ -10,6 +12,9 @@
 
 # libraries
 import h5py
+from six.moves import map
+from six.moves import range
+from six.moves import zip
 try:
     import Image
 except:
@@ -17,7 +22,7 @@
 try:
     from pylibtiff import TIFF
 except:
-    print "pylibtiff not available: http://www.lfd.uci.edu/~gohlke/pythonlibs/#pylibtiff"
+    print("pylibtiff not available: http://www.lfd.uci.edu/~gohlke/pythonlibs/#pylibtiff")
 
 from scipy.ndimage.measurements import label
@@ -31,8 +36,8 @@
 from skimage.io import imread
 
 # local files
-import evaluate
-import morpho
+from . import evaluate
+from . import morpho
 
 ### Auto-detect file format
@@ -356,9 +361,9 @@ def write_vtk(ar, fn, spacing=[1.0, 1.0, 1.0]):
     f.write('created by write_vtk (Python implementation by JNI)\n')
     f.write('BINARY\n')
     f.write('DATASET STRUCTURED_POINTS\n')
-    f.write(' '.join(['DIMENSIONS'] + map(str, ar.shape[-1::-1])) + '\n')
-    f.write(' '.join(['ORIGIN'] + map(str, zeros(3))) + '\n')
-    f.write(' '.join(['SPACING'] + map(str, spacing)) + '\n')
+    f.write(' '.join(['DIMENSIONS'] + list(map(str, ar.shape[-1::-1]))) + '\n')
+    f.write(' '.join(['ORIGIN'] + list(map(str, zeros(3)))) + '\n')
+    f.write(' '.join(['SPACING'] + list(map(str, spacing))) + '\n')
     f.write('POINT_DATA ' + str(ar.size) + '\n')
     f.write('SCALARS image_data ' +
             numpy_type_to_vtk_string[ar.dtype.type] + '\n')
@@ -452,7 +457,7 @@ def compute_sp_to_body_map(sps, bodies):
     such as non-matching shapes, or superpixels mapping to more than
     one segment, will result in undefined behavior downstream with no
     warning.
""" - sp_to_body = unique(zip(sps.ravel(), bodies.ravel())).astype(uint64) + sp_to_body = unique(list(zip(sps.ravel(), bodies.ravel()))).astype(uint64) return sp_to_body def write_mapped_segmentation(superpixel_map, sp_to_body_map, fn, @@ -676,7 +681,7 @@ def segs_to_raveler(sps, bodies, min_size=0, do_conn_comp=False, sps_out=None): if sps_out is None: sps_out = raveler_serial_section_map(sps, min_size, do_conn_comp, False) segment_map = raveler_serial_section_map(bodies, min_size, do_conn_comp) - segment_to_body = unique(zip(segment_map.ravel(), bodies.ravel())) + segment_to_body = unique(list(zip(segment_map.ravel(), bodies.ravel()))) segment_to_body = segment_to_body[segment_to_body[:,0] != 0] segment_to_body = concatenate((array([[0,0]]), segment_to_body), axis=0) sp_to_segment = [] @@ -685,7 +690,7 @@ def segs_to_raveler(sps, bodies, min_size=0, do_conn_comp=False, sps_out=None): segment_map_i *= sp_map_i.astype(bool) valid = (sp_map_i != 0) + (segment_map_i == 0) sp_to_segment.append( - unique(zip(it.repeat(i), sp_map_i[valid], segment_map_i[valid]))) + unique(list(zip(it.repeat(i), sp_map_i[valid], segment_map_i[valid])))) valid = segment_map != 0 logging.debug('plane %i done'%i) logging.info('total superpixels before: ' + str(len(unique(sps))) + @@ -997,7 +1002,7 @@ def raveler_to_labeled_volume(rav_export_dir, get_glia=False, glia : list of int (optional, only returned if `get_glia` is True) The IDs in the segmentation corresponding to glial cells. """ - import morpho + from . import morpho spmap = read_image_stack( os.path.join(rav_export_dir, 'superpixel_maps', '*.png'), crop=crop) spmap = raveler_rgba_to_int(spmap) @@ -1009,7 +1014,7 @@ def raveler_to_labeled_volume(rav_export_dir, get_glia=False, max_sp = sp2seg_list[:,1].max() start_plane = sp2seg_list[:,0].min() for z, sp, seg in sp2seg_list: - if not sp2seg.has_key(z): + if z not in sp2seg: sp2seg[z] = zeros(max_sp+1, uint32) sp2seg[z][sp] = seg max_seg = seg2bod_list[:,0].max() @@ -1073,7 +1078,7 @@ def write_ilastik_project(images, labels, fn, label_names=None): if type(images) != list: images = [images] labels = [labels] - ulbs = unique(concatenate(map(unique, labels)))[1:] + ulbs = unique(concatenate(list(map(unique, labels))))[1:] colors = array(ilastik_label_colors[:len(ulbs)]) names = ['Label %i'%i for i in ulbs] names = array(names, '|S%i'%max(map(len, names))) @@ -1137,7 +1142,7 @@ def read_prediction_from_ilastik_batch(fn, **kwargs): ------- None """ - if not kwargs.has_key('group'): + if 'group' not in kwargs: kwargs['group'] = '/volume/prediction' a = squeeze(read_h5_stack(fn, **kwargs)) if kwargs.get('single_channel', True): diff --git a/gala/iterprogress.py b/gala/iterprogress.py index fcbfcf5..5bfee4f 100644 --- a/gala/iterprogress.py +++ b/gala/iterprogress.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import import logging class NoProgressBar(object): diff --git a/gala/mergequeue.py b/gala/mergequeue.py index dceadd3..481af07 100644 --- a/gala/mergequeue.py +++ b/gala/mergequeue.py @@ -1,6 +1,7 @@ +from __future__ import absolute_import from heapq import heapify, heappush, heappop -from iterprogress import NoProgressBar, StandardProgressBar +from .iterprogress import NoProgressBar, StandardProgressBar class MergeQueue(object): def __init__(self, items=[], length=None, with_progress=False, diff --git a/gala/morpho.py b/gala/morpho.py index ca65fc3..e858709 100644 --- a/gala/morpho.py +++ b/gala/morpho.py @@ -1,3 +1,4 @@ +from __future__ import absolute_import #!/usr/bin/env python import numpy as np 
@@ -21,8 +22,11 @@
 from scipy.ndimage.morphology import binary_opening, binary_closing, \
     binary_dilation, grey_closing, iterate_structure
 #from scipy.spatial.distance import cityblock as manhattan_distance
-import iterprogress as ip
+from . import iterprogress as ip
 from .evaluate import relabel_from_one
+from six.moves import map
+from six.moves import range
+from six.moves import zip
 
 try:
     import skimage.morphology
@@ -343,7 +347,7 @@ def watershed_sequence(a, seeds=None, mask=None, axis=0, **kwargs):
         mask = it.repeat(None)
     ws = [watershed(i, seeds=s, mask=m, **kwargs)
           for i, s, m in zip(a, seeds, mask)]
-    counts = map(np.max, ws[:-1])
+    counts = list(map(np.max, ws[:-1]))
     counts = np.concatenate((np.array([0]), counts))
     counts = np.cumsum(counts)
     for c, w in zip(counts, ws):
@@ -463,7 +467,7 @@ def pad(ar, vals, axes=None):
     if ar.size == 0:
         return ar
     if axes is None:
-        axes = range(ar.ndim)
+        axes = list(range(ar.ndim))
     if not _is_container(vals):
         vals = [vals]
     if not _is_container(axes):
@@ -504,7 +508,7 @@ def pad(ar, vals, axes=None):
         return pad(ar2, vals[1:], axes)
 
 def juicy_center(ar, skinsize=1):
-    for i in xrange(ar.ndim):
+    for i in range(ar.ndim):
         ar = ar.swapaxes(0,i)
         ar = ar[skinsize:-skinsize]
         ar = ar.swapaxes(0,i)
@@ -512,7 +516,7 @@ def juicy_center(ar, skinsize=1):
 
 def surfaces(ar, skinsize=1):
     s = []
-    for i in xrange(ar.ndim):
+    for i in range(ar.ndim):
         ar = ar.swapaxes(0, i)
         s.append(ar[0:skinsize].copy())
         s.append(ar[-skinsize:].copy())
@@ -552,7 +556,7 @@ def get_neighbor_idxs(ar, idxs, connectivity=1):
         prod = array(list(it.product(*([[1,-1]]*i))))
         i_strides = array(list(it.combinations(strides,i))).T
         steps.append(prod.dot(i_strides).ravel())
-    return idxs[:,newaxis] + concatenate(steps)
+    return idxs[:,newaxis] + concatenate(steps).astype(int32)
 
 def orphans(a):
     """Find all the segments that do not touch the volume boundary.
@@ -571,7 +575,7 @@ def non_traversing_segments(a):
     surface = hollowed(a)
     surface_ccs = label(surface)[0]
     idxs = flatnonzero(surface)
-    pairs = unique(zip(surface.ravel()[idxs], surface_ccs.ravel()[idxs]))
+    pairs = unique(list(zip(surface.ravel()[idxs], surface_ccs.ravel()[idxs])))
     return flatnonzero(bincount(pairs.astype(int)[:,0])==1)
 
 def damify(a, in_place=False):
diff --git a/gala/ncut.py b/gala/ncut.py
index 5e4ca23..ae65600 100644
--- a/gala/ncut.py
+++ b/gala/ncut.py
@@ -1,9 +1,9 @@
+from __future__ import absolute_import
 import numpy
-import agglo
-import morpho
 import scipy.sparse
 import scipy.sparse.linalg
 import scipy.cluster.vq
+from six.moves import range
 
 def ncutW(W, num_eigs=10, kmeans_iters=10, offset = 0.5, **kwargs):
     """Run the normalized cut algorithm
diff --git a/gala/option_manager.py b/gala/option_manager.py
index 7cd595b..cb6674b 100644
--- a/gala/option_manager.py
+++ b/gala/option_manager.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import argparse
 import json
 
@@ -53,7 +54,7 @@ def load_config(self, file_name, args=None):
             json_file = open(file_name)
             json_data = json.load(json_file)
             json_file.close()
-        except Exception, e:
+        except Exception as e:
             self.master_logger.warning("error in opening " + file_name + \
                 " (%s) , using flags only" % str(e))
 
diff --git a/gala/pixel.py b/gala/pixel.py
index c7a62d0..99d9c3d 100644
--- a/gala/pixel.py
+++ b/gala/pixel.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import os
 import sys
 import glob
@@ -7,7 +8,7 @@
 import shutil
 import traceback
 
-import imio, option_manager, app_logger, session_manager, util
+from . import imio, option_manager, app_logger, session_manager, util
 
 def image_stack_verify(options_parser, options, master_logger):
     if options.image_stack is not None:
@@ -140,8 +141,8 @@ def entrypoint(argv):
                 master_logger, applogger, create_pixel_options)
         gen_pixel_probabilities(session.session_location, session.options,
                 master_logger)
-    except Exception, e:
+    except Exception as e:
         master_logger.error(str(traceback.format_exc()))
-    except KeyboardInterrupt, err:
+    except KeyboardInterrupt as err:
         master_logger.error(str(traceback.format_exc()))
 
diff --git a/gala/segmentation_pipeline.py b/gala/segmentation_pipeline.py
index be16f15..5001ac2 100755
--- a/gala/segmentation_pipeline.py
+++ b/gala/segmentation_pipeline.py
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 
@@ -549,9 +550,9 @@ def entrypoint(argv):
                 master_logger, applogger, create_segmentation_pipeline_options)
         master_logger.info("Session location: " + session.session_location)
         run_segmentation_pipeline(session.session_location, session.options, master_logger)
-    except Exception, e:
+    except Exception as e:
         master_logger.error(str(traceback.format_exc()))
-    except KeyboardInterrupt, err:
+    except KeyboardInterrupt as err:
         master_logger.error(str(traceback.format_exc()))
 
diff --git a/gala/segmentation_stitch.py b/gala/segmentation_stitch.py
index be6598b..7ea46ab 100755
--- a/gala/segmentation_stitch.py
+++ b/gala/segmentation_stitch.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import sys
 import os
 import argparse
@@ -18,6 +19,7 @@
 import datetime
 
 from . import imio, morpho, classify, evaluate, app_logger, session_manager, pixel, features, stack_np
+from six.moves import range
 
 # Group where we store predictions in HDF5 file
 PREDICTIONS_HDF5_GROUP = '/volume/predictions'
@@ -800,9 +802,9 @@ def entrypoint(argv):
                 master_logger, applogger, create_stitching_options)
         master_logger.info("Session location: " + session.session_location)
         run_stitching(session.session_location, session.options, master_logger)
-    except Exception, e:
+    except Exception as e:
         master_logger.error(str(traceback.format_exc()))
-    except KeyboardInterrupt, err:
+    except KeyboardInterrupt as err:
         master_logger.error(str(traceback.format_exc()))
 
diff --git a/gala/session_manager.py b/gala/session_manager.py
index 372b087..c7ec0d9 100644
--- a/gala/session_manager.py
+++ b/gala/session_manager.py
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 #
 # Copyright 2012 HHMI.  All rights reserved.
@@ -36,7 +37,7 @@
 import datetime
 import getpass
 
-import option_manager
+from . import option_manager
 
 class Session:
     """The Session Manager
diff --git a/gala/stack_np.py b/gala/stack_np.py
index 2cdadd0..a80d3ad 100644
--- a/gala/stack_np.py
+++ b/gala/stack_np.py
@@ -1,10 +1,12 @@
+from __future__ import absolute_import
 # built-ins
 import libNeuroProofRag as neuroproof
-import morpho
+from . import morpho
 import json
 
 from numpy import zeros_like, array, double, zeros
 import numpy
+from six.moves import range
 
 def get_prob_handle(classifier):
     def get_prob(features):
diff --git a/gala/stitch.py b/gala/stitch.py
index 2102762..6840017 100755
--- a/gala/stitch.py
+++ b/gala/stitch.py
@@ -1,14 +1,17 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 import sys, os, argparse
 import pdb
-from agglo import Rag
-from imio import read_image_stack
-from morpho import juicy_center
+from .agglo import Rag
+from .imio import read_image_stack
+from .morpho import juicy_center
 from numpy import zeros, bool, hstack, vstack, newaxis, array, savetxt
 from scipy.ndimage.filters import median_filter, gaussian_filter
 from scipy.ndimage.measurements import label
 from gala import single_arg_read_image_stack
+from six.moves import range
+from six.moves import zip
 
 class EvalAction(argparse.Action):
     def __call__(parser, namespace, values, option_string=None):
@@ -90,7 +93,7 @@ def crop_probs_and_ws(crop, probs, ws):
     thickness = args.thickness
     zcrop1 = [0,thickness]
     overlaps = [2**i+1 for i in range(1,8)]
-    results_table = zeros([len(args.thresholds), len(range(1,8))], dtype=bool)
+    results_table = zeros([len(args.thresholds), len(list(range(1,8)))], dtype=bool)
     for j, overlap in enumerate(overlaps):
         zcrop2 = [thickness-overlap, 2*thickness-overlap]
         # pdb.set_trace()
diff --git a/gala/test_package.py b/gala/test_package.py
index 0df53e6..01303cb 100755
--- a/gala/test_package.py
+++ b/gala/test_package.py
@@ -1,3 +1,4 @@
 #!/usr/bin/env python
+from __future__ import absolute_import
 
 import unittest
diff --git a/gala/util.py b/gala/util.py
index 49b9f27..36b41e5 100644
--- a/gala/util.py
+++ b/gala/util.py
@@ -1,8 +1,10 @@
+from __future__ import absolute_import
 import errno
 import itertools as it
 import json
 import os
 import uuid
+import six
 
 all_sizes = [0.7, 1.0, 1.6, 3.5, 5.0]
 all_types = ['Color', 'Texture', 'Edge', 'Orientation']
@@ -11,7 +13,7 @@
 default_feature_set = list(it.product(all_types[:-1], all_sizes[1:-1]))
 
 def write_segmentation_pipeline_json(jsonfn, ilfn, ilbfns, outdir='.'):
-    if isinstance(ilbfns, str) or isinstance(ilbfns, unicode):
+    if isinstance(ilbfns, str) or isinstance(ilbfns, six.text_type):
         ilbfns = [ilbfns]
     d = {}
     d['images'] = [{'name': ilbfn} for ilbfn in ilbfns]
diff --git a/gala/valprob.py b/gala/valprob.py
index 17f04a6..cc734d6 100644
--- a/gala/valprob.py
+++ b/gala/valprob.py
@@ -1,4 +1,6 @@
-import imio, option_manager, app_logger, session_manager
+from __future__ import absolute_import
+from __future__ import print_function
+from . import imio, option_manager, app_logger, session_manager
 import libNeuroProofPriority as neuroproof
 import os
 import sys
@@ -7,6 +9,7 @@
 import numpy
 import json
 import traceback
+from six.moves import range
 
 def image_stack_verify(options_parser, options, master_logger):
     if options.test_stack is not None:
@@ -142,7 +145,7 @@ def auto_proofread(body2gtbody, rag_file, size_threshold, master_logger, test_st
             per = 0
         else:
             per = (float(nomerge_hist[iter1])/float(tot_hist[iter1]) * 100)
-        print iter1, ", ", per , ", " , tot_hist[iter1]
+        print(iter1, ", ", per , ", " , tot_hist[iter1])
 
     master_logger.info("Probability Actual Agreement with Groundtruth Est")
     for iter1 in range(0, 101):
@@ -150,7 +153,7 @@ def auto_proofread(body2gtbody, rag_file, size_threshold, master_logger, test_st
             per = 0
         else:
             per = (float(nomerge_hist2[iter1])/float(tot_hist2[iter1]) * 100)
-        print iter1, ", ", per , ", " , tot_hist2[iter1]
+        print(iter1, ", ", per , ", " , tot_hist2[iter1])
 
     body2body = {}
     for key, vallist in bodyremap.items():
@@ -212,7 +215,7 @@ def valprob(session_location, options, master_logger):
             per = 0
         else:
             per = (float(nomerge_hist[iter1])/float(tot_hist[iter1]) * 100)
-        print iter1, ", ", per , ", " , tot_hist[iter1]
+        print(iter1, ", ", per , ", " , tot_hist[iter1])
 
     auto_proofread(body2gtbody, options.ragprob_file, options.size_threshold,
             master_logger, options.test_stack, session_location)
@@ -226,8 +229,8 @@ def entrypoint(argv):
                 master_logger, applogger, create_valprob_options)
         valprob(session.session_location, session.options, master_logger)
-    except Exception, e:
+    except Exception as e:
         master_logger.error(str(traceback.format_exc()))
-    except KeyboardInterrupt, err:
+    except KeyboardInterrupt as err:
         master_logger.error(str(traceback.format_exc()))
 
diff --git a/gala/viz.py b/gala/viz.py
index 067022c..73e7901 100644
--- a/gala/viz.py
+++ b/gala/viz.py
@@ -1,10 +1,13 @@
-from annotefinder import AnnoteFinder
+from __future__ import absolute_import
+from .annotefinder import AnnoteFinder
 from math import ceil
 import numpy as np
 import scipy
-import evaluate
+from . import evaluate
 from skimage import color
 import matplotlib
+from six.moves import range
+from six.moves import zip
 plt = matplotlib.pyplot
 cm = plt.cm
 import itertools as it
diff --git a/setup.cfg b/setup.cfg
index eb76fc9..3d29b40 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,4 +4,4 @@
 ; run doctests (--doctest-modules)
 ; run coverage (--cov .)
 ; report uncovered lines (--cov-report term-missing)
-addopts = --ignore gala/test_package.py --ignore tests/toy-data --ignore gala/auto.py --ignore gala/segmentation_stitch.py --ignore gala/stack_np.py --ignore gala/stitch.py --ignore gala/valprob.py --ignore tests/generate-test-results.py --ignore tests/example-data/example.py --doctest-modules --cov . --cov-report term-missing
+addopts = --ignore gala/test_package.py --ignore tests/toy-data --ignore gala/auto.py --ignore gala/segmentation_stitch.py --ignore gala/stack_np.py --ignore gala/stitch.py --ignore gala/valprob.py --ignore tests/_util/generate-test-results.py --ignore tests/example-data/example.py --doctest-modules --cov . --cov-report term-missing
diff --git a/setup.py b/setup.py
index 9f46730..19165ca 100644
--- a/setup.py
+++ b/setup.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 #from distutils.core import setup
 from setuptools import setup
 from Cython.Build import cythonize
diff --git a/tests/_util/generate-test-results.py b/tests/_util/generate-test-results.py
new file mode 100644
index 0000000..e5b9ad5
--- /dev/null
+++ b/tests/_util/generate-test-results.py
@@ -0,0 +1,50 @@
+
+import numpy as np
+from gala import imio, classify, features, agglo, evaluate as ev
+gt_train, pr_train, p4_train, ws_train = map(imio.read_h5_stack, ['example-data/train-gt.lzf.h5', 'example-data/train-p1.lzf.h5', 'example-data/train-p4.lzf.h5', 'example-data/train-ws.lzf.h5'])
+gt_test, pr_test, p4_test, ws_test = map(imio.read_h5_stack, ['example-data/test-gt.lzf.h5', 'example-data/test-p1.lzf.h5', 'example-data/test-p4.lzf.h5', 'example-data/test-ws.lzf.h5'])
+fm = features.moments.Manager()
+fh = features.histogram.Manager()
+fc = features.base.Composite(children=[fm, fh])
+g_train = agglo.Rag(ws_train, pr_train, feature_manager=fc)
+np.random.RandomState(0)
+(X, y, w, merges) = map(np.copy, map(np.ascontiguousarray,
+                                     g_train.learn_agglomerate(gt_train, fc)[0]))
+print(X.shape)
+np.savez('example-data/train-set.npz', X=X, y=y)
+y = y[:, 0]
+rf = classify.DefaultRandomForest()
+X.shape
+np.random.RandomState(0)
+rf = rf.fit(X, y)
+classify.save_classifier(rf, 'example-data/rf1.joblib')
+learned_policy = agglo.classifier_probability(fc, rf)
+g_test = agglo.Rag(ws_test, pr_test, learned_policy, feature_manager=fc)
+g_test.agglomerate(0.5)
+seg_test1 = g_test.get_segmentation()
+imio.write_h5_stack(seg_test1, 'example-data/test-seg1.lzf.h5', compression='lzf')
+g_train4 = agglo.Rag(ws_train, p4_train, feature_manager=fc)
+np.random.RandomState(0)
+(X4, y4, w4, merges4) = map(np.copy, map(np.ascontiguousarray,
+                                         g_train4.learn_agglomerate(gt_train, fc)[0]))
+print(X4.shape)
+np.savez('example-data/train-set4.npz', X=X4, y=y4)
+y4 = y4[:, 0]
+rf4 = classify.DefaultRandomForest()
+np.random.RandomState(0)
+rf4 = rf4.fit(X4, y4)
+classify.save_classifier(rf4, 'example-data/rf4.joblib')
+learned_policy4 = agglo.classifier_probability(fc, rf4)
+g_test4 = agglo.Rag(ws_test, p4_test, learned_policy4, feature_manager=fc)
+g_test4.agglomerate(0.5)
+seg_test4 = g_test4.get_segmentation()
+imio.write_h5_stack(seg_test4, 'example-data/test-seg4.lzf.h5', compression='lzf')
+
+results = np.vstack((
+    ev.split_vi(ws_test, gt_test),
+    ev.split_vi(seg_test1, gt_test),
+    ev.split_vi(seg_test4, gt_test)
+    ))
+
+np.save('example-data/vi-results.npy', results)
+
diff --git a/tests/example-data/example.py b/tests/example-data/example.py
index 2ed92cc..2330165 100644
--- a/tests/example-data/example.py
+++ b/tests/example-data/example.py
@@ -1,5 +1,8 @@
+from __future__ import absolute_import
+from __future__ import print_function
 # imports
 from gala import imio, classify, features, agglo, evaluate as ev
+from six.moves import map
 
 # read in training data
 gt_train, pr_train, ws_train = (map(imio.read_h5_stack,
@@ -15,7 +18,7 @@
 g_train = agglo.Rag(ws_train, pr_train, feature_manager=fc)
 (X, y, w, merges) = g_train.learn_agglomerate(gt_train, fc)[0]
 y = y[:, 0] # gala has 3 truth labeling schemes, pick the first one
-print(X.shape, y.shape) # standard scikit-learn input format
+print((X.shape, y.shape)) # standard scikit-learn input format
 
 # train a classifier, scikit-learn syntax
 rf = classify.DefaultRandomForest().fit(X, y)
@@ -35,7 +38,7 @@
 g_train4 = agglo.Rag(ws_train, p4_train, feature_manager=fc)
 (X4, y4, w4, merges4) = g_train4.learn_agglomerate(gt_train, fc)[0]
 y4 = y4[:, 0]
-print(X4.shape, y4.shape)
+print((X4.shape, y4.shape))
 rf4 = classify.DefaultRandomForest().fit(X4, y4)
 learned_policy4 = agglo.classifier_probability(fc, rf4)
 p4_test = imio.read_h5_stack('test-p4.lzf.h5')
diff --git a/tests/example-data/rf1-py3.joblib.tar.gz b/tests/example-data/rf1-py3.joblib.tar.gz
new file mode 100644
index 0000000..6e99953
Binary files /dev/null and b/tests/example-data/rf1-py3.joblib.tar.gz differ
diff --git a/tests/example-data/rf4-py3.joblib.tar.gz b/tests/example-data/rf4-py3.joblib.tar.gz
new file mode 100644
index 0000000..cd41706
Binary files /dev/null and b/tests/example-data/rf4-py3.joblib.tar.gz differ
diff --git a/tests/test_agglo.py b/tests/test_agglo.py
index e6faf4a..dc8ca0d 100644
--- a/tests/test_agglo.py
+++ b/tests/test_agglo.py
@@ -1,4 +1,7 @@
+from __future__ import absolute_import
 import os
+from six.moves import map
+from six.moves import range
 
 D = os.path.dirname(os.path.abspath(__file__)) + '/'
 
@@ -9,14 +12,14 @@
 from gala import evaluate as ev
 
 
-test_idxs = range(4)
+test_idxs = list(range(4))
 num_tests = len(test_idxs)
 fns = [D + 'toy-data/test-%02i-probabilities.txt' % i for i in test_idxs]
-probs = map(np.loadtxt, fns)
+probs = list(map(np.loadtxt, fns))
 fns = [D + 'toy-data/test-%02i-watershed.txt' % i for i in test_idxs]
 wss = [np.loadtxt(fn, dtype=np.uint32) for fn in fns]
 fns = [D + 'toy-data/test-%02i-groundtruth.txt' % i for i in test_idxs]
-results = map(np.loadtxt, fns)
+results = list(map(np.loadtxt, fns))
 
 landscape = np.array([1,0,1,2,1,3,2,0,2,4,1,0])
diff --git a/tests/test_features.py b/tests/test_features.py
index f16ce43..06308be 100644
--- a/tests/test_features.py
+++ b/tests/test_features.py
@@ -1,14 +1,20 @@
+from __future__ import absolute_import
 import sys, os
-import cPickle as pck
+import six.moves.cPickle as pck
 from copy import deepcopy as copy
 
 import numpy as np
 from numpy.testing import (assert_allclose, assert_approx_equal,
                            assert_equal)
+from six.moves import zip
 
 rundir = os.path.dirname(__file__)
 sys.path.append(rundir)
 
+
+PYTHON = sys.version_info[0]
+
+
 from gala import agglo, features
 
@@ -64,7 +70,11 @@ def run_matched(f, fn, c=1,
     p = probs1 if c == 1 else probs2
     g = agglo.Rag(wss1, p, feature_manager=f)
     o = list_of_feature_arrays(g, f, edges, merges)
-    r = pck.load(open(fn, 'r'))
+    with open(fn, 'rb') as fin:
+        if PYTHON == 2:
+            r = pck.load(fin)
+        else:
+            r = pck.load(fin, encoding='bytes')
     assert_equal_lists_or_arrays(o, r)
 
diff --git a/tests/test_gala.py b/tests/test_gala.py
index 48630c7..82acf78 100644
--- a/tests/test_gala.py
+++ b/tests/test_gala.py
@@ -1,8 +1,20 @@
+from __future__ import absolute_import
 import os
+import sys
+
+PYTHON_VERSION = sys.version_info[0]
 
 from numpy.testing import assert_allclose
 import numpy as np
+from sklearn.externals import joblib
+import subprocess as sp
 
 from gala import imio, classify, features, agglo, evaluate as ev
+from six.moves import map
+
+
+def tar_extract(fn):
+    sp.call(['tar', '-xzf', fn + '.tar.gz'])
+
 
 rundir = os.path.dirname(__file__)
 
@@ -50,8 +62,13 @@ def test_generate_examples_1_channel():
 
 def test_segment_with_classifer_1_channel():
-    rf = classify.load_classifier(
+    if PYTHON_VERSION == 2:
+        rf = classify.load_classifier(
             os.path.join(rundir, 'example-data/rf-1.joblib'))
+    else:
+        fn = os.path.join(rundir, 'example-data/rf1-py3.joblib')
+        tar_extract(fn)
+        rf = joblib.load(os.path.basename(fn))
     learned_policy = agglo.classifier_probability(fc, rf)
     g_test = agglo.Rag(ws_test, pr_test, learned_policy, feature_manager=fc)
     g_test.agglomerate(0.5)
@@ -73,8 +90,13 @@ def test_generate_examples_4_channel():
 
 def test_segment_with_classifier_4_channel():
-    rf = classify.load_classifier(
+    if PYTHON_VERSION == 2:
+        rf = classify.load_classifier(
             os.path.join(rundir, 'example-data/rf-4.joblib'))
+    else:
+        fn = os.path.join(rundir, 'example-data/rf4-py3.joblib')
+        tar_extract(fn)
+        rf = joblib.load(os.path.basename(fn))
     learned_policy = agglo.classifier_probability(fc, rf)
     g_test = agglo.Rag(ws_test, p4_test, learned_policy, feature_manager=fc)
     g_test.agglomerate(0.5)
diff --git a/tests/test_optimized.py b/tests/test_optimized.py
index 987b8fe..19e4079 100644
--- a/tests/test_optimized.py
+++ b/tests/test_optimized.py
@@ -1,6 +1,8 @@
+from __future__ import absolute_import
 import numpy as np
 from numpy.testing import assert_equal
 from gala import optimized as opt
+from six.moves import map
 
 def _flood_fill_example():
     return np.array([[[0,1,2,5],
diff --git a/tests/test_watershed.py b/tests/test_watershed.py
index eb1b0a3..939d2ca 100644
--- a/tests/test_watershed.py
+++ b/tests/test_watershed.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
 import os
 import time
 import numpy as np
@@ -5,6 +6,9 @@
 from numpy.testing import assert_array_equal, assert_array_less
 
 from gala import morpho
+from six.moves import map
+from six.moves import range
+from six.moves import zip
 
 rundir = os.path.dirname(__file__)
 
@@ -17,14 +21,14 @@ def wrapped(*args, **kwargs):
     return wrapped
 
 
-test_idxs = range(4)
+test_idxs = list(range(4))
 num_tests = len(test_idxs)
 fns = [os.path.join(rundir, 'toy-data/test-%02i-probabilities.txt' % i)
        for i in test_idxs]
-probs = map(np.loadtxt, fns)
+probs = list(map(np.loadtxt, fns))
 fns = [os.path.join(rundir, 'toy-data/test-%02i-watershed.txt' % i)
       for i in test_idxs]
-results = map(np.loadtxt, fns)
+results = list(map(np.loadtxt, fns))
 
 landscape = np.array([1,0,1,2,1,3,2,0,2,4,1,0])
diff --git a/tests/toy-data/test-04-composite-2channel-12-13.pck b/tests/toy-data/test-04-composite-2channel-12-13.pck
index 525f375..a3ec4ff 100644
Binary files a/tests/toy-data/test-04-composite-2channel-12-13.pck and b/tests/toy-data/test-04-composite-2channel-12-13.pck differ
diff --git a/tests/toy-data/test-04-histogram-1channel-12-13.pck b/tests/toy-data/test-04-histogram-1channel-12-13.pck
index 4eba4f3..f95c6d1 100644
Binary files a/tests/toy-data/test-04-histogram-1channel-12-13.pck and b/tests/toy-data/test-04-histogram-1channel-12-13.pck differ
diff --git a/tests/toy-data/test-04-histogram-2channel-12-13.pck b/tests/toy-data/test-04-histogram-2channel-12-13.pck
index 55c5071..cab2e35 100644
Binary files a/tests/toy-data/test-04-histogram-2channel-12-13.pck and b/tests/toy-data/test-04-histogram-2channel-12-13.pck differ
diff --git a/tests/toy-data/test-04-moments-2channel-12-13.pck b/tests/toy-data/test-04-moments-2channel-12-13.pck
index 94ed471..439f4ec 100644
Binary files a/tests/toy-data/test-04-moments-2channel-12-13.pck and b/tests/toy-data/test-04-moments-2channel-12-13.pck differ
diff --git a/tests/toy-data/test-04-squiggle-1channel-12-13.pck b/tests/toy-data/test-04-squiggle-1channel-12-13.pck
index 3636e64..b60205c 100644
Binary files a/tests/toy-data/test-04-squiggle-1channel-12-13.pck and b/tests/toy-data/test-04-squiggle-1channel-12-13.pck differ