/
trackobjects.py
2794 lines (2592 loc) · 128 KB
/
trackobjects.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
from cellprofiler.gui.help import USING_METADATA_HELP_REF, USING_METADATA_GROUPING_HELP_REF, LOADING_IMAGE_SEQ_HELP_REF
# Link-type codes recorded in the per-object LinkType measurement.
LT_NONE = 0       # object was not linked to any parent
LT_PHASE_1 = 1    # linked to a parent in the previous frame (phase 1 of LAP)
LT_SPLIT = 2      # linked as the start of a split path
LT_MITOSIS = 3    # linked to its parent as a daughter of a mitotic pair
LT_GAP = 4        # linked across a gap to a parent in an earlier frame
# Kalman movement-model codes recorded in the MovementModel measurement.
KM_VEL = 1        # constant-velocity model was used
KM_NO_VEL = 0     # random-motion (no-velocity) model was used
KM_NONE = -1      # no model applied
__doc__ = """
<b>Track Objects</b> allows tracking objects throughout sequential
frames of a series of images, so that from frame to frame
each object maintains a unique identity in the output measurements
<hr>
This module must be placed downstream of a module that identifies objects
(e.g., <b>IdentifyPrimaryObjects</b>). <b>TrackObjects</b> will associate each
object with the same object in the frames before and after. This allows the study
of objects' lineages and the timing and characteristics of dynamic events in
movies.
<p>Images in CellProfiler are processed sequentially by frame (whether loaded as a
series of images or a movie file). To process a collection of images/movies,
you will need to do the following:
<ul>
<li>Define each individual movie using metadata
either contained within the image file itself or as part of the images nomenclature
or folder structure. %(USING_METADATA_HELP_REF)s.</li>
<li>Group the movies to make sure
that each image sequence is handled individually. %(USING_METADATA_GROUPING_HELP_REF)s.
</li>
</ul>
For complete details, see <i>%(LOADING_IMAGE_SEQ_HELP_REF)s</i>.</p>
<p>For an example pipeline using TrackObjects, see the CellProfiler
<a href="http://www.cellprofiler.org/examples.shtml#Tracking">Examples</a> webpage.</p>
<h4>Available measurements</h4>
<b>Object measurements</b>
<ul>
<li><i>Label:</i> Each tracked object is assigned a unique identifier (label).
Results of splits or merges are seen as new objects and assigned a new
label.</li>
<li><i>ParentImageNumber, ParentObjectNumber:</i> The <i>ImageNumber</i> and
<i>ObjectNumber</i> of the parent object in the prior frame. For a split, each
child object will have the label of the object it split from. For a merge,
the child will have the label of the closest parent.</li>
<li><i>TrajectoryX, TrajectoryY:</i> The direction of motion (in x and y coordinates) of the
object from the previous frame to the current frame.</li>
<li><i>DistanceTraveled:</i> The distance traveled by the object from the
previous frame to the current frame (calculated as the magnitude of
the trajectory vectors).</li>
<li><i>Displacement:</i> The shortest distance traveled by the object from its
initial starting position to the position in the current frame. That is, it is
the straight-line path between the two points.</li>
<li><i>IntegratedDistance:</i> The total distance traveled by the object during
the lifetime of the object.</li>
<li><i>Linearity:</i> A measure of how linear the object trajectory is during the
object lifetime. Calculated as (displacement from initial to final
location)/(integrated object distance). Value is in range of [0,1].</li>
<li><i>Lifetime:</i> The number of frames an object has existed. The lifetime starts
at 1 at the frame when an object appears, and is incremented with each frame that the
object persists. At the final frame of the image set/movie, the
lifetimes of all remaining objects are output.</li>
<li><i>FinalAge:</i> Similar to <i>LifeTime</i> but is only output at the final
frame of the object's life (or the movie ends, whichever comes first). At this point,
the final age of the object is output; no values are stored for earlier frames. This
is useful if you want to plot a histogram of the object lifetimes; all but the final age
can be ignored or filtered out.</li>
<li><i>LinkType:</i> The linking method used to link the object to its parent.
Values are <b>%(LT_NONE)d</b> if the object was not linked to a parent,
<b>%(LT_PHASE_1)d</b> if the object was linked to a parent in the previous frame,
<b>%(LT_SPLIT)d</b> if the object is linked as the start of a split path,
<b>%(LT_GAP)d</b> if the object was linked to a parent in a frame prior to the
previous frame (a gap),
<b>%(LT_MITOSIS)s</b> if the object was linked to its parent as a daughter of
a mitotic pair.</li>
<li><i>MovementModel:</i>The movement model used to track the object.
<b>%(KM_NO_VEL)d</b> if the random motion model was used and <b>%(KM_VEL)d</b>
if a constant-velocity model was used.</li>
<li><i>LinkingDistance:</i>The difference between the propagated position of an
object and the object to which it is matched. A slowly decaying histogram of
these distances indicates that the search radius is large enough.</li>
<li><i>StandardDeviation:</i>The Kalman filter maintains a running estimate
of the variance of the error in estimated position for each model.
This measurement records the linking distance divided by the standard deviation
of the error when linking the object with its parent.
</li>
<li><i>GapLength:</i>The number of frames between an object and its parent.
For instance, an object in frame 3 with a parent in frame 1 has a gap length of
2.</li>
<li><i>GapScore:</i>For an object linked to its parent by bridging a gap,
the score for the gap.</li>
<li><i>SplitScore:</i>For an object linked to its parent via a split, the score
for the split.</li>
<li><i>MergeScore:</i>For an object linked to a child via a merge, the score
for the merge.</li>
<li><i>MitosisScore:</i>For an object linked to two children via a mitosis,
the score for the mitosis.</li>
</ul>
<b>Image measurements</b>
<ul>
<li><i>LostObjectCount:</i> Number of objects that appear in the previous frame
but have no identifiable child in the current frame.</li>
<li><i>NewObjectCount:</i> Number of objects that appear in the current frame but
have no identifiable parent in the previous frame. </li>
<li><i>SplitObjectCount:</i> Number of objects in the current frame that
resulted from a split from a parent object in the previous frame.</li>
<li><i>MergedObjectCount:</i> Number of objects in the current frame that
resulted from the merging of child objects in the previous frame.</li>
</ul>
See also: Any of the <b>Measure</b> modules, <b>IdentifyPrimaryObjects</b>, <b>Groups</b>.
"""%globals()
# CellProfiler is distributed under the GNU General Public License.
# See the accompanying file LICENSE for details.
#
# Copyright (c) 2003-2009 Massachusetts Institute of Technology
# Copyright (c) 2009-2014 Broad Institute
#
# Please see the AUTHORS file for credits.
#
# Website: http://www.cellprofiler.org
import logging
logger = logging.getLogger(__name__)
import numpy as np
import numpy.ma
from scipy.ndimage import distance_transform_edt
import scipy.ndimage
import scipy.sparse
import cellprofiler.cpmodule as cpm
import cellprofiler.cpimage as cpi
import cellprofiler.pipeline as cpp
import cellprofiler.settings as cps
from cellprofiler.settings import YES, NO
import cellprofiler.measurements as cpmeas
import cellprofiler.preferences as cpprefs
from cellprofiler.cpmath.lapjv import lapjv
import cellprofiler.cpmath.filter as cpfilter
from cellprofiler.cpmath.cpmorphology import fixup_scipy_ndimage_result as fix
from cellprofiler.cpmath.cpmorphology import centers_of_labels
from cellprofiler.cpmath.cpmorphology import associate_by_distance
from cellprofiler.cpmath.cpmorphology import all_connected_components
from cellprofiler.cpmath.index import Indexes
from identify import M_LOCATION_CENTER_X, M_LOCATION_CENTER_Y
from cellprofiler.gui.help import HELP_ON_MEASURING_DISTANCES
# Tracking-method choice strings shown in the "Choose a tracking method" setting.
TM_OVERLAP = 'Overlap'
TM_DISTANCE = 'Distance'
TM_MEASUREMENTS = 'Measurements'
TM_LAP = "LAP"
TM_ALL = [TM_OVERLAP, TM_DISTANCE, TM_MEASUREMENTS,TM_LAP]
# Display-type choice strings for the output image.
DT_COLOR_AND_NUMBER = 'Color and Number'
DT_COLOR_ONLY = 'Color Only'
DT_ALL = [DT_COLOR_AND_NUMBER, DT_COLOR_ONLY]
# Relationship name used when recording parent/child object relationships.
R_PARENT = "Parent"
# Prefix for all measurements produced by this module.
F_PREFIX = "TrackObjects"
# Per-object measurement feature names (see the module __doc__ for meanings).
F_LABEL = "Label"
F_PARENT_OBJECT_NUMBER = "ParentObjectNumber"
F_PARENT_IMAGE_NUMBER = "ParentImageNumber"
F_TRAJECTORY_X = "TrajectoryX"
F_TRAJECTORY_Y = "TrajectoryY"
F_DISTANCE_TRAVELED = "DistanceTraveled"
F_DISPLACEMENT = "Displacement"
F_INTEGRATED_DISTANCE = "IntegratedDistance"
F_LINEARITY = "Linearity"
F_LIFETIME = "Lifetime"
F_FINAL_AGE = "FinalAge"
F_MOVEMENT_MODEL = "MovementModel"
F_LINK_TYPE = "LinkType"
F_LINKING_DISTANCE = "LinkingDistance"
F_STANDARD_DEVIATION = "StandardDeviation"
F_GAP_LENGTH = "GapLength"
F_GAP_SCORE = "GapScore"
F_MERGE_SCORE = "MergeScore"
F_SPLIT_SCORE = "SplitScore"
F_MITOSIS_SCORE = "MitosisScore"
# Name pieces used to build Kalman-filter state feature names
# (see kalman_feature below).
F_KALMAN = "Kalman"
F_STATE = "State"
F_COV = "COV"
F_NOISE = "Noise"
F_VELOCITY_MODEL = "Vel"
F_STATIC_MODEL = "NoVel"
F_X = "X"
F_Y = "Y"
F_VX = "VX"
F_VY = "VY"
# Experiment-level measurement names for track counts before/after
# lifetime filtering.
F_EXPT_ORIG_NUMTRACKS = "%s_OriginalNumberOfTracks"%F_PREFIX
F_EXPT_FILT_NUMTRACKS = "%s_FilteredNumberOfTracks"%F_PREFIX
def kalman_feature(model, matrix_or_vector, i, j=None):
    '''Build the measurement feature name for one Kalman filter component.

    model - the movement model the feature belongs to: velocity or static
    matrix_or_vector - which part of the Kalman state is saved: vec, COV or noise
    i - name of the first (or only, for vec and noise) index into the vector
    j - name of the second index into the matrix, or None for vector features
    '''
    name_parts = [F_KALMAN, model, matrix_or_vector, i]
    if j is None:
        return "_".join(name_parts)
    return "_".join(name_parts + [j])
'''# of objects in the current frame without parents in the previous frame'''
F_NEW_OBJECT_COUNT = "NewObjectCount"
'''# of objects in the previous frame without parents in the new frame'''
F_LOST_OBJECT_COUNT = "LostObjectCount"
'''# of parents that split into more than one child'''
F_SPLIT_COUNT = "SplitObjectCount"
'''# of children that are merged from more than one parent'''
F_MERGE_COUNT = "MergedObjectCount"
'''Object area measurement for LAP method
The final part of the LAP method needs the object area measurement
which is stored using this name.'''
F_AREA = "Area"
# (feature name, database column type) pairs for the per-object measurements.
F_ALL_COLTYPE_ALL = [(F_LABEL, cpmeas.COLTYPE_INTEGER),
(F_PARENT_OBJECT_NUMBER, cpmeas.COLTYPE_INTEGER),
(F_PARENT_IMAGE_NUMBER, cpmeas.COLTYPE_INTEGER),
(F_TRAJECTORY_X, cpmeas.COLTYPE_INTEGER),
(F_TRAJECTORY_Y, cpmeas.COLTYPE_INTEGER),
(F_DISTANCE_TRAVELED, cpmeas.COLTYPE_FLOAT),
(F_DISPLACEMENT, cpmeas.COLTYPE_FLOAT),
(F_INTEGRATED_DISTANCE, cpmeas.COLTYPE_FLOAT),
(F_LINEARITY, cpmeas.COLTYPE_FLOAT),
(F_LIFETIME, cpmeas.COLTYPE_INTEGER),
(F_FINAL_AGE, cpmeas.COLTYPE_INTEGER)]
# (feature name, database column type) pairs for the per-image measurements.
F_IMAGE_COLTYPE_ALL = [(F_NEW_OBJECT_COUNT, cpmeas.COLTYPE_INTEGER),
(F_LOST_OBJECT_COUNT, cpmeas.COLTYPE_INTEGER),
(F_SPLIT_COUNT, cpmeas.COLTYPE_INTEGER),
(F_MERGE_COUNT, cpmeas.COLTYPE_INTEGER)]
# Feature names only (column types stripped) for convenience.
F_ALL = [feature for feature, coltype in F_ALL_COLTYPE_ALL]
F_IMAGE_ALL = [feature for feature, coltype in F_IMAGE_COLTYPE_ALL]
'''Random motion model, for instance Brownian motion'''
M_RANDOM = "Random"
'''Velocity motion model, object position depends on prior velocity'''
M_VELOCITY = "Velocity"
'''Random and velocity models'''
M_BOTH = "Both"
class TrackObjects(cpm.CPModule):
    """Track objects through sequential frames of an image series.

    Associates each identified object with the matching object in prior and
    subsequent frames, recording lineage, trajectory, and lifetime
    measurements (see the module-level __doc__ for the full list).
    """
    module_name = 'TrackObjects'
    category = "Object Processing"
    variable_revision_number = 6
def create_settings(self):
    """Create the module's settings.

    Fix vs. original: the ``mitosis_cost`` and ``mitosis_max_distance`` doc
    strings contain ``%(TM_LAP)s`` placeholders but were never formatted
    with ``%globals()``, so the raw placeholder text would be shown in the
    UI help. They are now formatted like every other doc string here.
    """
    self.tracking_method = cps.Choice(
        'Choose a tracking method',
        TM_ALL, doc="""
        When trying to track an object in an image,
        <b>TrackObjects</b> will search within a maximum
        specified distance (see the <i>distance within which to search</i> setting)
        of the object's location in the previous image, looking for a "match".
        Objects that match are assigned the same number, or label, throughout the
        entire movie.
        There are several options for the method used to find a match. Choose
        among these options based on which is most consistent from frame
        to frame of your movie.
        <ul>
        <li><i>%(TM_OVERLAP)s:</i> Compares the amount of spatial overlap between identified objects in
        the previous frame with those in the current frame. The object with the
        greatest amount of spatial overlap will be assigned the same number (label). Recommended
        when there is a high degree of overlap of an object from one frame to the next,
        which is the case for movies with high frame rates relative to object motion.</li>
        <li><i>%(TM_DISTANCE)s:</i> Compares the distance between each identified
        object in the previous frame with that of the current frame. The
        closest objects to each other will be assigned the same number (label).
        Distances are measured from the perimeter of each object. Recommended
        for cases where the objects are not very crowded but where <i>Overlap</i>
        does not work sufficiently well, which is the case
        for movies with low frame rates relative to object motion.</li>
        <li><i>%(TM_MEASUREMENTS)s:</i> Compares each object in the
        current frame with objects in the previous frame based on a particular
        feature you have measured for the objects (for example, a particular intensity or shape measurement that can distinguish nearby objects). The object
        with the closest-matching measurement will be selected as a match and will be
        assigned the same number (label). This selection requires that you run the
        specified <b>Measure</b> module previous to this module in the pipeline so
        that the measurement values can be used to track the objects.</li>
        <li><i>%(TM_LAP)s:</i> Uses the linear assignment problem (LAP) framework. The
        linear assignment problem (LAP) algorithm (<i>Jaqaman et al., 2008</i>)
        addresses the challenges of high object density, motion heterogeneity,
        temporary disappearances, and object merging and splitting.
        The algorithm first links objects between consecutive frames and then links
        the resulting partial trajectories into complete trajectories. Both steps are formulated
        as global combinatorial optimization problems whose solution identifies the overall
        most likely set of object trajectories throughout a movie.
        Tracks are constructed from an image sequence by detecting objects in each
        frame and linking objects between consecutive frames as a first step. This step alone
        may result in incompletely tracked objects due to the appearance and disappearance
        of objects, either in reality or apparently because of noise and imaging limitations.
        To correct this, you may apply an optional second step which closes temporal gaps
        between tracked objects and captures merging and splitting events. This step takes
        place at the end of the analysis run.
        <b>References</b>
        <ul>
        <li>Jaqaman K, Loerke D, Mettlen M, Kuwata H, Grinstein S, Schmid SL, Danuser G. (2008)
        "Robust single-particle tracking in live-cell time-lapse sequences."
        <i>Nature Methods</i> 5(8),695-702.
        <a href="http://dx.doi.org/10.1038/nmeth.1237">(link)</a></li>
        <li>Jaqaman K, Danuser G. (2009) "Computational image analysis of cellular dynamics:
        a case study based on particle tracking." Cold Spring Harb Protoc. 2009(12):pdb.top65.
        <a href="http://dx.doi.org/10.1101/pdb.top65">(link)</a></li>
        </ul>
        </li>
        </ul>"""%globals())
    self.object_name = cps.ObjectNameSubscriber(
        'Select the objects to track',cps.NONE, doc="""
        Select the objects to be tracked by this module.""")
    self.measurement = cps.Measurement(
        'Select object measurement to use for tracking',
        lambda : self.object_name.value, doc="""
        <i>(Used only if Measurements is the tracking method)</i><br>
        Select which type of measurement (category) and which specific feature from the
        <b>Measure</b> module will be used for tracking. Select the feature name from
        the popup box or see each <b>Measure</b> module's help for the list of
        the features measured by that module. If necessary, you will also be asked
        to specify additional details such as the
        image from which the measurements originated or the measurement scale.""")
    self.pixel_radius = cps.Integer(
        'Maximum pixel distance to consider matches',50,minval=1,doc="""
        Objects in the subsequent frame will be considered potential matches if
        they are within this distance. To determine a suitable pixel distance, you can look
        at the axis increments on each image (shown in pixel units) or
        use the distance measurement tool. %(HELP_ON_MEASURING_DISTANCES)s"""%globals())
    self.model = cps.Choice(
        "Select the motion model",[M_RANDOM, M_VELOCITY, M_BOTH], value=M_BOTH,doc = """
        <i>(Used only if the %(TM_LAP)s tracking method is applied)</i><br>
        This setting controls how to predict an object's position in
        the next frame, assuming that each object moves randomly with
        a frame-to-frame variance in position that follows a Gaussian
        distribution.<br>
        <ul>
        <li><i>%(M_RANDOM)s:</i> A model in which objects move due to
        Brownian Motion or a similar process where the variance in position
        differs between objects. Use this model if the objects move with some
        random jitter around a stationary location.</li>
        <li><i>%(M_VELOCITY)s:</i> A model in which the object moves with
        a velocity. Both velocity and position (after correcting for
        velocity) vary following a Gaussian distribution. Use this model if
        the objects move along a spatial trajectory in some direction over time.</li>
        <li><i>%(M_BOTH)s:</i> <b>TrackObjects</b> will predict each
        object's position using both models and use the model with the
        lowest penalty to join an object in one frame with one in another. Use this
        option if both models above are applicable over time.
        </li>
        </ul>""" % globals())
    self.radius_std = cps.Float(
        'Number of standard deviations for search radius', 3, minval=1,doc = """
        <i>(Used only if the %(TM_LAP)s tracking method is applied)</i>
        <br>
        <b>TrackObjects</b> will estimate the variance of the error
        between the observed and predicted positions of an object for
        each movement model. It will constrain the search for matching
        objects from one frame to the next to the standard deviation
        of the error times the number of standard
        deviations that you enter here."""%globals())
    self.radius_limit = cps.FloatRange(
        'Search radius limit, in pixel units (Min,Max)', (2, 10), minval = 0,doc = """
        <i>(Used only if the %(TM_LAP)s tracking method is applied)</i><br>
        <b>Care must be taken to adjust the upper limit appropriate to the data.</b><br>
        <b>TrackObjects</b> derives a search radius based on the error
        estimation. Potentially, the module can make an erroneous assignment
        with a large error, leading to a large estimated error for
        the object in the next frame. Conversely, the module can arrive
        at a small estimated error by chance, leading to a maximum radius
        that does not track the object in a subsequent frame. The radius
        limit constrains the maximum radius to reasonable values.
        <p>The lower limit should be set to a radius (in pixels) that is a
        reasonable displacement for any object from one frame to the next.
        The upper limit should be set to the maximum reasonable
        displacement under any circumstances.</p>"""%globals())
    self.wants_second_phase = cps.Binary(
        "Run the second phase of the LAP algorithm?", True, doc="""
        <i>(Used only if the %(TM_LAP)s tracking method is applied)</i><br>
        Select <i>%(YES)s</i> to run the second phase of the LAP algorithm
        after processing all images. Select <i>%(NO)s</i> to omit the
        second phase or to perform the second phase when running the module
        as a data tool.
        <p>Since object tracks may start and end not only because of the true appearance
        and disappearance of objects, but also because of apparent disappearances due
        to noise and limitations in imaging, you may want to run the second phase
        which attempts to close temporal gaps between tracked objects and tries to
        capture merging and splitting events.</p>
        <p>For additional details on optimizing the LAP settings, refer to Jaqaman K, Danuser G.
        "Computational image analysis of cellular dynamics: a case study based on particle
        tracking." <i>Cold Spring Harb Protocols</i> 2009(12)
        (<a href="http://cshprotocols.cshlp.org/cgi/content/full/2009/12/pdb.top65">link</a>),
        in particular the section "Adjustment of control parameters and
        diagnostics for track evaluation."</p>"""%globals())
    self.gap_cost = cps.Integer(
        'Gap cost', 40, minval=1, doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i><br>
        This setting assigns a cost to keeping a gap caused
        when an object is missing from one of the frames of a track (the
        alternative to keeping the gap is to bridge it by connecting
        the tracks on either side of the missing frames).
        The cost of bridging a gap is the distance, in pixels, of the
        displacement of the object between frames.
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>Set the gap cost higher if tracks from objects in previous
        frames are being erroneously joined, across a gap, to tracks from
        objects in subsequent frames. </li>
        <li>Set the cost lower if tracks
        are not properly joined due to gaps caused by mis-segmentation.</li>
        </ul></p>'''%globals())
    self.split_cost = cps.Integer(
        'Split alternative cost', 40, minval=1, doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i><br>
        This setting is the cost of keeping two tracks distinct
        when the alternative is to make them into one track that
        splits. A split occurs when an object in one frame is assigned
        to the same track as two objects in a subsequent frame.
        The split cost takes two components into account:
        <ul>
        <li>The area of the split object relative to the area of
        the resulting objects.</li>
        <li>The displacement of the resulting
        objects relative to the position of the original object.</li>
        </ul>
        The split cost is roughly measured in pixels. The split alternative cost is
        (conceptually) subtracted from the cost of making the split.
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>The split cost should be set lower if objects are being split
        that should not be split. </li>
        <li>The split cost should be set higher if objects
        that should be split are not.</li>
        </ul></p>'''%globals())
    self.merge_cost = cps.Integer(
        'Merge alternative cost', 40, minval=1,doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i><br>
        This setting is the cost of keeping two tracks
        distinct when the alternative is to merge them into one.
        A merge occurs when two objects in one frame are assigned to
        the same track as a single object in a subsequent frame.
        The merge score takes two components into account:
        <ul>
        <li>The area of the two objects
        to be merged relative to the area of the resulting objects.</li>
        <li>The displacement of the original objects relative to the final
        object. </li>
        </ul>
        The merge cost is measured in pixels. The merge
        alternative cost is (conceptually) subtracted from the
        cost of making the merge.
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>Set the merge alternative cost lower if objects are being
        merged when they should otherwise be kept separate. </li>
        <li>Set the merge alternative cost
        higher if objects that are not merged should be merged.</li>
        </ul></p>'''%globals())
    # BUGFIX: formatted with %globals() so %(TM_LAP)s is substituted.
    self.mitosis_cost = cps.Integer(
        'Mitosis alternative cost', 80, minval=1, doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the
        second phase is run)</i><br>
        This setting is the cost of not linking a parent and two daughters
        via the mitosis model. the %(TM_LAP)s tracking method weighs this
        cost against the score of a potential mitosis. The model expects
        the daughters to be equidistant from the parent after mitosis,
        so the parent location is expected to be midway between the daughters.
        In addition, the model expects the daughters' areas to be equal
        to the parent's area. The mitosis score is the distance error
        of the parent times the area inequality ratio of the parent and
        daughters (the larger of Area(daughters) / Area(parent) and
        Area(parent) / Area(daughters)).<br>
        An accepted mitosis closes two gaps, so all things being equal,
        the mitosis alternative cost should be approximately double the
        gap closing cost.
        Increase the mitosis alternative cost to favor more mitoses
        and decrease it to prevent more mitoses candidates from being
        accepted.'''%globals())
    # BUGFIX: formatted with %globals() so %(TM_LAP)s is substituted.
    self.mitosis_max_distance = cps.Integer(
        'Mitosis max distance', 40, minval=1, doc= '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the
        second phase is run)</i><br>
        This setting is the maximum allowed distance in pixels of either of
        the daughter candidates after mitosis from the parent candidate.
        '''%globals())
    self.max_gap_score = cps.Integer(
        'Maximum gap displacement, in frames', 5, minval=1, doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i><br>
        This setting acts as a filter for unreasonably large
        displacements during the second phase.
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>The maximum gap displacement should be set to roughly
        the maximum displacement of an object's center from frame to frame. An object that makes large
        frame-to-frame jumps should have a higher value for this setting than one that only moves slightly.</li>
        <li>Be aware that the LAP algorithm will run more slowly with a higher maximum gap displacement
        value, since the higher this value, the more objects that must be compared at each step.</li>
        <li>Objects that would have been tracked between successive frames for a lower maximum displacement
        may not be tracked if the value is set higher.</li>
        </ul></p>'''%globals())
    self.max_merge_score = cps.Integer(
        'Maximum merge score', 50, minval=1, doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i><br>
        This setting acts as a filter for unreasonably large
        merge scores. The merge score has two components:
        <ul>
        <li>The area of the resulting merged object relative to the area of the
        two objects to be merged.</li>
        <li>The distances between the objects to be merged and the resulting object. </li>
        </ul>
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>The LAP algorithm will run more slowly with a higher maximum merge score value. </li>
        <li>Objects that would have been merged at a lower maximum merge score will not be considered for merging.</li>
        </ul></p>'''%globals())
    self.max_split_score = cps.Integer(
        'Maximum split score', 50, minval=1, doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i><br>
        This setting acts as a filter for unreasonably large
        split scores. The split score has two components:
        <ul>
        <li>The area of the initial object relative to the area of the
        two objects resulting from the split.</li>
        <li>The distances between the original and resulting objects. </li>
        </ul>
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>The LAP algorithm will run more slowly with a maximum split score value. </li>
        <li>Objects that would have been split at a lower maximum split score will not be considered for splitting.</li>
        </ul></p>
        '''%globals())
    self.max_frame_distance = cps.Integer(
        'Maximum gap', 5, minval=1, doc = '''
        <i>(Used only if the %(TM_LAP)s tracking method is applied and the second phase is run)</i><br>
        <b>Care must be taken to adjust this setting appropriate to the data.</b><br>
        This setting controls the maximum number of frames that can
        be skipped when merging a gap caused by an unsegmented object.
        These gaps occur when an image is mis-segmented and identification
        fails to find an object in one or more frames.
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>Set the maximum gap higher in order to have more chance of correctly recapturing an object after
        erroneously losing the original for a few frames.</li>
        <li>Set the maximum gap lower to reduce the chance of erroneously connecting to the wrong object after
        correctly losing the original object (e.g., if the cell dies or moves off-screen).</li>
        </ul></p>'''%globals())
    self.wants_lifetime_filtering = cps.Binary(
        'Filter objects by lifetime?', False, doc = '''
        Select <i>%(YES)s</i> if you want objects to be filtered by their
        lifetime, i.e., total duration in frames. This is useful for
        marking objects which transiently appear and disappear, such
        as the results of a mis-segmentation. <br>
        <p><i><b>Recommendations:</b></i>
        <ul>
        <li>This operation does not actually delete the filtered object,
        but merely removes its label from the tracked object list;
        the filtered object's per-object measurements are retained.</li>
        <li>An object can be filtered only if it is tracked as an unique object.
        Splits continue the lifetime count from their parents, so the minimum
        lifetime value does not apply to them.</li>
        </ul></p>'''%globals())
    self.wants_minimum_lifetime = cps.Binary(
        'Filter using a minimum lifetime?', True, doc = '''
        <i>(Used only if objects are filtered by lifetime)</i><br>
        Select <i>%(YES)s</i> to filter the object on the basis of a minimum number of frames.'''%globals())
    self.min_lifetime = cps.Integer(
        'Minimum lifetime', 1, minval=1,doc="""
        Enter the minimum number of frames an object is permitted to persist. Objects
        which last this number of frames or lower are filtered out.""")
    self.wants_maximum_lifetime = cps.Binary(
        'Filter using a maximum lifetime?', False, doc = '''
        <i>(Used only if objects are filtered by lifetime)</i><br>
        Select <i>%(YES)s</i> to filter the object on the basis of a maximum number of frames.'''%globals())
    self.max_lifetime = cps.Integer(
        'Maximum lifetime', 100, doc="""
        Enter the maximum number of frames an object is permitted to persist. Objects
        which last this number of frames or more are filtered out.""")
    self.display_type = cps.Choice(
        'Select display option', DT_ALL, doc="""
        The output image can be saved as:
        <ul>
        <li><i>%(DT_COLOR_ONLY)s:</i> A color-labeled image, with each tracked
        object assigned a unique color</li>
        <li><i>%(DT_COLOR_AND_NUMBER)s:</i> Same as above but with the tracked object
        number superimposed.</li>
        </ul>"""%globals())
    self.wants_image = cps.Binary(
        "Save color-coded image?", False, doc="""
        Select <i>%(YES)s</i> to retain the image showing the tracked objects
        for later use in the pipeline. For example, a common use is for quality control purposes
        saving the image with the <b>SaveImages</b> module.
        <p>Please note that if you are using the second phase of the %(TM_LAP)s method,
        the final labels are not assigned until <i>after</i> the pipeline has
        completed the analysis run. That means that saving the color-coded image
        will only show the penultimate result and not the final product.</p>."""%globals())
    self.image_name = cps.ImageNameProvider(
        "Name the output image", "TrackedCells", doc = '''
        <i>(Used only if saving the color-coded image)</i><br>
        Enter a name to give the color-coded image of tracked labels.''')
def settings(self):
return [self.tracking_method, self.object_name, self.measurement,
self.pixel_radius, self.display_type, self.wants_image,
self.image_name, self.model,
self.radius_std, self.radius_limit,
self.wants_second_phase,
self.gap_cost, self.split_cost, self.merge_cost,
self.max_gap_score, self.max_split_score,
self.max_merge_score, self.max_frame_distance,
self.wants_lifetime_filtering, self.wants_minimum_lifetime,
self.min_lifetime, self.wants_maximum_lifetime,
self.max_lifetime, self.mitosis_cost, self.mitosis_max_distance]
def validate_module(self, pipeline):
'''Make sure that the user has selected some limits when filtering'''
if (self.tracking_method == TM_LAP and
self.wants_lifetime_filtering.value and
(self.wants_minimum_lifetime.value == False and self.wants_minimum_lifetime.value == False) ):
raise cps.ValidationError(
'Please enter a minimum and/or maximum lifetime limit',
self.wants_lifetime_filtering)
def visible_settings(self):
result = [self.tracking_method, self.object_name]
if self.tracking_method == TM_MEASUREMENTS:
result += [ self.measurement]
if self.tracking_method == TM_LAP:
result += [self.model, self.radius_std, self.radius_limit]
result += [self.wants_second_phase]
if self.wants_second_phase:
result += [
self.gap_cost, self.split_cost, self.merge_cost,
self.mitosis_cost,
self.max_gap_score, self.max_split_score,
self.max_merge_score, self.max_frame_distance,
self.mitosis_max_distance]
else:
result += [self.pixel_radius]
result += [ self.wants_lifetime_filtering]
if self.wants_lifetime_filtering:
result += [ self.wants_minimum_lifetime ]
if self.wants_minimum_lifetime:
result += [ self.min_lifetime ]
result += [ self.wants_maximum_lifetime ]
if self.wants_maximum_lifetime:
result += [ self.max_lifetime ]
result +=[ self.display_type, self.wants_image]
if self.wants_image.value:
result += [self.image_name]
return result
@property
def static_model(self):
return self.model in (M_RANDOM, M_BOTH)
@property
def velocity_model(self):
return self.model in (M_VELOCITY, M_BOTH)
def get_ws_dictionary(self, workspace):
return self.get_dictionary(workspace.image_set_list)
def __get(self, field, workspace, default):
if self.get_ws_dictionary(workspace).has_key(field):
return self.get_ws_dictionary(workspace)[field]
return default
def __set(self, field, workspace, value):
self.get_ws_dictionary(workspace)[field] = value
def get_group_image_numbers(self, workspace):
m = workspace.measurements
assert isinstance(m, cpmeas.Measurements)
d = self.get_ws_dictionary(workspace)
group_number = m.get_group_number()
if not d.has_key("group_number") or d["group_number"] != group_number:
d["group_number"] = group_number
group_indexes = np.array([
(m.get_measurement(cpmeas.IMAGE, cpmeas.GROUP_INDEX, i), i)
for i in m.get_image_numbers()
if m.get_measurement(cpmeas.IMAGE, cpmeas.GROUP_NUMBER, i) ==
group_number], int)
order = np.lexsort([group_indexes[:, 0]])
d["group_image_numbers"] = group_indexes[order, 1]
return d["group_image_numbers"]
def get_saved_measurements(self, workspace):
return self.__get("measurements", workspace, np.array([], float))
def set_saved_measurements(self, workspace, value):
self.__set("measurements", workspace, value)
def get_saved_coordinates(self, workspace):
return self.__get("coordinates", workspace, np.zeros((2,0), int))
def set_saved_coordinates(self, workspace, value):
self.__set("coordinates", workspace, value)
def get_orig_coordinates(self, workspace):
'''The coordinates of the first occurrence of an object's ancestor'''
return self.__get("orig coordinates", workspace, np.zeros((2,0), int))
def set_orig_coordinates(self, workspace, value):
self.__set("orig coordinates", workspace, value)
    def get_saved_labels(self, workspace):
        '''Label matrix stored under "labels", or None if not yet set.'''
        return self.__get("labels", workspace, None)
def set_saved_labels(self, workspace, value):
self.__set("labels", workspace, value)
def get_saved_object_numbers(self, workspace):
return self.__get("object_numbers", workspace, np.array([], int))
def set_saved_object_numbers(self, workspace, value):
return self.__set("object_numbers", workspace, value)
def get_saved_ages(self, workspace):
return self.__get("ages", workspace, np.array([], int))
def set_saved_ages(self, workspace, values):
self.__set("ages", workspace, values)
def get_saved_distances(self, workspace):
return self.__get("distances", workspace, np.zeros((0,)))
def set_saved_distances(self, workspace, values):
self.__set("distances", workspace, values)
    def get_max_object_number(self, workspace):
        '''Value stored under "max_object_number" (0 if not yet set).'''
        return self.__get("max_object_number", workspace, 0)
def set_max_object_number(self, workspace, value):
self.__set("max_object_number", workspace, value)
    def get_kalman_states(self, workspace):
        '''Value stored under "kalman_states", or None if not yet set.'''
        return self.__get("kalman_states", workspace, None)
def set_kalman_states(self, workspace, value):
self.__set("kalman_states", workspace, value)
def prepare_group(self, workspace, grouping, image_numbers):
'''Erase any tracking information at the start of a run'''
d = self.get_dictionary(workspace.image_set_list)
d.clear()
return True
def measurement_name(self, feature):
'''Return a measurement name for the given feature'''
if self.tracking_method == TM_LAP:
return "%s_%s" % (F_PREFIX, feature)
return "%s_%s_%s" % (F_PREFIX, feature, str(self.pixel_radius.value))
def image_measurement_name(self, feature):
'''Return a measurement name for an image measurement'''
if self.tracking_method == TM_LAP:
return "%s_%s_%s" % (F_PREFIX, feature, self.object_name.value)
return "%s_%s_%s_%s" % (F_PREFIX, feature, self.object_name.value,
str(self.pixel_radius.value))
def add_measurement(self, workspace, feature, values):
'''Add a measurement to the workspace's measurements
workspace - current image set's workspace
feature - name of feature being measured
values - one value per object
'''
workspace.measurements.add_measurement(
self.object_name.value,
self.measurement_name(feature),
values)
def add_image_measurement(self, workspace, feature, value):
measurement_name = self.image_measurement_name(feature)
workspace.measurements.add_image_measurement(measurement_name, value)
def run(self, workspace):
objects = workspace.object_set.get_objects(self.object_name.value)
if self.tracking_method == TM_DISTANCE:
self.run_distance(workspace, objects)
elif self.tracking_method == TM_OVERLAP:
self.run_overlap(workspace, objects)
elif self.tracking_method == TM_MEASUREMENTS:
self.run_measurements(workspace, objects)
elif self.tracking_method == TM_LAP:
self.run_lapdistance(workspace, objects)
else:
raise NotImplementedError("Unimplemented tracking method: %s" %
self.tracking_method.value)
if self.wants_image.value:
import matplotlib.figure
import matplotlib.axes
import matplotlib.backends.backend_agg
import matplotlib.transforms
from cellprofiler.gui.cpfigure_tools import figure_to_image, only_display_image
figure = matplotlib.figure.Figure()
canvas = matplotlib.backends.backend_agg.FigureCanvasAgg(figure)
ax = figure.add_subplot(1,1,1)
self.draw(objects.segmented, ax,
self.get_saved_object_numbers(workspace))
#
# This is the recipe for just showing the axis
#
only_display_image(figure, objects.segmented.shape)
image_pixels = figure_to_image(figure, dpi=figure.dpi)
image = cpi.Image(image_pixels)
workspace.image_set.add(self.image_name.value, image)
if self.show_window:
workspace.display_data.labels = objects.segmented
workspace.display_data.object_numbers = \
self.get_saved_object_numbers(workspace)
def display(self, workspace, figure):
if hasattr(workspace.display_data, "labels"):
figure.set_subplots((1, 1))
subfigure = figure.figure
subfigure.clf()
ax = subfigure.add_subplot(1,1,1)
self.draw(workspace.display_data.labels, ax,
workspace.display_data.object_numbers)
else:
# We get here after running as a data tool
figure.figure.text(.5, .5, "Analysis complete",
ha="center", va="center")
def draw(self, labels, ax, object_numbers):
import matplotlib
indexer = np.zeros(len(object_numbers)+1,int)
indexer[1:] = object_numbers
#
# We want to keep the colors stable, but we also want the
# largest possible separation between adjacent colors. So, here
# we reverse the significance of the bits in the indices so
# that adjacent number (e.g. 0 and 1) differ by 128, roughly
#
pow_of_2 = 2**np.mgrid[0:8,0:len(indexer)][0]
bits = (indexer & pow_of_2).astype(bool)
indexer = np.sum(bits.transpose() * (2 ** np.arange(7,-1,-1)), 1)
recolored_labels = indexer[labels]
cm = matplotlib.cm.get_cmap(cpprefs.get_default_colormap())
cm.set_bad((0,0,0))
norm = matplotlib.colors.BoundaryNorm(range(256), 256)
img = ax.imshow(numpy.ma.array(recolored_labels, mask=(labels==0)),
cmap=cm, norm=norm)
if self.display_type == DT_COLOR_AND_NUMBER:
i,j = centers_of_labels(labels)
for n, x, y in zip(object_numbers, j, i):
if np.isnan(x) or np.isnan(y):
# This happens if there are missing labels
continue
ax.annotate(str(n), xy=(x,y),color='white',
arrowprops=dict(visible=False))
    def run_distance(self, workspace, objects):
        '''Track objects based on distance

        workspace - current image set's workspace
        objects - segmented objects for the current frame

        Builds two candidate mappings within pixel_radius - nearest new
        object for each old center and nearest old object for each new
        center - and passes both to map_objects.
        '''
        old_i, old_j = self.get_saved_coordinates(workspace)
        if len(old_i):
            # Distance transform of the new segmentation's background:
            # (i, j) gives, for every pixel, the coordinates of the
            # nearest labeled pixel.
            distances, (i,j) = distance_transform_edt(objects.segmented == 0,
                                                      return_indices=True)
            #
            # Look up the coordinates of the nearest new object (given by
            # the transform i,j), then look up the label at that coordinate
            # (objects.segmented[#,#])
            #
            new_object_numbers = objects.segmented[i[old_i, old_j],
                                                   j[old_i, old_j]]
            #
            # Mask out any objects at too great of a distance
            #
            new_object_numbers[distances[old_i, old_j] >
                               self.pixel_radius.value] = 0
            #
            # Do the same with the new centers and old objects
            #
            # +.5 rounds the float centers to the nearest integer pixel
            i,j = (centers_of_labels(objects.segmented)+.5).astype(int)
            old_labels = self.get_saved_labels(workspace)
            distances, (old_i,old_j) = distance_transform_edt(
                old_labels == 0,
                return_indices=True)
            old_object_numbers = old_labels[old_i[i, j],
                                            old_j[i, j]]
            # Zero out matches beyond the allowed radius
            old_object_numbers[distances[i, j] > self.pixel_radius.value] = 0
            self.map_objects(workspace,
                             new_object_numbers,
                             old_object_numbers,
                             i,j)
        else:
            # First frame of the group: no predecessors, so every new
            # object maps to "no parent" (zero).
            i,j = (centers_of_labels(objects.segmented)+.5).astype(int)
            count = len(i)
            self.map_objects(workspace, np.zeros((0,),int),
                             np.zeros(count,int), i,j)
        # Remember this frame's labels for the next frame's matching
        self.set_saved_labels(workspace, objects.segmented)
def run_lapdistance(self, workspace, objects):
'''Track objects based on distance'''
m = workspace.measurements
old_i, old_j = self.get_saved_coordinates(workspace)
n_old = len(old_i)
#
# Automatically set the cost of birth and death above
# that of the largest allowable cost.
#
costBorn = costDie = self.radius_limit.max * 1.10
kalman_states = self.get_kalman_states(workspace)
if kalman_states == None:
if self.static_model:
kalman_states = [ cpfilter.static_kalman_model()]
else:
kalman_states = []
if self.velocity_model:
kalman_states.append(cpfilter.velocity_kalman_model())
areas = fix(scipy.ndimage.sum(
np.ones(objects.segmented.shape), objects.segmented,
np.arange(1, np.max(objects.segmented) + 1,dtype=np.int32)))
areas = areas.astype(int)
model_types = np.array(
[m for m, s in ((KM_NO_VEL, self.static_model),
(KM_VEL, self.velocity_model)) if s], int)
if n_old > 0:
new_i, new_j = centers_of_labels(objects.segmented)
n_new = len(new_i)
i,j = np.mgrid[0:n_old, 0:n_new]
##############################
#
# Kalman filter prediction
#
#
# We take the lowest cost among all possible models
#
minDist = np.ones((n_old, n_new)) * self.radius_limit.max
d = np.ones((n_old, n_new)) * np.inf
sd = np.zeros((n_old, n_new))
# The index of the Kalman filter used: -1 means not used
kalman_used = -np.ones((n_old, n_new), int)
for nkalman, kalman_state in enumerate(kalman_states):
assert isinstance(kalman_state, cpfilter.KalmanState)
obs = kalman_state.predicted_obs_vec
dk = np.sqrt((obs[i,0] - new_i[j])**2 +
(obs[i,1] - new_j[j])**2)
noise_sd = np.sqrt(np.sum(kalman_state.noise_var[:,0:2], 1))
radius = np.maximum(np.minimum(noise_sd * self.radius_std.value,
self.radius_limit.max),
self.radius_limit.min)
is_best = ((dk < d) & (dk < radius[:, np.newaxis]))
d[is_best] = dk[is_best]
minDist[is_best] = radius[i][is_best]
kalman_used[is_best] = nkalman
minDist = np.maximum(np.minimum(minDist, self.radius_limit.max),
self.radius_limit.min)
#
#############################
#
# Linear assignment setup
#
n = len(old_i)+len(new_i)