Add hotkey to set visibility in Annotator

jgraving committed Feb 17, 2020
1 parent 97b4598 commit 91b0f76d379db1c50a4e520a4672c7a84fc3ea0b
Showing with 42 additions and 38 deletions.
  1. +1 −14 README.md
  2. +5 −0 deepposekit/annotate/gui/Annotator.py
  3. +34 −24 deepposekit/annotate/gui/GUI.py
  4. +2 −0 deepposekit/annotate/utils/hotkeys.py
@@ -22,8 +22,6 @@ Localization (without tracking) can also be achieved with deep learning software

[Check out our paper](https://doi.org/10.7554/eLife.47994) to find out more.

**NOTE:** This software is still in early-release development. *Expect some adventures.*

<p align="center">
<img src="https://github.com/jgraving/jgraving.github.io/blob/master/files/images/zebra.gif" height="256px">
<img src="https://github.com/jgraving/jgraving.github.io/blob/master/files/images/locust.gif" height="256px">
@@ -144,17 +142,7 @@ If you use DeepPoseKit for your research please cite [our open-access paper](htt
url={https://doi.org/10.7554/eLife.47994},
}
You can also read [our open-access preprint](http://preprint.deepposekit.org):
@article{graving2019preprint,
title={DeepPoseKit, a software toolkit for fast and robust animal pose estimation using deep learning},
author={Graving, Jacob M and Chae, Daniel and Naik, Hemal and Li, Liang and Koger, Benjamin and Costelloe, Blair R and Couzin, Iain D},
journal={bioRxiv},
pages={620245},
year={2019},
publisher={Cold Spring Harbor Laboratory},
url={https://doi.org/10.1101/620245}
}
You can also read [our open-access preprint](http://preprint.deepposekit.org).
If you use the [imgaug package](https://github.com/aleju/imgaug) for data augmentation, please also consider [citing it](https://github.com/aleju/imgaug/blob/master/README.md#citation).
@@ -163,7 +151,6 @@ If you [use data](https://github.com/jgraving/DeepPoseKit#i-already-have-annotat
Please also consider citing the relevant references for the pose estimation model(s) used in your research, which can be found in the documentation (i.e., [`StackedDenseNet`](http://jakegraving.com/DeepPoseKit/html/deepposekit/models/StackedDenseNet.html#references), [`StackedHourglass`](http://jakegraving.com/DeepPoseKit/html/deepposekit/models/StackedHourglass.html#references), [`DeepLabCut`](http://jakegraving.com/DeepPoseKit/html/deepposekit/models/DeepLabCut.html#references), [`LEAP`](http://jakegraving.com/DeepPoseKit/html/deepposekit/models/LEAP.html#references)).
# News
- **October 2019:** Our paper describing DeepPoseKit is published at eLife! (http://paper.deepposekit.org)
- **September 2019**:
- Nature News covers DeepPoseKit: [Deep learning powers a motion-tracking revolution](http://doi.org/10.1038/d41586-019-02942-5)
@@ -307,6 +307,11 @@ def _data(self):
    self.skeleton["annotated"] = False
elif self.key is keys.F:
    self.skeleton["annotated"] = True
elif self.key is keys.V:
    if self.skeleton.loc[self.idx, ["x", "y"]].isnull()[0]:
        self.skeleton.loc[self.idx, ["x", "y"]] = -1
    else:
        self.skeleton.loc[self.idx, ["x", "y"]] = np.nan
elif self.key in [keys.Q, keys.ESC]:
    self._save()
    print("Saved")
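The new `keys.V` branch toggles the visibility of the currently selected keypoint: if its coordinates are already NaN (hidden), they are reset to -1 as an unplaced placeholder; otherwise they are set to NaN. Below is a minimal standalone sketch of that toggle semantics, assuming the skeleton is a pandas DataFrame with float `x`/`y` columns; the `toggle_visibility` helper is hypothetical and not part of the Annotator.

```python
import numpy as np
import pandas as pd

# Hypothetical stand-in for Annotator.skeleton: one row per keypoint,
# float x/y columns so NaN can mark a hidden keypoint.
skeleton = pd.DataFrame(
    {"name": ["head", "tail"], "x": [10.0, 42.0], "y": [20.0, 7.0]}
)

def toggle_visibility(skeleton, idx):
    """Mimic the V hotkey: NaN coordinates <-> a -1 placeholder position."""
    if skeleton.loc[idx, ["x", "y"]].isnull().iloc[0]:
        skeleton.loc[idx, ["x", "y"]] = -1       # restore as an unplaced keypoint
    else:
        skeleton.loc[idx, ["x", "y"]] = np.nan   # hide the keypoint

toggle_visibility(skeleton, 0)  # head is now hidden (NaN, NaN)
toggle_visibility(skeleton, 0)  # head is back as a (-1, -1) placeholder
```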
@@ -324,6 +324,9 @@ def _draw_text(self):
else:
    color = self.colors[self.idx] # (34, 87, 255)
border_color = np.bitwise_not(color)
if self.skeleton.loc[self.idx, ["x", "y"]].isnull()[0]:
    color = (127, 127, 127)

# color = (3, 255, 118)
thickness = 8

@@ -332,8 +335,11 @@ def _draw_text(self):
    color = (254, 79, 48)
else:
    color = self.colors[idx] # (34, 87, 255)
if self.skeleton.loc[text_idx, ["x", "y"]].isnull()[0]:
    color = (127, 127, 127)
thickness = 2
border_color = (0, 0, 0)


text = self.skeleton.loc[text_idx, "name"]
loc = self.text_locs[(idx + len(self.text_locs) // 4) % len(self.text_locs)]
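Both `_draw_text` hunks apply the same rule: a keypoint whose coordinates are NaN gets its label drawn in neutral grey rather than its palette colour. A compact sketch of that rule, with `palette` standing in for `self.colors` (the `label_color` helper is hypothetical, not part of GUI.py):

```python
def label_color(skeleton, idx, palette):
    """Grey out labels for hidden (NaN) keypoints, otherwise use the palette."""
    if skeleton.loc[idx, ["x", "y"]].isnull().iloc[0]:
        return (127, 127, 127)
    return palette[idx]
```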
@@ -447,29 +453,31 @@ def _draw_points(self):
"""
for idx in self.keypoint_index:
    if idx != self.idx:
        if np.all(self.skeleton.loc[:, "annotated"]):
            color = self.colors[idx]
            inv_color = (254, 79, 48)
            # inv_color = None
        else:
            # color = (34, 87, 255)
            color = self.colors[idx]
            inv_color = self.inv_colors[idx]
        center = self._get_scaled_coords(idx)
        radius = 5
        if inv_color is not None:
            self._draw_point(center, radius, inv_color, 2)
        self._draw_point(center, radius, color)

center = self._get_scaled_coords(self.idx)
radius = 8
# color = (3, 255, 118)
color = self.colors[self.idx]
inv_color = self.inv_colors[self.idx]
self._draw_point(center, radius, inv_color, 2)
self._draw_crosshairs(center, radius + 3, inv_color, 2)
self._draw_point(center, radius, color)
self._draw_crosshairs(center, radius + 3, color, 1)
        if not self.skeleton.loc[idx, ["x", "y"]].isnull()[0]:
            if np.all(self.skeleton.loc[:, "annotated"]):
                color = self.colors[idx]
                inv_color = (254, 79, 48)
                # inv_color = None
            else:
                # color = (34, 87, 255)
                color = self.colors[idx]
                inv_color = self.inv_colors[idx]
            center = self._get_scaled_coords(idx)
            radius = 5
            if inv_color is not None:
                self._draw_point(center, radius, inv_color, 2)
            self._draw_point(center, radius, color)

if not self.skeleton.loc[self.idx, ["x", "y"]].isnull()[0]:
    center = self._get_scaled_coords(self.idx)
    radius = 8
    # color = (3, 255, 118)
    color = self.colors[self.idx]
    inv_color = self.inv_colors[self.idx]
    self._draw_point(center, radius, inv_color, 2)
    self._draw_crosshairs(center, radius + 3, inv_color, 2)
    self._draw_point(center, radius, color)
    self._draw_crosshairs(center, radius + 3, color, 1)

def _draw_lines(self):
""" Draw lines
@@ -483,7 +491,9 @@ def _draw_lines(self):
color = (34, 87, 255)
for idx in self.keypoint_index:
    tree = self.skeleton.loc[idx, "tree"]
    if tree >= 0:
    if (tree >= 0 and
            not self.skeleton.loc[tree, ["x", "y"]].isnull()[0] and
            not self.skeleton.loc[idx, ["x", "y"]].isnull()[0]):
        pt1 = self._get_scaled_coords(idx)
        pt2 = self._get_scaled_coords(tree)
        cv2.line(
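`_draw_lines` now skips any edge of the skeleton graph whose endpoints include a hidden keypoint. A sketch of that guard as a standalone predicate (the `should_draw_edge` name is hypothetical; `tree` holds the index of the parent keypoint, or -1 for a root node with no incoming edge):

```python
def should_draw_edge(skeleton, idx):
    """Draw the limb from keypoint `idx` to its parent only if both are placed."""
    tree = skeleton.loc[idx, "tree"]
    return (
        tree >= 0
        and not skeleton.loc[tree, ["x", "y"]].isnull().iloc[0]
        and not skeleton.loc[idx, ["x", "y"]].isnull().iloc[0]
    )
```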
@@ -20,6 +20,8 @@
A = ord("a")
S = ord("s")
D = ord("d")
V = ord("v")

SPACE = ord(" ")


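`hotkeys.py` stores each hotkey as the integer returned by `ord()`, so a pressed key reported by the GUI backend can be compared directly against constants like `keys.V`. A minimal illustrative sketch (this loop is not the Annotator's actual event handling, which lives in GUI.py):

```python
V = ord("v")  # 118, the constant added above

# Stand-in for a key code coming back from the GUI event loop,
# e.g. something like cv2.waitKey(1) & 0xFF in an OpenCV window.
key = ord("v")
if key == V:
    print("toggle visibility of the selected keypoint")
```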