demo.py - first step towards integration (single threaded)
martind committed Mar 2, 2015
1 parent 416670d commit abcff79
Showing 4 changed files with 109 additions and 20 deletions.
3 changes: 2 additions & 1 deletion README.md
@@ -71,4 +71,5 @@ ARDrone2. You can use this script also for detail verbose dump of recorded
* apyros folder --- logging tools, shared among several Python-driven robots.
  This folder is planned to be moved into a separate repository.


* demo.py --- integration of image processing with flight control (work in
progress)
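
For orientation, here is a minimal sketch of how the cap-detection helpers touched by this commit can be used on a single image. This is not part of the commit; the color file name "cap-colors.txt" is taken from demo.py below, and the input image "frame.jpg" is only a placeholder:

    import cv2
    from capdet import detectTwoColors, loadColors

    capColors = loadColors( "cap-colors.txt" )   # color triplets, one per line; '#' starts a comment
    img = cv2.imread( "frame.jpg" )              # placeholder test image
    imgResult, detected = detectTwoColors( img, capColors )
    for (cx, cy), area, frac in detected:        # centroid, contour area, orange fraction
        print (cx, cy), area, frac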
30 changes: 20 additions & 10 deletions capdet.py
@@ -32,6 +32,7 @@ def detectTwoColors( img, capColors ):

    cmpRG = imgR > imgG
    contours, hierarchy = cv2.findContours( binary, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE )
    result = []
    for cnt in contours:
        area = cv2.contourArea(cnt, oriented=True)
        if area < 0:
@@ -41,29 +42,38 @@ def detectTwoColors( img, capColors ):
        mask = maskTmp > 0
        orange = np.count_nonzero(np.logical_and( mask, cmpRG ))
        # print abs(area), orange, orange/float(abs(area))
    return imgResult
        frac = orange/float(abs(area))
        if abs(area) > 20 and (0.4 < frac < 0.7):
            M = cv2.moments(cnt)
            cx = int(M['m10']/M['m00'])
            cy = int(M['m01']/M['m00'])
            result.append( ((cx,cy), abs(area), frac) )

    return imgResult, result


def onmouse(event, x, y, flags, param):
    if event == cv2.EVENT_LBUTTONDOWN:
        col = img[y][x]
        print col
        capColors.append( col )
        cv2.imshow('result', detectTwoColors(img, capColors))

        cv2.imshow('result', detectTwoColors(img, capColors)[0])

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print __doc__
        sys.exit(2)

def loadColors( filename ):
    capColors = []
    for line in open(sys.argv[1]):
    for line in open( filename ):
        line = line.split('#')[0]
        line = line.replace(',',' ').translate(None,"[]")
        if len(line.split()) == 3:
            capColors.append( [int(x) for x in line.split()] )
    return capColors

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print __doc__
        sys.exit(2)

    capColors = loadColors( sys.argv[1] )
    filename = sys.argv[2]

    # convert navdata to video if necessary
@@ -82,7 +92,7 @@ def onmouse(event, x, y, flags, param):
    pause = 0
    while ret:
        cv2.imshow('image', img)
        cv2.imshow('result', detectTwoColors( img, capColors ))
        cv2.imshow('result', detectTwoColors( img, capColors )[0])
        c = cv2.waitKey( pause )
        if c == 27: # ESC
            break
69 changes: 69 additions & 0 deletions demo.py
@@ -0,0 +1,69 @@
#!/usr/bin/python
"""
ARDrone3 demo with autonomous navigation to a two-color Parrot Cap
usage:
./demo.py <task> [<metalog> [<F>]]
"""
import sys
import cv2

from bebop import Bebop
from video import VideoFrames
from capdet import detectTwoColors, loadColors

# this will be in a new separate repository as a common library for robotika Python-powered robots
from apyros.metalog import MetaLog, disableAsserts
from apyros.manual import myKbhit, ManualControlException

TMP_VIDEO_FILE = "video.bin"


g_vf = None

def videoCallback( data ):
    global g_vf
    g_vf.append( data )
    frame = g_vf.getFrame()
    if frame:
        print "Video", len(frame)
        # workaround for a single frame
        f = open( TMP_VIDEO_FILE, "wb" )
        f.write( frame )
        f.close()
        cap = cv2.VideoCapture( TMP_VIDEO_FILE )
        ret, img = cap.read()
        cap.release()
        if ret:
            img, detected = detectTwoColors( img, loadColors("cap-colors.txt") )
            print "Detected", detected
            # cv2.imshow('image', img)
            # key = cv2.waitKey(10)



def demo( drone ):
    print "Follow 2-color cap ..."
    global g_vf
    g_vf = VideoFrames( onlyIFrames=True, verbose=False )
    drone.videoCbk = videoCallback
    drone.videoEnable()
    for i in xrange(100):
        print i,
        drone.update( cmd=None )

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print __doc__
        sys.exit(2)
    metalog=None
    if len(sys.argv) > 2:
        metalog = MetaLog( filename=sys.argv[2] )
    if len(sys.argv) > 3 and sys.argv[3] == 'F':
        disableAsserts()

    drone = Bebop( metalog=metalog )
    demo( drone )
    print "Battery:", drone.battery

# vim: expandtab sw=4 ts=4

27 changes: 18 additions & 9 deletions video.py
@@ -10,9 +10,11 @@
from navdata import cutPacket,videoAckRequired

class VideoFrames:
    def __init__( self ):
    def __init__( self, onlyIFrames=False, verbose=True ):
        self.onlyIFrames = onlyIFrames
        self.verbose = verbose
        self.currentFrameNumber = None
        self.parts = []
        self.parts = None
        self.frames = []

    def append( self, packet ):
@@ -21,20 +23,27 @@ def append( self, packet ):
        frameNumber, frameFlags, fragmentNumber, fragmentsPerFrame = struct.unpack("<HBBB", packet[7:12])
        data = packet[12:]
        if frameNumber != self.currentFrameNumber:
            if self.currentFrameNumber is not None:
            if self.currentFrameNumber is not None and self.parts is not None:
                s = ""
                for i,d in enumerate(self.parts):
                    if d is None:
                        print (self.currentFrameNumber, i, len(self.parts))
                        if self.verbose:
                            print (self.currentFrameNumber, i, len(self.parts))
                        continue
                    s += d
                self.frames.append( s )
            print "processing", frameNumber
            if self.verbose:
                print "processing", frameNumber
            self.currentFrameNumber = frameNumber
            self.parts = [None]*fragmentsPerFrame
        if self.parts[ fragmentNumber ] is not None:
            print "duplicity", (frameNumber, fragmentNumber)
        self.parts[ fragmentNumber ] = data
            if self.onlyIFrames and frameFlags != 1:
                self.parts = None
            else:
                self.parts = [None]*fragmentsPerFrame
        if not self.onlyIFrames or frameFlags == 1:
            if self.parts[ fragmentNumber ] is not None:
                if self.verbose:
                    print "duplicity", (frameNumber, fragmentNumber)
            self.parts[ fragmentNumber ] = data

    def getFrame( self ):
        if len(self.frames) == 0:
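
For reference, the extended VideoFrames interface is driven the same way demo.py's videoCallback above drives it; a condensed sketch, where the packet source and the frame consumer are hypothetical:

    from video import VideoFrames

    vf = VideoFrames( onlyIFrames=True, verbose=False )
    for packet in video_packets():      # hypothetical iterator over raw video packets from the drone
        vf.append( packet )             # reassembles fragments; non-I-frames are dropped with onlyIFrames=True
        frame = vf.getFrame()
        if frame:                       # falsy while no complete frame has been assembled
            handle_frame( frame )       # hypothetical consumer, e.g. the temp-file decode in videoCallback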
