Skip to content

Commit

Permalink
Added background subtraction to tracker, and adjusted some parameters…
Browse files Browse the repository at this point in the history
…. (Still a little ad hoc, but not terribly so.) Tracking performance has improved. Added a couple more tests.
  • Loading branch information
Nathaniel M Guy committed May 30, 2015
1 parent 362f016 commit 7338155
Show file tree
Hide file tree
Showing 4 changed files with 84 additions and 40 deletions.
42 changes: 42 additions & 0 deletions tests & playing around/background_subtract_test.py
@@ -0,0 +1,42 @@
"""Quick experiment: isolate moving foreground (juggling balls) with MOG2
background subtraction, and display the masked frames until ESC is pressed."""

import cv2
import numpy as np

# Open up a video from which to get frames
cap = cv2.VideoCapture('juggling.mp4')

# MOG2 background subtractor: adaptively models the static background so that
# only moving objects survive in the foreground mask.
fgbg = cv2.createBackgroundSubtractorMOG2()

while True:
    # Read a frame; ret is False once the video is exhausted (or on a read
    # failure), in which case frame is None and must not be passed to apply().
    ret, frame = cap.read()
    if not ret or frame is None:
        break

    fgmask = fgbg.apply(frame)
    # Keep only the pixels the subtractor classified as foreground.
    res = cv2.bitwise_and(frame, frame, mask=fgmask)

    cv2.imshow('frame', res)
    # cv2.imshow('fgmask', fgmask)

    k = cv2.waitKey(5) & 0xFF
    if k == 27:
        # User hit ESC
        break

cap.release()
cv2.destroyAllWindows()
2 changes: 1 addition & 1 deletion tests & playing around/oftest.py
@@ -1,7 +1,7 @@
import numpy as np
import cv2

cap = cv2.VideoCapture(0)
cap = cv2.VideoCapture('juggling.mp4')

# params for ShiTomasi corner detection
feature_params = dict( maxCorners = 100,
Expand Down
31 changes: 31 additions & 0 deletions tests & playing around/oftest2.py
@@ -0,0 +1,31 @@
"""Dense-optical-flow experiment: visualise Farneback flow between consecutive
frames as an HSV image (hue = flow direction, value = flow magnitude)."""

import cv2
import numpy as np

cap = cv2.VideoCapture("juggling.mp4")

# Seed the flow loop with the first frame, converted to grayscale.
ret, frame1 = cap.read()
prev_gray = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)

# Reusable HSV canvas for the flow visualisation; saturation pinned to max.
hsv = np.zeros_like(frame1)
hsv[..., 1] = 255

while True:
    # Stop cleanly at end of video instead of crashing in cvtColor(None).
    ret, frame2 = cap.read()
    if not ret or frame2 is None:
        break
    # 'next_gray' rather than 'next' so the builtin isn't shadowed.
    next_gray = cv2.cvtColor(frame2, cv2.COLOR_BGR2GRAY)

    # Dense Farneback optical flow between the two grayscale frames.
    flow = cv2.calcOpticalFlowFarneback(prev_gray, next_gray, None,
                                        0.5, 3, 15, 3, 5, 1.2, 0)

    # Convert the (dx, dy) field to polar form; map angle (radians) into
    # OpenCV's 8-bit hue range [0, 180) and magnitude into [0, 255].
    mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
    hsv[..., 0] = ang * 180 / np.pi / 2
    hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
    rgb = cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)

    cv2.imshow('frame2', rgb)
    k = cv2.waitKey(30) & 0xff
    if k == 27:
        # ESC quits.
        break
    elif k == ord('s'):
        # 's' snapshots the current frame and its flow visualisation.
        cv2.imwrite('opticalfb.png', frame2)
        cv2.imwrite('opticalhsv.png', rgb)
    prev_gray = next_gray

cap.release()
cv2.destroyAllWindows()
49 changes: 10 additions & 39 deletions tracker.py
Expand Up @@ -96,6 +96,8 @@ def main():
fourcc1 = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output.avi',fourcc1, 20.0, (640,480))

fgbg = cv2.createBackgroundSubtractorMOG2()

while(cap.isOpened()):
frame = getFrame(cap)
if frame is None:
Expand All @@ -105,8 +107,12 @@ def main():

blurredFrame = blur(frame)

# Subtract background (makes isolation of balls more effective, in combination with thresholding)
fgmask = fgbg.apply(frame)
foreground = cv2.bitwise_and(frame,frame,mask = fgmask)

# Convert to HSV
hsvBlurredFrame = cv2.cvtColor(blurredFrame, cv2.COLOR_BGR2HSV)
hsvBlurredFrame = cv2.cvtColor(foreground, cv2.COLOR_BGR2HSV)

cv2.imshow('hsvBlurredFrame', hsvBlurredFrame)

Expand All @@ -117,8 +123,8 @@ def main():

# Open to remove small elements/noise
kernel = np.ones((5,5)).astype(np.uint8)
thresholdImage = cv2.erode(thresholdImage, kernel)
thresholdImage = cv2.dilate(thresholdImage, kernel)
# thresholdImage = cv2.erode(thresholdImage, kernel)
# thresholdImage = cv2.dilate(thresholdImage, kernel)

# cv2.imshow('thresholdImage', thresholdImage)

Expand All @@ -129,11 +135,6 @@ def main():
# Break into clusters using k-means clustering
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
compactness, labels, centers = cv2.kmeans(points, 2, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS)
# print compactness


# This line helps things not crash, for some reason
centers = centers.tolist()

# Don't let the blue and white marked balls get mixed up
distBC0toC0 = np.sqrt((ballCenters[0][0] - centers[0][1])**2 + (ballCenters[0][1] - centers[0][0])**2)
Expand Down Expand Up @@ -202,22 +203,10 @@ def main():

positions = getTrajectory(ballCenters[0], ballVelocities[0], (0, g), timeStepSize, eulerSteps)

# print positions
# print ''

for i, position in enumerate(positions):
height, width, depth = frameCopy.shape
if (position[0] < width) and (position[1] < height):

# blankImage = np.zeros((height,width,3), np.uint8)
# blankImageAlpha = cv2.cvtColor(blankImage, cv2.COLOR_BGR2RGBA)
# # Alpha blending depending on how far along this is
# alpha = i / len(positions)
ballColor = (255,55,55)

# print 'ballVelocities[0]', ballVelocities[0]
# print 'ballCenters[0]', ballCenters[0]

cv2.circle(frame, (int(position[0]), int(position[1])), 2, ballColor, thickness=2)

positions = getTrajectory(ballCenters[1], ballVelocities[1], (0, g), timeStepSize, eulerSteps)
Expand All @@ -226,19 +215,9 @@ def main():
height, width, depth = frameCopy.shape
if (position[0] < width) and (position[1] < height):

# blankImage = np.zeros((height,width,3), np.uint8)
# blankImageAlpha = cv2.cvtColor(blankImage, cv2.COLOR_BGR2RGBA)
# # Alpha blending depending on how far along this is
# alpha = i / len(positions)
ballColor = (255,255,255)

# print 'ballVelocities[1]', ballVelocities[1]
# print 'ballCenters[1]', ballCenters[1]

cv2.circle(frame, (int(position[0]), int(position[1])), 2, ballColor, thickness=2)

# points = rejectOutlierPoints(points)

# Find location of red ball
color = 'red'
colorBounds = hsvColorBounds[color]
Expand All @@ -259,9 +238,6 @@ def main():
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
compactness, labels, centers = cv2.kmeans(points, 1, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS)

# This line helps things not crash, for some reason
centers = centers.tolist()

# Find the velocity for the third ball and update its position
if averageWithLastVelocity:
estimatedVelocity = estimateVelocity(ballCenters[2], (centers[0][1], centers[0][0]))
Expand All @@ -279,21 +255,16 @@ def main():

# Draw position marker
cv2.circle(frame, tuple(ballCenters[2]), 6, (50,200,50), thickness=6)

# Draw velocity vector
# cv2.arrowedLine(frame, tuple(ballCenters[2]), (int(ballCenters[2][0]+ballVelocities[2][0]*2), int(ballCenters[2][1]+ballVelocities[2][1]*2)), (0,255,255), 2, 2, 0, 0.1)

positions = getTrajectory(ballCenters[2], ballVelocities[2], (0, g), timeStepSize, eulerSteps)
# print positions
# print ''

for i, position in enumerate(positions):
height, width, depth = frameCopy.shape
if (position[0] < width) and (position[1] < height):

# blankImage = np.zeros((height,width,3), np.uint8)
# blankImageAlpha = cv2.cvtColor(blankImage, cv2.COLOR_BGR2RGBA)
# # Alpha blending depending on how far along this is
# alpha = i / len(positions)
ballColor = (105,255,105)

cv2.circle(frame, (int(position[0]), int(position[1])), 2, ballColor, thickness=2)
Expand Down

0 comments on commit 7338155

Please sign in to comment.