# -- Page-scrape residue (GitHub file-view chrome), not part of the program: --
# Switch branches/tags | Nothing to show | Find file Copy path
# Fetching contributors... | Cannot retrieve contributors at this time
# 77 lines (63 sloc) 2.98 KB
from utils import detector_utils as detector_utils
import cv2
import tensorflow as tf
import datetime
import argparse
# Load the frozen hand-detection graph once at module level; `sess` is the
# TensorFlow session reused for every inference call in the loop below.
detection_graph, sess = detector_utils.load_inference_graph()

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-sth', '--scorethreshold', dest='score_thresh', type=float,
                        default=0.4, help='Score threshold for displaying bounding boxes')
    parser.add_argument('-fps', '--fps', dest='fps', type=int,
                        default=1, help='Show FPS on detection/display visualization')
    parser.add_argument('-src', '--source', dest='video_source',
                        default=0, help='Device index of the camera.')
    parser.add_argument('-wd', '--width', dest='width', type=int,
                        default=480, help='Width of the frames in the video stream.')
    parser.add_argument('-ht', '--height', dest='height', type=int,
                        default=640, help='Height of the frames in the video stream.')
    parser.add_argument('-ds', '--display', dest='display', type=int,
                        default=1, help='Display the detected images using OpenCV. This reduces FPS')
    args = parser.parse_args()

    # BUG FIX: the camera index was hard-coded to 1, silently ignoring the
    # --source argument parsed above.
    cap = cv2.VideoCapture(args.video_source)
    #cap.set(cv2.CAP_PROP_FRAME_WIDTH, args.width)
    #cap.set(cv2.CAP_PROP_FRAME_HEIGHT, args.height)

    # BUG FIX: `start_time` had no right-hand side; FPS is measured from here.
    start_time = datetime.datetime.now()
    num_frames = 0
    # Actual capture dimensions (may differ from the requested width/height).
    im_width, im_height = (cap.get(3), cap.get(4))
    # max number of hands we want to detect/track
    num_hands_detect = 2
    cv2.namedWindow('Single-Threaded Detection', cv2.WINDOW_NORMAL)

    while True:
        # BUG FIX: the frame grab lost its right-hand side in the scrape.
        # Expand dimensions since the model expects images to have shape:
        # [1, None, None, 3] (detector_utils handles the expansion).
        ret, image_np = cap.read()
        # image_np = cv2.flip(image_np, 1)
        try:
            # Model expects RGB; OpenCV delivers BGR.
            image_np = cv2.cvtColor(image_np, cv2.COLOR_BGR2RGB)
        except cv2.error:
            # cap.read() yielded no frame (image_np is None) -- skip it
            # rather than feed None into the detector.
            print("Error converting to RGB")
            continue

        # actual detection
        boxes, scores = detector_utils.detect_objects(
            image_np, detection_graph, sess)

        # BUG FIX: the callee was dropped by the scrape -- only its argument
        # list survived on the original line. Draw bounding boxes in place.
        detector_utils.draw_box_on_image(
            num_hands_detect, args.score_thresh, scores, boxes,
            im_width, im_height, image_np)

        # Calculate Frames per second (FPS)
        num_frames += 1
        # BUG FIX: the elapsed-time expression was truncated mid-line.
        elapsed_time = (datetime.datetime.now() - start_time).total_seconds()
        fps = num_frames / elapsed_time

        if (args.display > 0):
            # Display FPS on frame
            if (args.fps > 0):
                # BUG FIX: the draw_fps_on_image callee was dropped; restore it.
                detector_utils.draw_fps_on_image(
                    "FPS : " + str(int(fps)), image_np)
            cv2.imshow('Single-Threaded Detection', cv2.cvtColor(
                image_np, cv2.COLOR_RGB2BGR))
            # BUG FIX: the quit check had an empty body -- tear down the
            # window and leave the loop on 'q'.
            if cv2.waitKey(25) & 0xFF == ord('q'):
                cv2.destroyAllWindows()
                break
        else:
            print("frames processed: ", num_frames,
                  "elapsed time: ", elapsed_time, "fps: ", str(int(fps)))

    # Release the capture device on exit.
    cap.release()