Commit

initial push to GitHub
Chris Sharp committed Nov 22, 2017
0 parents commit 59d2878
Showing 105 changed files with 8,344 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -0,0 +1,2 @@
.DS_Store
xcuserdata
36 changes: 36 additions & 0 deletions README.md
@@ -0,0 +1,36 @@
# Tensorflow iOS Object Detection

An object detection application for iOS using Tensorflow and models pre-trained on the COCO dataset. Video frames are captured and inference is run locally using one of the three provided models: ssd_mobilenet_v1_coco, ssd_inception_v2_coco, and faster_rcnn_resnet101_coco. Both Swift and Objective-C versions of the project are included.

![cat image](images/cat.png)

## Building

* Make sure you have automake and libtool installed. Using Homebrew:

`brew install automake libtool`


* Clone the tensorflow source repo on GitHub

`git clone https://github.com/tensorflow/tensorflow`


* We need to build the tensorflow components with ANDROID_TYPES_FULL. In the terminal type:

`export ANDROID_TYPES="-D__ANDROID_TYPES_FULL__"`


* Build the tensorflow libraries for iOS. Go to the root of the tensorflow repo you just cloned and run:

`tensorflow/contrib/makefile/build_all_ios.sh`

Go get a coffee. This can take a while; on my MacBook it took almost 2 hours.


* Open either the Swift or Objective-C project in this repo and edit the **tensorflow.xconfig** file to point to the folder where you cloned the tensorflow repo:

`TENSORFLOW_ROOT=/Users/username/Development/tensorflow`


* Compile the Xcode project and run. Since we need a camera, this will only run on a device.
Binary file added images/cat.png
8 changes: 8 additions & 0 deletions objC/App/AppDelegate.h
@@ -0,0 +1,8 @@

@import UIKit;

@interface AppDelegate : UIResponder <UIApplicationDelegate>

@property (nonatomic) UIWindow *window;

@end
5 changes: 5 additions & 0 deletions objC/App/AppDelegate.m
@@ -0,0 +1,5 @@

#import "AppDelegate.h"

@implementation AppDelegate
@end
17 changes: 17 additions & 0 deletions objC/App/BoundingBoxView.h
@@ -0,0 +1,17 @@
//
// BoundingBoxView.h
// tensorflowiOS
//
// Created by Sharp, Chris T on 10/9/17.
// Copyright © 2017 Apple. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface BoundingBoxView : UIView

@property (nonatomic) NSMutableArray* labels;

- (void) updateBoundingBoxes: (NSArray*) boxes;

@end
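
BoundingBoxView exposes a single update method, `updateBoundingBoxes:`, which takes an array of `TensorflowPrediction` objects (normalized `left`/`top`/`right`/`bottom` coordinates plus a `label`, as consumed in BoundingBoxView.m below). A minimal sketch of a call site follows; the view controller, its `boundingBoxView` property, and `didReceivePredictions:` are hypothetical names, not part of this commit.

```objc
// Hypothetical consumer of BoundingBoxView; only BoundingBoxView and
// updateBoundingBoxes: come from this commit, the rest is illustrative.
#import <UIKit/UIKit.h>
#import "BoundingBoxView.h"

@interface DetectionViewController : UIViewController
// Overlay view sitting on top of the camera preview.
@property (nonatomic) BoundingBoxView *boundingBoxView;
@end

@implementation DetectionViewController

// Called whenever a new array of TensorflowPrediction objects is available.
- (void)didReceivePredictions:(NSArray *)predictions
{
    // UIKit drawing must happen on the main thread; inference usually
    // finishes on a background queue.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.boundingBoxView updateBoundingBoxes:predictions];
    });
}

@end
```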
142 changes: 142 additions & 0 deletions objC/App/BoundingBoxView.m
@@ -0,0 +1,142 @@
//
// BoundingBoxView.m
// tensorflowiOS
//
// Created by Sharp, Chris T on 10/9/17.
// Copyright © 2017 Apple. All rights reserved.
//

#import "BoundingBoxView.h"
#import "TensorflowPrediction.h"

const CGFloat BoundingBoxLineWidth = 3.5f;

@interface BoundingBoxView()
@property (nonatomic) NSArray *boxesToBeErased;
@property (nonatomic) NSArray *boxesToBeDrawn;
@end

@implementation BoundingBoxView

- (instancetype)initWithCoder:(NSCoder *)coder
{
self = [super initWithCoder:coder];
if (self)
{
//
// Maintain a list of UILabels for easy removal from superView.
//
self.labels = [[NSMutableArray alloc] init];
}
return self;
}


//
// In drawRect we have a clear UIView that we draw green bounding boxes on.
// As a new list of bounding boxes comes in, we erase the old boxes and draw the new ones.
// Since this view is just a layer over the video preview, the bounding boxes could be a few
// frames behind and may not align with the object underneath. This will likely
// be an issue until Tensorflow processing is as fast as the video preview's frame capture.
//
- (void)drawRect:(CGRect)rect
{
//
// Our drawing context
//
CGContextRef context = UIGraphicsGetCurrentContext();

//
// The width of the bounding box lines.
//
CGContextSetLineWidth(context, BoundingBoxLineWidth);

//
// The fill color of the bounding box is always clear
//
CGContextSetRGBFillColor(context, 1.0, 1.0, 1.0, 0.0);

//
// Erase boxes from the previous frame
//
if (self.boxesToBeErased)
{
for (TensorflowPrediction* pred in self.boxesToBeErased)
{
// Erase the previous bounding box by using a clear stroke color
CGContextSetRGBStrokeColor(context, 1.0, 1.0, 1.0, 0.0);

// Calculate box dimensions of box to be erased.
CGFloat x = pred.left * self.frame.size.width;
CGFloat y = pred.top * self.frame.size.height;
CGFloat w = (pred.right * self.frame.size.width) - x;
CGFloat h = (pred.bottom * self.frame.size.height) - y;
CGRect rectangle = CGRectMake(x, y, w, h);

//Erase it. (draw clear pixels over the green)
CGContextFillRect(context, rectangle);
CGContextStrokeRect(context, rectangle);
}

//
// Remove existing labels too.
//
for (UILabel * label in self.labels)
{
[label removeFromSuperview];
}
[self.labels removeAllObjects];
self.boxesToBeErased = nil;
}

//
// Draw newly predicted boxes
//
for (TensorflowPrediction* pred in self.boxesToBeDrawn)
{
//
// Calculate the box dimensions. The box dimensions are given
// as normalized values. Because this view has the same dimensions
// as the original image multiplying by width and height gives the
// correct location for the bounding box.
//
CGFloat x = pred.left * self.frame.size.width;
CGFloat y = pred.top * self.frame.size.height;
CGFloat w = (pred.right * self.frame.size.width) - x;
CGFloat h = (pred.bottom * self.frame.size.height) - y;
CGRect rectangle = CGRectMake(x, y, w, h);

// Draw with a green stroke.
CGContextSetRGBStrokeColor(context, 0.0, 1.0, 0.0, 0.75);
CGContextFillRect(context, rectangle);
CGContextStrokeRect(context, rectangle);

// Add the label to the upper left of the bounding box
UILabel * label = [[UILabel alloc] initWithFrame:CGRectMake(x, y, 75, 35)];
[label setBackgroundColor:[UIColor whiteColor]];
[label setTextColor:[UIColor orangeColor]];
[label setText:pred.label];
[self addSubview:label];

//
// Keep a list of labels so we can easily remove from superview.
//
[self.labels addObject:label];
}
}

- (void) updateBoundingBoxes: (NSArray*) boxes
{
//
// flag the old boxes to be erased and flag the new to be drawn.
//
self.boxesToBeErased = self.boxesToBeDrawn;
self.boxesToBeDrawn = boxes;

//
// trigger a drawRect call next frame
//
[self setNeedsDisplay];
}

@end
12 changes: 12 additions & 0 deletions objC/App/CameraPreviewView.h
@@ -0,0 +1,12 @@

#import <UIKit/UIKit.h>

// AVFoundation is needed here for AVCaptureVideoPreviewLayer and the
// AVCaptureVideoDataOutputSampleBufferDelegate protocol referenced below.
#import <AVFoundation/AVFoundation.h>

@class AVCaptureSession;

@interface CameraPreviewView : UIView
@property (nonatomic, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer;

- (void) configureSession;
- (void) startSessionWithDelegate: (id<AVCaptureVideoDataOutputSampleBufferDelegate>) delegate;
- (void) stopSession;
@end
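
CameraPreviewView hides the AVCaptureSession plumbing behind `configureSession`, `startSessionWithDelegate:`, and `stopSession`. A minimal sketch of how a view controller might drive it is shown below, assuming the controller conforms to `AVCaptureVideoDataOutputSampleBufferDelegate` so it receives each captured frame; the controller class and its `previewView` property are hypothetical names.

```objc
// Hypothetical consumer of CameraPreviewView; only the three methods from
// CameraPreviewView.h come from this commit, the rest is illustrative.
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "CameraPreviewView.h"

@interface CameraViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
// The preview view laid out in the storyboard, overlaid by the bounding boxes.
@property (nonatomic) CameraPreviewView *previewView;
@end

@implementation CameraViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    [self.previewView configureSession];
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    // Register as the sample-buffer delegate so each captured frame can be
    // handed to the Tensorflow graph for inference.
    [self.previewView startSessionWithDelegate:self];
}

- (void)viewWillDisappear:(BOOL)animated
{
    [self.previewView stopSession];
    [super viewWillDisappear:animated];
}

// AVCaptureVideoDataOutputSampleBufferDelegate callback: one video frame per call.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Run the model on sampleBuffer here (outside the scope of this sketch).
}

@end
```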
