diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..47042c2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +.DS_Store +xcuserdata diff --git a/README.md b/README.md new file mode 100644 index 0000000..b43222a --- /dev/null +++ b/README.md @@ -0,0 +1,36 @@ +# Tensorflow iOS Object Detection + +An Object Detection application on iOS using Tensorflow and pre-trained COCO dataset models. Video frames are captured and inference is done locally using one of the three provided models: ssd_mobilenet_v1_coco, ssd_inception_v2_coco, and faster_rcnn_resnet101_coco. Both Swift and Objective-C projects are included. + +![cat image](images/cat.png) + +## Building + +* Make sure you have automake and libtool. Using homebrew: + +`brew install automake libtool` + + +* Clone the tensorflow source repo from GitHub: + +`git clone https://github.com/tensorflow/tensorflow` + + +* We need to build the tensorflow components with ANDROID_TYPES_FULL. In the terminal type: + +`export ANDROID_TYPES="-D__ANDROID_TYPES_FULL__"` + + +* Build the tensorflow libraries for iOS. Go to the root of your newly cloned tensorflow repo and run: + +`tensorflow/contrib/makefile/build_all_ios.sh` + +Go get a coffee. This can take a while. On my MacBook it took almost 2 hours. + + +* Open either the Swift or Objective-C project in this repo and edit the **tensorflow.xcconfig** file to point to the folder where you cloned the tensorflow repo: + +`TENSORFLOW_ROOT=/Users/username/Development/tensorflow` + + +* Build the Xcode project and run it. Since we need a camera this will only run on a device. diff --git a/images/cat.png b/images/cat.png new file mode 100644 index 0000000..d3ad867 Binary files /dev/null and b/images/cat.png differ diff --git a/objC/App/AppDelegate.h b/objC/App/AppDelegate.h new file mode 100644 index 0000000..971a851 --- /dev/null +++ b/objC/App/AppDelegate.h @@ -0,0 +1,8 @@ + +@import UIKit; + +@interface AppDelegate : UIResponder + +@property (nonatomic) UIWindow *window; + +@end diff --git a/objC/App/AppDelegate.m b/objC/App/AppDelegate.m new file mode 100644 index 0000000..65d696a --- /dev/null +++ b/objC/App/AppDelegate.m @@ -0,0 +1,5 @@ + +#import "AppDelegate.h" + +@implementation AppDelegate +@end diff --git a/objC/App/BoundingBoxView.h b/objC/App/BoundingBoxView.h new file mode 100644 index 0000000..9124321 --- /dev/null +++ b/objC/App/BoundingBoxView.h @@ -0,0 +1,17 @@ +// +// BoundingBoxView.h +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. +// + +#import + +@interface BoundingBoxView : UIView + +@property (nonatomic) NSMutableArray* labels; + +- (void) updateBoundingBoxes: (NSArray*) boxes; + +@end diff --git a/objC/App/BoundingBoxView.m b/objC/App/BoundingBoxView.m new file mode 100644 index 0000000..21e0827 --- /dev/null +++ b/objC/App/BoundingBoxView.m @@ -0,0 +1,142 @@ +// +// BoundingBoxView.m +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. +// + +#import "BoundingBoxView.h" +#import "TensorflowPrediction.h" + +const CGFloat BoundingBoxLineWidth = 3.5f; + +@interface BoundingBoxView() +@property (nonatomic) NSArray *boxesToBeErased; +@property (nonatomic) NSArray *boxesToBeDrawn; +@end + +@implementation BoundingBoxView + +- (instancetype)initWithCoder:(NSCoder *)coder +{ + self = [super initWithCoder:coder]; + if (self) + { + // + // Maintain a list of UILabels for easy removal from superView.
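+        // drawRect removes these labels from the superview whenever a fresh set of
+        // bounding boxes arrives, so the array always mirrors the labels currently on screen.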
+ // + self.labels = [[NSMutableArray alloc] init]; + } + return self; +} + + +// +// in drawRect we have a clear UIView that we draw green bounding boxes on. +// As a new list of boundingboxes comes in we erase the old boxes and draw the new ones. +// Since this view is just a layer over the videoPreview the bounding boxes could be a few +// frames behind and the box will not align with the object underneath it. This will likely +// be an issue until Tensorflow processing is as fast as the video preview's frame capture. +// +- (void)drawRect:(CGRect)rect +{ + // + // Our drawing context + // + CGContextRef context = UIGraphicsGetCurrentContext(); + + // + // The width of the bounding box lines. + // + CGContextSetLineWidth(context, BoundingBoxLineWidth); + + // + // The fill color of the bounding box is always clear + // + CGContextSetRGBFillColor(context, 1.0, 1.0, 1.0, 0.0); + + // + // Erase boxes from the previous frame + // + if (self.boxesToBeErased) + { + for (TensorflowPrediction* pred in self.boxesToBeErased) + { + // Erase the previous bounding box by using a clear stroke color + CGContextSetRGBStrokeColor(context, 1.0, 1.0, 1.0, 0.0); + + // Calculate box dimensions of box to be erased. + CGFloat x = pred.left * self.frame.size.width; + CGFloat y = pred.top * self.frame.size.height; + CGFloat w = (pred.right * self.frame.size.width) - x; + CGFloat h = (pred.bottom * self.frame.size.height) - y; + CGRect rectangle = CGRectMake(x, y, w, h); + + //Erase it. (draw clear pixels over the green) + CGContextFillRect(context, rectangle); + CGContextStrokeRect(context, rectangle); + } + + // + // Remove existing labels too. + // + for (UILabel * label in self.labels) + { + [label removeFromSuperview]; + } + [self.labels removeAllObjects]; + self.boxesToBeErased = nil; + } + + // + // Draw newly predicted boxes + // + for (TensorflowPrediction* pred in self.boxesToBeDrawn) + { + // + // Calculate the box dimensions. The box dimensions are given + // as normalized values. Because this view has the same dimensions + // as the original image multiplying by width and height gives the + // correct location for the bounding box. + // + CGFloat x = pred.left * self.frame.size.width; + CGFloat y = pred.top * self.frame.size.height; + CGFloat w = (pred.right * self.frame.size.width) - x; + CGFloat h = (pred.bottom * self.frame.size.height) - y; + CGRect rectangle = CGRectMake(x, y, w, h); + + // Draw with a green stroke. + CGContextSetRGBStrokeColor(context, 0.0, 1.0, 0.0, 0.75); + CGContextFillRect(context, rectangle); + CGContextStrokeRect(context, rectangle); + + // Add the label to the upper left of the bounding box + UILabel * label = [[UILabel alloc] initWithFrame:CGRectMake(x, y, 75, 35)]; + [label setBackgroundColor:[UIColor whiteColor]]; + [label setTextColor:[UIColor orangeColor]]; + [label setText:pred.label]; + [self addSubview:label]; + + // + // Keep a list of labels so we can easily remove from superview. + // + [self.labels addObject:label]; + } +} + +- (void) updateBoundingBoxes: (NSArray*) boxes +{ + // + // flag the old boxes to be erased and flag the new to be drawn. 
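+    // Nothing is drawn here; both the erase and the redraw happen in drawRect,
+    // which runs after the setNeedsDisplay call below schedules the next display pass.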
+ // + self.boxesToBeErased = self.boxesToBeDrawn; + self.boxesToBeDrawn = boxes; + + // + // trigger a drawRect call next frame + // + [self setNeedsDisplay]; +} + +@end diff --git a/objC/App/CameraPreviewView.h b/objC/App/CameraPreviewView.h new file mode 100644 index 0000000..639875c --- /dev/null +++ b/objC/App/CameraPreviewView.h @@ -0,0 +1,12 @@ + +#import + +@class AVCaptureSession; + +@interface CameraPreviewView : UIView +@property (nonatomic, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer; + +- (void) configureSession; +- (void) startSessionWithDelegate: (id) delegate; +- (void) stopSession; +@end diff --git a/objC/App/CameraPreviewView.m b/objC/App/CameraPreviewView.m new file mode 100644 index 0000000..119ac77 --- /dev/null +++ b/objC/App/CameraPreviewView.m @@ -0,0 +1,349 @@ + +#import + +#import "CameraPreviewView.h" +#import "constants.h" + +static void * SessionRunningContext = &SessionRunningContext; + +typedef NS_ENUM( NSInteger, CameraSetupResult ) +{ + SetupResultSuccess, + SetupResultCameraNotAuthorized, + SetupResultSessionConfigurationFailed +}; + +@interface AVCaptureDeviceDiscoverySession (Utilities) + +- (NSInteger)uniqueDevicePositionsCount; + +@end + +@implementation AVCaptureDeviceDiscoverySession (Utilities) + +- (NSInteger)uniqueDevicePositionsCount +{ + NSMutableArray *uniqueDevicePositions = [NSMutableArray array]; + + for ( AVCaptureDevice *device in self.devices ) + { + if ( ! [uniqueDevicePositions containsObject:@(device.position)] ) + { + [uniqueDevicePositions addObject:@(device.position)]; + } + } + + return uniqueDevicePositions.count; +} + +@end + +@interface CameraPreviewView() +@property (nonatomic) CameraSetupResult cameraSetupResult; +@property (nonatomic) AVCaptureSession *avSession; +@property (nonatomic) dispatch_queue_t sessionQueue; +@property (nonatomic) dispatch_queue_t videoFrameSerialQueue; +@property (nonatomic, getter=isSessionRunning) BOOL sessionRunning; +@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput; +@property (nonatomic) AVCaptureVideoDataOutput *videoDataOutput; +@end + +@implementation CameraPreviewView + ++ (Class)layerClass +{ + return [AVCaptureVideoPreviewLayer class]; +} + +- (instancetype)initWithCoder:(NSCoder *)aDecoder +{ + self = [super initWithCoder:aDecoder]; + if (self) + { + [self setupSession]; + } + return self; + +} + +- (instancetype)init +{ + self = [super init]; + if (self) + { + [self setupSession]; + } + return self; +} + +#pragma mark Session Management + + +- (AVCaptureVideoPreviewLayer *)videoPreviewLayer +{ + return (AVCaptureVideoPreviewLayer *)self.layer; +} + +- (AVCaptureSession *)session +{ + return self.videoPreviewLayer.session; +} + +- (void)setSession:(AVCaptureSession *)session +{ + self.videoPreviewLayer.session = session; +} + +- (void) setupSession +{ + self.avSession = [[AVCaptureSession alloc] init]; + + self.videoPreviewLayer.session = self.avSession; + + // + // Communicate with the session and other session objects on this queue. + // + self.sessionQueue = dispatch_queue_create( "PreviewSessionQueue", DISPATCH_QUEUE_SERIAL ); + + // We use a serial queue for the video frames so that + // they are dispatched in the order that they are captured + self.videoFrameSerialQueue = dispatch_queue_create("VideoFrameQueue", DISPATCH_QUEUE_SERIAL); + + self.cameraSetupResult = SetupResultSuccess; + + //Check video authorization status. Video access is required. 
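+    // Three outcomes are handled below: access already granted, not yet determined
+    // (we ask for permission and suspend the session queue until the user answers),
+    // and denied/restricted, which is recorded so that startSessionWithDelegate can
+    // post kSetupResultCameraNotAuthorized instead of starting the session.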
+ switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] ) + { + case AVAuthorizationStatusAuthorized: + { + // The user has previously granted access to the camera. + break; + } + case AVAuthorizationStatusNotDetermined: + { + /* + The user has not yet been presented with the option to grant + video access. We suspend the session queue to delay session + setup until the access request has completed. + */ + dispatch_suspend( self.sessionQueue ); + [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) { + if ( ! granted ) + { + self.cameraSetupResult = SetupResultCameraNotAuthorized; + } + dispatch_resume( self.sessionQueue ); + }]; + break; + } + default: + { + // The user has previously denied access. + self.cameraSetupResult = SetupResultCameraNotAuthorized; + break; + } + } +} + +// Call this on the session queue. +- (void)configureSession +{ + dispatch_async( self.sessionQueue, ^{ + if ( self.cameraSetupResult != SetupResultSuccess ) + { + return; + } + + [self.avSession beginConfiguration]; + self.avSession.sessionPreset = AVCaptureSessionPresetLow; + + // Add video input. + + // Choose the back dual camera if available, otherwise default to a wide angle camera. + AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack]; + if ( ! videoDevice ) + { + // If the back dual camera is not available, default to the back wide angle camera. + videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack]; + + // In some cases where users break their phones, the back wide angle camera is not available. In this case, we should default to the front wide angle camera. + if ( ! videoDevice ) + { + videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront]; + } + } + + // Set the frame rate to 15fps max on the video preview. + [videoDevice lockForConfiguration:nil]; + [videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(1,15)]; + [videoDevice unlockForConfiguration]; + + NSError *error = nil; + AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; + if ( ! videoDeviceInput ) + { + NSLog( @"Could not create video device input: %@", error ); + self.cameraSetupResult = SetupResultSessionConfigurationFailed; + [self.avSession commitConfiguration]; + return; + } + if ( [self.avSession canAddInput:videoDeviceInput] ) + { + [self.avSession addInput:videoDeviceInput]; + self.videoDeviceInput = videoDeviceInput; + + dispatch_async( dispatch_get_main_queue(), ^{ + // Dispatching this to the main queue because a UIView (CameraPreviewView) can only be + // changed on the main thread. 
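+                // Only the preview layer's initial orientation is set here; the rest of the
+                // session configuration continues on the sessionQueue.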
+ UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation; + AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait; + if ( statusBarOrientation != UIInterfaceOrientationUnknown ) + { + initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation; + } + + self.videoPreviewLayer.connection.videoOrientation = initialVideoOrientation; + } ); + } + else + { + NSLog( @"Could not add video device input to the session" ); + self.cameraSetupResult = SetupResultSessionConfigurationFailed; + [self.avSession commitConfiguration]; + return; + } + + [self addVideoOutput]; + + [self.avSession commitConfiguration]; + } ); +} + +- (void) addVideoOutput +{ + AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init]; + + // + // We use the 32 bit BGRA pixel format type. That way we can just pass the data to + // Tensorflow without pre-processing. + // + NSDictionary *newSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }; + videoOutput.videoSettings = newSettings; + videoOutput.alwaysDiscardsLateVideoFrames = YES; + + // + // Add the videoOutput to our AVSession + // + if ( [self.avSession canAddOutput:videoOutput] ) + { + [self.avSession beginConfiguration]; + [self.avSession addOutput:videoOutput]; + self.avSession.sessionPreset = AVCaptureSessionPresetHigh; + AVCaptureConnection *connection = [videoOutput connectionWithMediaType:AVMediaTypeVideo]; + if ( connection.isVideoStabilizationSupported ) + { + connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; + } + + [self.avSession commitConfiguration]; + + self.videoDataOutput = videoOutput; + } +} + +- (void) startSessionWithDelegate:(id) delegate +{ + dispatch_async( self.sessionQueue, ^{ + switch ( self.cameraSetupResult ) + { + case SetupResultSuccess: + { + // if setup succeeded we can add Observers and frame delegate + // and run the session. + [self addObservers]; + [self.videoDataOutput setSampleBufferDelegate:delegate queue:self.videoFrameSerialQueue]; + + [self.avSession startRunning]; + self.sessionRunning = self.avSession.isRunning; + + // Let everyone know we have a session. 
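+                // ViewController observes kAVSessionStarted and uses it as the cue to allocate
+                // the TensorflowGraph (see OnAVSessionStarted:).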
+ [[NSNotificationCenter defaultCenter] postNotificationName:kAVSessionStarted object:nil]; + break; + } + case SetupResultCameraNotAuthorized: + { + [[NSNotificationCenter defaultCenter] postNotificationName:kSetupResultCameraNotAuthorized object:nil]; + break; + } + case SetupResultSessionConfigurationFailed: + { + [[NSNotificationCenter defaultCenter] postNotificationName:kSetupResultSessionConfigurationFailed object:nil]; + break; + } + } + } ); +} + +- (void) stopSession +{ + dispatch_async( self.sessionQueue, ^{ + if ( self.cameraSetupResult == SetupResultSuccess ) + { + [self.avSession stopRunning]; + [self removeObservers]; + } + } ); +} + +#pragma mark KVO and Notifications + +- (void)addObservers +{ + [self.avSession addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext]; + + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.avSession]; +} + +- (void)removeObservers +{ + [[NSNotificationCenter defaultCenter] removeObserver:self]; + + [self.avSession removeObserver:self forKeyPath:@"running" context:SessionRunningContext]; +} + +- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context +{ + if ( context == SessionRunningContext ) + { + self.sessionRunning = [change[NSKeyValueChangeNewKey] boolValue]; + } + else + { + [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; + } +} + + +- (void)sessionRuntimeError:(NSNotification *)notification +{ + NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; + NSLog( @"Capture session runtime error: %@", error ); + + /* + Automatically try to restart the session running if media services were + reset and the last start running succeeded. Otherwise, enable the user + to try to resume the session running. + */ + if ( error.code == AVErrorMediaServicesWereReset ) + { + dispatch_async( self.sessionQueue, ^{ + if ( self.isSessionRunning ) + { + [self.avSession startRunning]; + self.sessionRunning = self.avSession.isRunning; + } + } ); + } +} + +@end diff --git a/objC/App/ViewController.h b/objC/App/ViewController.h new file mode 100644 index 0000000..e7fa387 --- /dev/null +++ b/objC/App/ViewController.h @@ -0,0 +1,7 @@ + +#import +#import + +@interface ViewController : UIViewController + +@end diff --git a/objC/App/ViewController.mm b/objC/App/ViewController.mm new file mode 100644 index 0000000..55dc27c --- /dev/null +++ b/objC/App/ViewController.mm @@ -0,0 +1,234 @@ +#import +#import "ViewController.h" +#import "CameraPreviewView.h" +#import "TensorflowGraph.h" +#import "constants.h" +#import "TensorflowPrediction.h" +#import "BoundingBoxView.h" +#import "tensorflowUtils.h" + +@interface ViewController () + +// The view of what the camera is currently capturing +@property (nonatomic, weak) IBOutlet CameraPreviewView *cameraPreviewView; + +// the transparent UIView where we draw the bounding boxes. This view +// sits on top of the CameraPreview +@property (nonatomic, weak) IBOutlet BoundingBoxView *boundingBoxView; + +// the tensorflow graph that will do the recognizing. +@property (nonatomic) TensorflowGraph *tensorflowGraph; + +// Label to alert the user if the camera is unavailable. +@property (nonatomic, weak) IBOutlet UILabel *cameraUnavailableLabel; + +// processingTime and framesProcessed are used for keeping an average time to make predictions. 
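+// (TensorflowGraph keeps an identical pair of counters; that is where the per-frame
+// timing is currently accumulated and printed.)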
+@property (nonatomic) double processingTime; +@property (nonatomic) int framesProcessed; + +@end + + +@implementation ViewController + + +#pragma mark View Controller Life Cycle + +- (void)viewDidLoad +{ + [super viewDidLoad]; + + // + // Configure the video preview. We will grab frames + // from the video preview and feed them into the tensorflow graph. + // Then bounding boxes can be rendered onto the boundingBoxView. + // + [self.cameraPreviewView configureSession]; +} + +- (void)viewWillAppear:(BOOL)animated +{ + [super viewWillAppear:animated]; + + // + // Listen for the start of the AVSession. This will signal the start + // of the delivery of video frames and will trigger the + // initialization of the tensorflow graph + // + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnAVSessionStarted:) name:kAVSessionStarted object:nil]; + + // + // Also Listen for Session initialization failure or for when + // the user doesn't authorize the use of the camera + // + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnSetupResultCameraNotAuthorized:) name:kSetupResultCameraNotAuthorized object:nil]; + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnSetupResultSessionConfigurationFailed:) name:kSetupResultSessionConfigurationFailed object:nil]; + + // + // Respond to the tensorflow graph's update of predictions. This will + // trigger the redrawing of the bounding boxes. + // + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnPredictionsUpdated:) name:kPredictionsUpdated object:nil]; + + // + // Start the AV Session. This will prompt the user for + // permission to use the camera to present a video preview. + // + [self.cameraPreviewView startSessionWithDelegate:self]; + } + +// +// when the view disappears we shut down the session. It will be restarted in ViewWillAppear +// +- (void)viewDidDisappear:(BOOL)animated +{ + [self.cameraPreviewView stopSession]; + [super viewDidDisappear:animated]; +} + +// +// Yes, please autorotate, but we will have to change the orientation of the pixel buffer when we run the graph. +// +- (BOOL)shouldAutorotate +{ + return YES; +} + +// +// Supporting only landscape. +// +- (UIInterfaceOrientationMask)supportedInterfaceOrientations +{ + return UIInterfaceOrientationMaskLandscape; +} + + +// +// Override viewWillTransitionToSize so that we can update the videoPreviewLayer with the new orientation. +// +- (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id)coordinator +{ + // + // call super so the coordinator can be passed on. + // + [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator]; + + // + // ignore everything but landscape orientation changes. + // + UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation; + if ( UIDeviceOrientationIsLandscape(deviceOrientation) ) + { + self.cameraPreviewView.videoPreviewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)deviceOrientation; + } +} + +#pragma mark - Video Preview delegate + + +// +// Delegate function from the AVSession. Here we capture frames from +// the video preview and feed them to tensorflow. 
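+// Frames arrive on the serial videoFrameSerialQueue. A frame is only handed to the
+// graph when canProcessFrame returns YES, so frames that arrive while a prediction
+// is still in flight are simply skipped.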
+// +- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection +{ + // If the graph is ready, run the frame through tensorflow + if (self.tensorflowGraph) + { + // + // if it is not busy pass the pixel buffer off to the tensorflow graph + // + if ([self.tensorflowGraph canProcessFrame]) + { + // + // Grab the pixel buffer. We pass it to the tf graph which will retain, copy and release + // + CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); + [self.tensorflowGraph runModelOnPixelBuffer:pixelBuffer orientation:[UIDevice currentDevice].orientation]; + } + } +} + +// +// Will be called when frames are dropped by the Video Output delegate. +// +- (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection +{ + //CFTypeRef droppedFrameReason = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, NULL); + //NSLog(@"dropped frame, reason: %@", droppedFrameReason); +} + + +#pragma mark - NS_NOTIFICATIONS + +// +// Notification that the AV Session has started. Since we now have a camera session +// it is safe to alloc a tensorflowGraph object. +// +- (void) OnAVSessionStarted: (NSNotification*) notification +{ + // Now that the user has granted permission to the camera + // and we have a video session we can initialize our graph. + if (!self.tensorflowGraph) + { + self.tensorflowGraph = [[TensorflowGraph alloc] init]; + } +} + +// +// The tensorflow graph has analyzed the pixel buffer coming out of the CameraPreview +// and resulted in new predictions and bounding boxes. We notify the boundingBoxView to +// draw the boxes over the CameraPreview. +// +- (void) OnPredictionsUpdated: (NSNotification*) notification +{ + NSDictionary * dict = [notification userInfo]; + if (dict) + { + // Update the Bounding boxes and labels from the + // new predictions coming out of the graph. + NSArray * predictions = dict[@"predictions"]; + if (predictions) + { + [self.boundingBoxView updateBoundingBoxes:predictions]; + } + } +} + + +// +// Notification that the camera has not been authorized. Without camera permissions +// we will not have a preview and won't alloc a Tensorflow graph. Post an alertBox +// and give the user a short cut to the settings app. +// +- (void) OnSetupResultCameraNotAuthorized: (NSNotification *) notification +{ + dispatch_async( dispatch_get_main_queue(), ^{ + NSString *message = NSLocalizedString( @"In order to display a video preview we need to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" ); + UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"tensorflowiOS" message:message preferredStyle:UIAlertControllerStyleAlert]; + UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; + [alertController addAction:cancelAction]; + // Provide quick access to Settings. 
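+        // UIApplicationOpenSettingsURLString opens this app's own page in the Settings
+        // app, where the user can re-enable camera access.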
+ UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) { + [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString] options:@{} completionHandler:nil]; + }]; + [alertController addAction:settingsAction]; + [self presentViewController:alertController animated:YES completion:nil]; + } ); +} + +// +// Configuration of the AV session failed. For some reason the AVSession has failed to +// initialize. Post an alert. +// +- (void) OnSetupResultSessionConfigurationFailed: (NSNotification *) notification +{ + dispatch_async( dispatch_get_main_queue(), ^{ + NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" ); + UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"tensorflowiOS" message:message preferredStyle:UIAlertControllerStyleAlert]; + UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; + [alertController addAction:cancelAction]; + [self presentViewController:alertController animated:YES completion:nil]; + } ); +} +@end diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json b/objC/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..a218566 --- /dev/null +++ b/objC/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,154 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "3x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_29x29-1.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_29x29@2x-1.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_29x29@3x.png", + "scale" : "3x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_40x40@2x-1.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_40x40@3x.png", + "scale" : "3x" + }, + { + "size" : "57x57", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_57x57.png", + "scale" : "1x" + }, + { + "size" : "57x57", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_57x57@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_60x60@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_60x60@3x.png", + "scale" : "3x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_29x29.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_40x40.png", + "scale" : "1x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "50x50", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_50x50.png", + "scale" : "1x" + }, + { + 
"size" : "50x50", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_50x50@2x.png", + "scale" : "2x" + }, + { + "size" : "72x72", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_72x72.png", + "scale" : "1x" + }, + { + "size" : "72x72", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_72x72@2x.png", + "scale" : "2x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_76x76.png", + "scale" : "1x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_76x76@2x.png", + "scale" : "2x" + }, + { + "size" : "83.5x83.5", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_83.5x83.5.png", + "scale" : "2x" + }, + { + "size" : "1024x1024", + "idiom" : "ios-marketing", + "filename" : "tensorflowiOS_Icon_1024x1024-1.png", + "scale" : "1x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png new file mode 100644 index 0000000..447f367 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png new file mode 100644 index 0000000..bf647a7 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png new file mode 100644 index 0000000..bf647a7 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png new file mode 100644 index 0000000..a585b4f Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png new file mode 100644 index 0000000..a585b4f Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png new file mode 100644 index 0000000..93bf8ec Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png new file mode 100644 index 0000000..93c35cd Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png new file mode 100644 index 0000000..b7072e8 Binary files /dev/null and 
b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png new file mode 100644 index 0000000..b7072e8 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png new file mode 100644 index 0000000..90db855 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png new file mode 100644 index 0000000..a659364 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png new file mode 100644 index 0000000..864bcb6 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png new file mode 100644 index 0000000..5e77b32 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png new file mode 100644 index 0000000..1b29107 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png new file mode 100644 index 0000000..90db855 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png new file mode 100644 index 0000000..f3211ca Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png new file mode 100644 index 0000000..bc25098 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png new file mode 100644 index 0000000..5cc1e60 Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png new 
file mode 100644 index 0000000..e1e4b5c Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png new file mode 100644 index 0000000..4e1d83d Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png differ diff --git a/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png new file mode 100644 index 0000000..382373f Binary files /dev/null and b/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png differ diff --git a/objC/Assets/Assets.xcassets/Contents.json b/objC/Assets/Assets.xcassets/Contents.json new file mode 100644 index 0000000..da4a164 --- /dev/null +++ b/objC/Assets/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/objC/Assets/Base.lproj/LaunchScreen.storyboard b/objC/Assets/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 0000000..5dc6a62 --- /dev/null +++ b/objC/Assets/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/objC/Assets/Default-568h@2x.png b/objC/Assets/Default-568h@2x.png new file mode 100644 index 0000000..0891b7a Binary files /dev/null and b/objC/Assets/Default-568h@2x.png differ diff --git a/objC/Assets/Main.storyboard b/objC/Assets/Main.storyboard new file mode 100644 index 0000000..d1dcb4c --- /dev/null +++ b/objC/Assets/Main.storyboard @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/objC/Models/faster_rcnn_resnet101_coco_11_06_2017/op_inference_graph_rcnn.pb b/objC/Models/faster_rcnn_resnet101_coco_11_06_2017/op_inference_graph_rcnn.pb new file mode 100644 index 0000000..89e45da Binary files /dev/null and b/objC/Models/faster_rcnn_resnet101_coco_11_06_2017/op_inference_graph_rcnn.pb differ diff --git a/objC/Models/mscoco_label_map.txt b/objC/Models/mscoco_label_map.txt new file mode 100644 index 0000000..0f1be65 --- /dev/null +++ b/objC/Models/mscoco_label_map.txt @@ -0,0 +1,400 @@ +item { + name: "/m/01g317" + id: 1 + display_name: "person" +} +item { + name: "/m/0199g" + id: 2 + display_name: "bicycle" +} +item { + name: "/m/0k4j" + id: 3 + display_name: "car" +} +item { + name: "/m/04_sv" + id: 4 + display_name: "motorcycle" +} +item { + name: "/m/05czz6l" + id: 5 + display_name: "airplane" +} +item { + name: "/m/01bjv" + id: 6 + display_name: "bus" +} +item { + name: "/m/07jdr" + id: 7 + display_name: "train" +} +item { + name: "/m/07r04" + id: 8 + display_name: "truck" +} +item { + name: "/m/019jd" + id: 9 + display_name: "boat" +} +item { + name: "/m/015qff" + id: 10 + display_name: "traffic light" +} +item { + name: "/m/01pns0" + id: 11 + display_name: "fire hydrant" +} +item { + name: "/m/02pv19" + id: 13 + display_name: "stop sign" +} +item { + name: "/m/015qbp" + id: 14 + display_name: "parking meter" +} +item { + name: "/m/0cvnqh" + id: 15 + display_name: "bench" +} +item { + name: "/m/015p6" + id: 16 + display_name: "bird" +} +item { + name: "/m/01yrx" + id: 17 + display_name: "cat" +} +item { + name: "/m/0bt9lr" + id: 18 + display_name: "dog" +} +item { + name: 
"/m/03k3r" + id: 19 + display_name: "horse" +} +item { + name: "/m/07bgp" + id: 20 + display_name: "sheep" +} +item { + name: "/m/01xq0k1" + id: 21 + display_name: "cow" +} +item { + name: "/m/0bwd_0j" + id: 22 + display_name: "elephant" +} +item { + name: "/m/01dws" + id: 23 + display_name: "bear" +} +item { + name: "/m/0898b" + id: 24 + display_name: "zebra" +} +item { + name: "/m/03bk1" + id: 25 + display_name: "giraffe" +} +item { + name: "/m/01940j" + id: 27 + display_name: "backpack" +} +item { + name: "/m/0hnnb" + id: 28 + display_name: "umbrella" +} +item { + name: "/m/080hkjn" + id: 31 + display_name: "handbag" +} +item { + name: "/m/01rkbr" + id: 32 + display_name: "tie" +} +item { + name: "/m/01s55n" + id: 33 + display_name: "suitcase" +} +item { + name: "/m/02wmf" + id: 34 + display_name: "frisbee" +} +item { + name: "/m/071p9" + id: 35 + display_name: "skis" +} +item { + name: "/m/06__v" + id: 36 + display_name: "snowboard" +} +item { + name: "/m/018xm" + id: 37 + display_name: "sports ball" +} +item { + name: "/m/02zt3" + id: 38 + display_name: "kite" +} +item { + name: "/m/03g8mr" + id: 39 + display_name: "baseball bat" +} +item { + name: "/m/03grzl" + id: 40 + display_name: "baseball glove" +} +item { + name: "/m/06_fw" + id: 41 + display_name: "skateboard" +} +item { + name: "/m/019w40" + id: 42 + display_name: "surfboard" +} +item { + name: "/m/0dv9c" + id: 43 + display_name: "tennis racket" +} +item { + name: "/m/04dr76w" + id: 44 + display_name: "bottle" +} +item { + name: "/m/09tvcd" + id: 46 + display_name: "wine glass" +} +item { + name: "/m/08gqpm" + id: 47 + display_name: "cup" +} +item { + name: "/m/0dt3t" + id: 48 + display_name: "fork" +} +item { + name: "/m/04ctx" + id: 49 + display_name: "knife" +} +item { + name: "/m/0cmx8" + id: 50 + display_name: "spoon" +} +item { + name: "/m/04kkgm" + id: 51 + display_name: "bowl" +} +item { + name: "/m/09qck" + id: 52 + display_name: "banana" +} +item { + name: "/m/014j1m" + id: 53 + display_name: "apple" +} +item { + name: "/m/0l515" + id: 54 + display_name: "sandwich" +} +item { + name: "/m/0cyhj_" + id: 55 + display_name: "orange" +} +item { + name: "/m/0hkxq" + id: 56 + display_name: "broccoli" +} +item { + name: "/m/0fj52s" + id: 57 + display_name: "carrot" +} +item { + name: "/m/01b9xk" + id: 58 + display_name: "hot dog" +} +item { + name: "/m/0663v" + id: 59 + display_name: "pizza" +} +item { + name: "/m/0jy4k" + id: 60 + display_name: "donut" +} +item { + name: "/m/0fszt" + id: 61 + display_name: "cake" +} +item { + name: "/m/01mzpv" + id: 62 + display_name: "chair" +} +item { + name: "/m/02crq1" + id: 63 + display_name: "couch" +} +item { + name: "/m/03fp41" + id: 64 + display_name: "potted plant" +} +item { + name: "/m/03ssj5" + id: 65 + display_name: "bed" +} +item { + name: "/m/04bcr3" + id: 67 + display_name: "dining table" +} +item { + name: "/m/09g1w" + id: 70 + display_name: "toilet" +} +item { + name: "/m/07c52" + id: 72 + display_name: "tv" +} +item { + name: "/m/01c648" + id: 73 + display_name: "laptop" +} +item { + name: "/m/020lf" + id: 74 + display_name: "mouse" +} +item { + name: "/m/0qjjc" + id: 75 + display_name: "remote" +} +item { + name: "/m/01m2v" + id: 76 + display_name: "keyboard" +} +item { + name: "/m/050k8" + id: 77 + display_name: "cell phone" +} +item { + name: "/m/0fx9l" + id: 78 + display_name: "microwave" +} +item { + name: "/m/029bxz" + id: 79 + display_name: "oven" +} +item { + name: "/m/01k6s3" + id: 80 + display_name: "toaster" +} +item { + name: "/m/0130jx" + id: 81 + 
display_name: "sink" +} +item { + name: "/m/040b_t" + id: 82 + display_name: "refrigerator" +} +item { + name: "/m/0bt_c3" + id: 84 + display_name: "book" +} +item { + name: "/m/01x3z" + id: 85 + display_name: "clock" +} +item { + name: "/m/02s195" + id: 86 + display_name: "vase" +} +item { + name: "/m/01lsmm" + id: 87 + display_name: "scissors" +} +item { + name: "/m/0kmg4" + id: 88 + display_name: "teddy bear" +} +item { + name: "/m/03wvsk" + id: 89 + display_name: "hair drier" +} +item { + name: "/m/012xff" + id: 90 + display_name: "toothbrush" +} \ No newline at end of file diff --git a/objC/Models/ssd_inception_v2_coco_11_06_2017/op_inference_graph_inv2.pb b/objC/Models/ssd_inception_v2_coco_11_06_2017/op_inference_graph_inv2.pb new file mode 100644 index 0000000..54c686c Binary files /dev/null and b/objC/Models/ssd_inception_v2_coco_11_06_2017/op_inference_graph_inv2.pb differ diff --git a/objC/Models/ssd_mobilenet_v1_coco_11_06_2017/op_inference_graph.pb b/objC/Models/ssd_mobilenet_v1_coco_11_06_2017/op_inference_graph.pb new file mode 100644 index 0000000..e315d10 Binary files /dev/null and b/objC/Models/ssd_mobilenet_v1_coco_11_06_2017/op_inference_graph.pb differ diff --git a/objC/SupportingFiles/Info.plist b/objC/SupportingFiles/Info.plist new file mode 100644 index 0000000..16d0cec --- /dev/null +++ b/objC/SupportingFiles/Info.plist @@ -0,0 +1,52 @@ + + + + + CFBundleDevelopmentRegion + en + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + APPL + CFBundleShortVersionString + 5.0 + CFBundleSignature + ???? + CFBundleVersion + 1 + LSRequiresIPhoneOS + + NSCameraUsageDescription + For the Video Preview + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIRequiredDeviceCapabilities + + armv7 + + UIRequiresFullScreen + + UIStatusBarHidden + + UISupportedInterfaceOrientations + + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UIViewControllerBasedStatusBarAppearance + + + diff --git a/objC/SupportingFiles/main.m b/objC/SupportingFiles/main.m new file mode 100644 index 0000000..19dab0c --- /dev/null +++ b/objC/SupportingFiles/main.m @@ -0,0 +1,11 @@ +@import UIKit; + +#import "AppDelegate.h" + +int main(int argc, char * argv[]) +{ + @autoreleasepool + { + return UIApplicationMain( argc, argv, nil, NSStringFromClass( [AppDelegate class] ) ); + } +} diff --git a/objC/SupportingFiles/tensorflow.xcconfig b/objC/SupportingFiles/tensorflow.xcconfig new file mode 100644 index 0000000..298eee8 --- /dev/null +++ b/objC/SupportingFiles/tensorflow.xcconfig @@ -0,0 +1,9 @@ +// +// tensorflow.xcconfig +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. 
+// + +TENSORFLOW_ROOT = /Users/username/Development/tensorflow diff --git a/objC/Tensorflow/TensorflowGraph.h b/objC/Tensorflow/TensorflowGraph.h new file mode 100644 index 0000000..dfe916a --- /dev/null +++ b/objC/Tensorflow/TensorflowGraph.h @@ -0,0 +1,24 @@ +#import +#import + +#include "tensorflow/core/public/session.h" +#include "tensorflow/core/util/memmapped_file_system.h" +#include "string_int_label_map.pb.h" + +#include + +@interface TensorflowGraph : NSObject +{ + std::unique_ptr tfSession; + object_detection::protos::StringIntLabelMap labelMap; +} + +- (BOOL)loadLabelsWithFileName:(NSString *)labelsFileName labelsFileType:(NSString *)labelsFileType; +- (BOOL)loadModelWithFileName:(NSString *)modelFileName modelFileType:(NSString *)modelFileType; + +tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelStrings); + +- (BOOL) canProcessFrame; +- (void)runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuf orientation: (UIDeviceOrientation) orientation; + +@end diff --git a/objC/Tensorflow/TensorflowGraph.mm b/objC/Tensorflow/TensorflowGraph.mm new file mode 100644 index 0000000..4758deb --- /dev/null +++ b/objC/Tensorflow/TensorflowGraph.mm @@ -0,0 +1,300 @@ + +#import "TensorflowGraph.h" +#import +#import "constants.h" +#import "tensorflowUtils.h" +#import "TensorflowPrediction.h" +#include + +const int kGraphChannels = 3; // BGR. +const int kGraphImageWidth = 480; // The width of the pixels going into the graph. +const int kGraphImageHeight = 270; // the height of the pixels going into the graph. +const float kPredictionThreshold = 0.50; // Prediction percentages lower than this will be discarded. +const int kGraphMaxPredictions = 15; // After this many predictions we move on. +const int kAverageEveryXFrames = 50; // Output average processing time every X frames + +@interface TensorflowGraph() + +// processingTime and framesProcessed are used for keeping an average time to make predictions. +@property (nonatomic) double processingTime; +@property (nonatomic) int framesProcessed; + +// Keep a load status - if loading fails we don't want to attempt to run +// anything through a non-existent graph. +@property (nonatomic) tensorflow::Status loadStatus; +@property (nonatomic) tensorflow::Status labelStatus; +@property (nonatomic) BOOL isProcessingFrame; + +@end + + +@implementation TensorflowGraph + +- (id) init +{ + self = [super init]; + if (self) + { + // change model name here to use one of the other models. + NSString *model = @"op_inference_graph"; + NSString *label = @"mscoco_label_map"; + + if (![self loadModelWithFileName:model modelFileType:@"pb"]) + { + NSLog(@"Failed to load model"); + } + + if (![self loadLabelsWithFileName:label labelsFileType:@"txt"]) + { + NSLog(@"Failed to load labels"); + } + } + return self; +} + +- (BOOL)loadModelWithFileName:(NSString *)modelFileName modelFileType:(NSString *)modelFileType +{ + self.loadStatus = loadModel(modelFileName, modelFileType, &tfSession); + return self.loadStatus.ok(); +} + +- (BOOL)loadLabelsWithFileName:(NSString *)labelsFileName labelsFileType:(NSString *)labelsFileType +{ + // + // load the labels from the file. labelMap is populated by calling loadLabels. + self.labelStatus = loadLabels(labelsFileName, labelsFileType, &labelMap); + return self.labelStatus.ok(); +} + +- (BOOL) canProcessFrame +{ + return (!self.isProcessingFrame); +} + + +- (CGImageRef) copyPixelBuffer: (CVImageBufferRef) pixelBuffer +{ + // + // alloc a CIImage with the pixel buffer. 
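+    // The CIImage wraps the camera pixels without copying them; the affine scale below
+    // shrinks the frame to kGraphImageWidth x kGraphImageHeight before it is rendered
+    // into a CGImage for the tensor copy.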
+ CIImage* ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer]; + + const int pixelBufHeight = (int) CVPixelBufferGetHeight(pixelBuffer); + const int pixelBufWidth = (int) CVPixelBufferGetWidth(pixelBuffer); + CGAffineTransform scale = CGAffineTransformMakeScale(float(kGraphImageWidth)/pixelBufWidth, + float(kGraphImageHeight)/pixelBufHeight); + CIImage* resized = [ciImage imageByApplyingTransform:scale]; + + // + // Create a cgImage from the frame pixels + // + CIContext *context = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [context createCGImage:resized fromRect:resized.extent]; + + return cgImage; +} + + +// +// Takes a pixel buffer coming from the Camera preview session and obtains predictions w/bounding boxes from +// a tensorflow graph. +// +- (void)runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuffer orientation: (UIDeviceOrientation) orientation +{ + // + // if the graph hasn't loaded we can't do anything yet. + // + if (!self.loadStatus.ok()) + { + return; + } + + // + // Retain the pixel buffer, copy and make a CGImage out of it. + // + CFRetain(pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + CGImageRef cgImage = [self copyPixelBuffer:pixelBuffer]; + CFRelease(pixelBuffer); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + // + // mark the graph as busy + // + self.isProcessingFrame = YES; + + // + // Create a tensor for running through the graph. + // + tensorflow::Tensor imageTensor(tensorflow::DT_UINT8, tensorflow::TensorShape({1, kGraphImageHeight, kGraphImageWidth, kGraphChannels})); + auto imageTensorDimensioned = imageTensor.tensor(); + + // + // Gather needed dimensions of the CGImage + // + const int srcHeight = (int) CGImageGetHeight(cgImage); + const int srcWidth = (int) CGImageGetWidth(cgImage); + const int bytesPerRow = (int) CGImageGetBytesPerRow(cgImage); + const int srcChannels = (int) bytesPerRow / srcWidth; + + // + // Scale the pixel data down, drop the alpha channel, and populate the image_tensor. + // The source pointer iterates through the pixelBuffer and the destination pointer + // writes pixel data into the reshaped image tensor. Changing the GraphInputWidth and Height + // may increase (or decrease) speed and/or accuracy. + // + CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage)); + unsigned char *srcStartAddress = (unsigned char*) CFDataGetBytePtr(pixelData); + + // + // if the orientation is landscape-right the source pixels start at the end of the pixel buffer + // and read backwards. dest pixel still ends up in the same row, col. + // + if (orientation == UIDeviceOrientationLandscapeRight) + { + srcStartAddress += (bytesPerRow * srcHeight); + } + + // + // Scale the buffer down to the expected size and shape of the input tensor for the TF graph + // also, drop the alpha component as the pixel format going in is BGA. 
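+    // The capture output was configured for kCVPixelFormatType_32BGRA, so each source
+    // pixel is four bytes (B, G, R, A); copying only kGraphChannels (3) bytes per pixel
+    // below is what drops the alpha byte.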
+ // + unsigned char *destStartAddress = imageTensorDimensioned.data(); + for (int row = 0; row < kGraphImageHeight; ++row) + { + unsigned char *destRow = destStartAddress + (row * kGraphImageWidth * kGraphChannels); + for (int col = 0; col < kGraphImageWidth; ++col) + { + const int srcRow = (int) (row * (srcHeight / kGraphImageHeight)); + const int srcCol = (int) (col * (srcWidth / kGraphImageWidth)); + unsigned char* srcPixel; + + if (orientation == UIDeviceOrientationLandscapeRight) + { + // landscape right - we start at the end of the buffer and read backwards + srcPixel = srcStartAddress - (srcRow * bytesPerRow) - (srcCol * srcChannels); + } + else + { + // landscape left - we start at the beginning of the buffer and read forward + srcPixel = srcStartAddress + (srcRow * bytesPerRow) + (srcCol * srcChannels); + } + + unsigned char* destPixel = destRow + (col * kGraphChannels); + for (int c = 0; c < kGraphChannels; ++c) + { + destPixel[c] = srcPixel[c]; + } + } + } + + // we are done with the CFDataRef + CFRelease(pixelData); + + // + // Move the tensorflow processing to another thread. Not only are there limited pixelBuffers + // but if the thread running the videoPreview gets blocked we will get Late Frame warninigs. + // Running the graph on a background thread keeps things moving. + // + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ + + // + // Get a start time. We will clock the tensorflow processing time. + // + struct timespec ts_start; + clock_gettime(CLOCK_MONOTONIC, &ts_start); + + if (tfSession.get()) + { + // Run through the graph. + std::vector outputs; + tensorflow::Status runStatus = tfSession->Run({{"image_tensor", imageTensor}}, {"detection_boxes", "detection_scores", "detection_classes", "num_detections"}, {}, &outputs); + + if (!runStatus.ok()) + { + LOG(FATAL) << "Error: " << runStatus; + } + else + { + // + // Generate our list of predictions and bounding boxes + // + auto boundingBoxesFlat = outputs[0].flat(); + tensorflow::TTypes::Flat scores_flat = outputs[1].flat(); + tensorflow::TTypes::Flat indices_flat = outputs[2].flat(); + + NSMutableArray * predictions = [[NSMutableArray alloc] init]; + for (int i = 0; i < kGraphMaxPredictions; ++i) + { + // + // once the prediction score falls below our threshold don't bother + // processing any more predictions. 
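+                    // detection_scores come back sorted in descending order, so the first score
+                    // under kPredictionThreshold means every remaining prediction is under it too
+                    // (e.g. with scores {0.92, 0.81, 0.47, ...} and a 0.50 threshold, only two are kept).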
+ // + const float score = scores_flat(i); + if (score < kPredictionThreshold) + { + break; + } + + // + // Keep an array of predictions + // + TensorflowPrediction * prediction = [[TensorflowPrediction alloc] init]; + prediction.score = score; + const int label_index = (tensorflow::int32)indices_flat(i); + prediction.label = [NSString stringWithUTF8String:GetDisplayName(&labelMap, label_index).c_str()]; + prediction.top = boundingBoxesFlat(i * 4 + 0); + prediction.left = boundingBoxesFlat(i * 4 + 1); + prediction.bottom = boundingBoxesFlat(i * 4 + 2); + prediction.right = boundingBoxesFlat(i * 4 + 3); + + // + // Crop the pixels out of the bounding box and put the cropped + // image into the prediction object + // + const int x = prediction.left * kGraphImageWidth; + const int y = prediction.top * kGraphImageHeight; + const int w = (prediction.right * kGraphImageWidth) - x; + const int h = (prediction.bottom * kGraphImageHeight) - y; + CGRect croppedArea = CGRectMake(x, y, w, h); + CGImageRef cropped = CGImageCreateWithImageInRect(cgImage, croppedArea); + prediction.image = [UIImage imageWithCGImage:cropped]; + CGImageRelease(cropped); + + [predictions addObject:prediction]; + } + + // + // Now that predictions are done calculate the amount of time elapsed since the start of processing. + // + struct timespec ts_end; + clock_gettime(CLOCK_MONOTONIC, &ts_end); + struct timespec elapsed = diff(ts_start, ts_end); + + // + // Calculate an average time and output every X frames. + // + self.processingTime += elapsed.tv_sec; + self.processingTime += (elapsed.tv_nsec / 1000000000.0f); + self.framesProcessed += 1; + if (self.framesProcessed % kAverageEveryXFrames == 0) + { + printf("Avg. prediction time: %f\n", self.processingTime / self.framesProcessed); + } + + // + // Notify the UI that we have new predictions. Another class will receive this + // and use the data to draw bounding boxes. + // + dispatch_async(dispatch_get_main_queue(), ^(void) { + [[NSNotificationCenter defaultCenter] postNotificationName:kPredictionsUpdated object:nil userInfo:@{@"predictions" : predictions}]; + }); + + CGImageRelease(cgImage); + } + + self.isProcessingFrame = NO; + } // end --- if (tfSession.get) + }); // end --- dispatch_async +} // end --- runModelOnPixelBuffer() + +@end diff --git a/objC/Tensorflow/TensorflowPrediction.h b/objC/Tensorflow/TensorflowPrediction.h new file mode 100644 index 0000000..c0603ef --- /dev/null +++ b/objC/Tensorflow/TensorflowPrediction.h @@ -0,0 +1,20 @@ +// +// TensorflowPrediction.h +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. +// + +#import +#import + +@interface TensorflowPrediction : NSObject +@property (nonatomic) NSString *label; +@property (nonatomic) UIImage *image; +@property (nonatomic) float score; +@property (nonatomic) float top; +@property (nonatomic) float left; +@property (nonatomic) float right; +@property (nonatomic) float bottom; +@end diff --git a/objC/Tensorflow/TensorflowPrediction.m b/objC/Tensorflow/TensorflowPrediction.m new file mode 100644 index 0000000..63cddb1 --- /dev/null +++ b/objC/Tensorflow/TensorflowPrediction.m @@ -0,0 +1,13 @@ +// +// TensorflowPrediction.m +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. 
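+// TensorflowPrediction is a plain data holder; all of its properties are declared in
+// the header and auto-synthesized, so the implementation body is intentionally empty.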
+// + +#import "TensorflowPrediction.h" + +@implementation TensorflowPrediction + +@end diff --git a/objC/Tensorflow/constants.h b/objC/Tensorflow/constants.h new file mode 100644 index 0000000..b83f149 --- /dev/null +++ b/objC/Tensorflow/constants.h @@ -0,0 +1,10 @@ + +#ifndef constants_h +#define constants_h + +extern NSString * const kAVSessionStarted; +extern NSString * const kSetupResultCameraNotAuthorized; +extern NSString * const kSetupResultSessionConfigurationFailed; +extern NSString * const kPredictionsUpdated; + +#endif /* constants_h */ diff --git a/objC/Tensorflow/constants.m b/objC/Tensorflow/constants.m new file mode 100644 index 0000000..49da618 --- /dev/null +++ b/objC/Tensorflow/constants.m @@ -0,0 +1,8 @@ + +#import + +NSString * const kAVSessionStarted = @"kAVSessionStarted"; +NSString * const kSetupResultCameraNotAuthorized = @"kSetupResultCameraNotAuthorized"; +NSString * const kSetupResultSessionConfigurationFailed = @"SetupResultSessionConfigurationFailed"; +NSString * const kPredictionsUpdated = @"kPredictionsUpdated"; + diff --git a/objC/Tensorflow/ops_to_register.h b/objC/Tensorflow/ops_to_register.h new file mode 100644 index 0000000..bccc3fa --- /dev/null +++ b/objC/Tensorflow/ops_to_register.h @@ -0,0 +1,201 @@ +// This file was autogenerated by print_selective_registration_header.py +#ifndef OPS_TO_REGISTER +#define OPS_TO_REGISTER + + namespace { + constexpr const char* skip(const char* x) { + return (*x) ? (*x == ' ' ? skip(x + 1) : x) : x; + } + + constexpr bool isequal(const char* x, const char* y) { + return (*skip(x) && *skip(y)) + ? (*skip(x) == *skip(y) && isequal(skip(x) + 1, skip(y) + 1)) + : (!*skip(x) && !*skip(y)); + } + + template + struct find_in { + static constexpr bool f(const char* x, const char* const y[N]) { + return isequal(x, y[0]) || find_in::f(x, y + 1); + } + }; + + template<> + struct find_in<0> { + static constexpr bool f(const char* x, const char* const y[]) { + return false; + } + }; + } // end namespace + constexpr const char* kNecessaryOpKernelClasses[] = { +"BinaryOp< CPUDevice, functor::add>", +"BinaryOp< CPUDevice, functor::add>", +"AddNOp< CPUDevice, float>", +"ReductionOp", +"AssertOp", +"AvgPoolingOp", +"BiasOp", +"CpuCastOp", +"ConcatV2Op", +"ConcatV2Op", +"ConstantOp", +//"Conv2DOp", +"Conv2DUsingGemmOp< float, Im2ColConvFunctor>>", +"DepthwiseConv2dNativeOp", +"CropAndResizeOp", +"DequantizeOp", +"EnterOp", +"BinaryOp< CPUDevice, functor::equal_to>", +"ExitOp", +"UnaryOp< CPUDevice, functor::exp>", +"ExpandDimsOp", +"FillOp", +"FillOp", +"GatherOp", +"BinaryOp< CPUDevice, functor::greater>", +"BinaryOp< CPUDevice, functor::greater>", +"BinaryOp< CPUDevice, functor::greater_equal>", +"IdentityOp", +"BinaryOp< CPUDevice, functor::less>", +"BinaryOp", +"LoopCondOp", +"MatMulOp", +"ReductionOp>", +"MaxPoolingOp", +"BinaryOp< CPUDevice, functor::maximum>", +"ReductionOp>", +"MergeOp", +"BinaryOp< CPUDevice, functor::minimum>", +"BinaryOp< CPUDevice, functor::minimum>", +"BinaryOp< CPUDevice, functor::mul>", +"BinaryOp< CPUDevice, functor::mul>", +"NextIterationOp", +"NoOp", +"NonMaxSuppressionOp", +"PackOp", +"PackOp", +"PadOp", +"PlaceholderOp", +"RangeOp<::tensorflow::int32>", +"RankOp", +"BinaryOp< CPUDevice, functor::div>", +"ReluOp", +"Relu6Op", +"ReshapeOp", +"ResizeBilinearOp", +"UnaryOp< CPUDevice, functor::round>", +"UnaryOp< CPUDevice, functor::rsqrt>", +"ShapeOp", +"UnaryOp< CPUDevice, functor::sigmoid>", +"SizeOp", +"SliceOp", +"SliceOp", +"SoftmaxOp", +"SplitOpCPU", +"UnaryOp< CPUDevice, functor::sqrt>", 
+"SqueezeOp", +"StridedSliceOp", +"StridedSliceOp", +"BinaryOp< CPUDevice, functor::sub>", +"BinaryOp< CPUDevice, functor::sub>", +"SwitchOp", +"TensorArrayPackOrGatherOp", +"TensorArrayReadOp", +"TensorArrayUnpackOrScatterOp", +"TensorArraySizeOp", +"TensorArrayOp", +"TensorArrayWriteOp", +"TileOp", +"TopK", +"TransposeCpuOp", +"UnpackOp", +"UnpackOp", +"WhereCPUOp", +"ZerosLikeOp< CPUDevice, float>", +"RecvOp", +"SendOp", +}; +#define SHOULD_REGISTER_OP_KERNEL(clz) (find_in::f(clz, kNecessaryOpKernelClasses)) + +constexpr inline bool ShouldRegisterOp(const char op[]) { + return false + || isequal(op, "Add") + || isequal(op, "AddN") + || isequal(op, "All") + || isequal(op, "Assert") + || isequal(op, "AvgPool") + || isequal(op, "BiasAdd") + || isequal(op, "Cast") + || isequal(op, "ConcatV2") + || isequal(op, "Const") + || isequal(op, "Conv2D") + || isequal(op, "DepthwiseConv2dNative") + || isequal(op, "CropAndResize") + || isequal(op, "Dequantize") + || isequal(op, "Enter") + || isequal(op, "Equal") + || isequal(op, "Exit") + || isequal(op, "Exp") + || isequal(op, "ExpandDims") + || isequal(op, "Fill") + || isequal(op, "Gather") + || isequal(op, "Greater") + || isequal(op, "GreaterEqual") + || isequal(op, "Identity") + || isequal(op, "Less") + || isequal(op, "LogicalAnd") + || isequal(op, "LoopCond") + || isequal(op, "MatMul") + || isequal(op, "Max") + || isequal(op, "MaxPool") + || isequal(op, "Maximum") + || isequal(op, "Mean") + || isequal(op, "Merge") + || isequal(op, "Minimum") + || isequal(op, "Mul") + || isequal(op, "NextIteration") + || isequal(op, "NoOp") + || isequal(op, "NonMaxSuppression") + || isequal(op, "Pack") + || isequal(op, "Pad") + || isequal(op, "Placeholder") + || isequal(op, "Range") + || isequal(op, "Rank") + || isequal(op, "RealDiv") + || isequal(op, "Relu") + || isequal(op, "Relu6") + || isequal(op, "Reshape") + || isequal(op, "ResizeBilinear") + || isequal(op, "Rsqrt") + || isequal(op, "Round") + || isequal(op, "Shape") + || isequal(op, "Sigmoid") + || isequal(op, "Size") + || isequal(op, "Slice") + || isequal(op, "Softmax") + || isequal(op, "Split") + || isequal(op, "Sqrt") + || isequal(op, "Squeeze") + || isequal(op, "StridedSlice") + || isequal(op, "Sub") + || isequal(op, "Switch") + || isequal(op, "TensorArrayGatherV3") + || isequal(op, "TensorArrayReadV3") + || isequal(op, "TensorArrayScatterV3") + || isequal(op, "TensorArraySizeV3") + || isequal(op, "TensorArrayV3") + || isequal(op, "TensorArrayWriteV3") + || isequal(op, "Tile") + || isequal(op, "TopKV2") + || isequal(op, "Transpose") + || isequal(op, "Unpack") + || isequal(op, "Where") + || isequal(op, "ZerosLike") + || isequal(op, "_Recv") + || isequal(op, "_Send") + ; +} +#define SHOULD_REGISTER_OP(op) ShouldRegisterOp(op) + +#define SHOULD_REGISTER_OP_GRADIENT false +#endif diff --git a/objC/Tensorflow/string_int_label_map.pb.cc b/objC/Tensorflow/string_int_label_map.pb.cc new file mode 100644 index 0000000..0f8e818 --- /dev/null +++ b/objC/Tensorflow/string_int_label_map.pb.cc @@ -0,0 +1,961 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: string_int_label_map.proto + +#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION +#include "string_int_label_map.pb.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +// @@protoc_insertion_point(includes) + +namespace object_detection { +namespace protos { +class StringIntLabelMapItemDefaultTypeInternal { +public: + ::google::protobuf::internal::ExplicitlyConstructed + _instance; +} _StringIntLabelMapItem_default_instance_; +class StringIntLabelMapDefaultTypeInternal { +public: + ::google::protobuf::internal::ExplicitlyConstructed + _instance; +} _StringIntLabelMap_default_instance_; + +namespace protobuf_string_5fint_5flabel_5fmap_2eproto { + + +namespace { + +::google::protobuf::Metadata file_level_metadata[2]; + +} // namespace + +PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTableField + const TableStruct::entries[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + {0, 0, 0, ::google::protobuf::internal::kInvalidMask, 0, 0}, +}; + +PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::AuxillaryParseTableField + const TableStruct::aux[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + ::google::protobuf::internal::AuxillaryParseTableField(), +}; +PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTable const + TableStruct::schema[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + { NULL, NULL, 0, -1, -1, -1, -1, NULL, false }, + { NULL, NULL, 0, -1, -1, -1, -1, NULL, false }, +}; + +const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, _has_bits_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, id_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, display_name_), + 0, + 2, + 1, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMap, _has_bits_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMap, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMap, item_), + ~0u, +}; +static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + { 0, 8, sizeof(StringIntLabelMapItem)}, + { 11, 17, sizeof(StringIntLabelMap)}, +}; + +static ::google::protobuf::Message const * const file_default_instances[] = { + reinterpret_cast(&_StringIntLabelMapItem_default_instance_), + reinterpret_cast(&_StringIntLabelMap_default_instance_), +}; + +namespace { + +void protobuf_AssignDescriptors() { + AddDescriptors(); + ::google::protobuf::MessageFactory* factory = NULL; + AssignDescriptors( + "string_int_label_map.proto", schemas, file_default_instances, TableStruct::offsets, factory, + file_level_metadata, NULL, NULL); +} + +void protobuf_AssignDescriptorsOnce() { + static GOOGLE_PROTOBUF_DECLARE_ONCE(once); + ::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors); +} + +void protobuf_RegisterTypes(const ::std::string&) GOOGLE_ATTRIBUTE_COLD; +void protobuf_RegisterTypes(const ::std::string&) { + protobuf_AssignDescriptorsOnce(); + 
::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 2); +} + +} // namespace +void TableStruct::InitDefaultsImpl() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + ::google::protobuf::internal::InitProtobufDefaults(); + _StringIntLabelMapItem_default_instance_._instance.DefaultConstruct(); + ::google::protobuf::internal::OnShutdownDestroyMessage( + &_StringIntLabelMapItem_default_instance_);_StringIntLabelMap_default_instance_._instance.DefaultConstruct(); + ::google::protobuf::internal::OnShutdownDestroyMessage( + &_StringIntLabelMap_default_instance_);} + +void InitDefaults() { + static GOOGLE_PROTOBUF_DECLARE_ONCE(once); + ::google::protobuf::GoogleOnceInit(&once, &TableStruct::InitDefaultsImpl); +} +namespace { +void AddDescriptorsImpl() { + InitDefaults(); + static const char descriptor[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + "\n\032string_int_label_map.proto\022\027object_det" + "ection.protos\"G\n\025StringIntLabelMapItem\022\014" + "\n\004name\030\001 \001(\t\022\n\n\002id\030\002 \001(\005\022\024\n\014display_name" + "\030\003 \001(\t\"Q\n\021StringIntLabelMap\022<\n\004item\030\001 \003(" + "\0132..object_detection.protos.StringIntLab" + "elMapItem" + }; + ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( + descriptor, 209); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( + "string_int_label_map.proto", &protobuf_RegisterTypes); +} +} // anonymous namespace + +void AddDescriptors() { + static GOOGLE_PROTOBUF_DECLARE_ONCE(once); + ::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl); +} +// Force AddDescriptors() to be called at dynamic initialization time. +struct StaticDescriptorInitializer { + StaticDescriptorInitializer() { + AddDescriptors(); + } +} static_descriptor_initializer; + +} // namespace protobuf_string_5fint_5flabel_5fmap_2eproto + + +// =================================================================== + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int StringIntLabelMapItem::kNameFieldNumber; +const int StringIntLabelMapItem::kIdFieldNumber; +const int StringIntLabelMapItem::kDisplayNameFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +StringIntLabelMapItem::StringIntLabelMapItem() + : ::google::protobuf::Message(), _internal_metadata_(NULL) { + if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + } + SharedCtor(); + // @@protoc_insertion_point(constructor:object_detection.protos.StringIntLabelMapItem) +} +StringIntLabelMapItem::StringIntLabelMapItem(const StringIntLabelMapItem& from) + : ::google::protobuf::Message(), + _internal_metadata_(NULL), + _has_bits_(from._has_bits_), + _cached_size_(0) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.has_name()) { + name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); + } + display_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.has_display_name()) { + display_name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.display_name_); + } + id_ = from.id_; + // @@protoc_insertion_point(copy_constructor:object_detection.protos.StringIntLabelMapItem) +} + +void StringIntLabelMapItem::SharedCtor() { + _cached_size_ = 0; + name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
display_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + id_ = 0; +} + +StringIntLabelMapItem::~StringIntLabelMapItem() { + // @@protoc_insertion_point(destructor:object_detection.protos.StringIntLabelMapItem) + SharedDtor(); +} + +void StringIntLabelMapItem::SharedDtor() { + name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + display_name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} + +void StringIntLabelMapItem::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* StringIntLabelMapItem::descriptor() { + protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce(); + return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; +} + +const StringIntLabelMapItem& StringIntLabelMapItem::default_instance() { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + return *internal_default_instance(); +} + +StringIntLabelMapItem* StringIntLabelMapItem::New(::google::protobuf::Arena* arena) const { + StringIntLabelMapItem* n = new StringIntLabelMapItem; + if (arena != NULL) { + arena->Own(n); + } + return n; +} + +void StringIntLabelMapItem::Clear() { +// @@protoc_insertion_point(message_clear_start:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + cached_has_bits = _has_bits_[0]; + if (cached_has_bits & 3u) { + if (cached_has_bits & 0x00000001u) { + GOOGLE_DCHECK(!name_.IsDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited())); + (*name_.UnsafeRawStringPointer())->clear(); + } + if (cached_has_bits & 0x00000002u) { + GOOGLE_DCHECK(!display_name_.IsDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited())); + (*display_name_.UnsafeRawStringPointer())->clear(); + } + } + id_ = 0; + _has_bits_.Clear(); + _internal_metadata_.Clear(); +} + +bool StringIntLabelMapItem::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:object_detection.protos.StringIntLabelMapItem) + for (;;) { + ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->name().data(), static_cast(this->name().length()), + ::google::protobuf::internal::WireFormat::PARSE, + "object_detection.protos.StringIntLabelMapItem.name"); + } else { + goto handle_unusual; + } + break; + } + + // optional int32 id = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { + set_has_id(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int32, 
::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, &id_))); + } else { + goto handle_unusual; + } + break; + } + + // optional string display_name = 3; + case 3: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(26u /* 26 & 0xFF */)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_display_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->display_name().data(), static_cast(this->display_name().length()), + ::google::protobuf::internal::WireFormat::PARSE, + "object_detection.protos.StringIntLabelMapItem.display_name"); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:object_detection.protos.StringIntLabelMapItem) + return true; +failure: + // @@protoc_insertion_point(parse_failure:object_detection.protos.StringIntLabelMapItem) + return false; +#undef DO_ +} + +void StringIntLabelMapItem::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + cached_has_bits = _has_bits_[0]; + // optional string name = 1; + if (cached_has_bits & 0x00000001u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->name().data(), static_cast(this->name().length()), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.name"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 1, this->name(), output); + } + + // optional int32 id = 2; + if (cached_has_bits & 0x00000004u) { + ::google::protobuf::internal::WireFormatLite::WriteInt32(2, this->id(), output); + } + + // optional string display_name = 3; + if (cached_has_bits & 0x00000002u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->display_name().data(), static_cast(this->display_name().length()), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.display_name"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 3, this->display_name(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:object_detection.protos.StringIntLabelMapItem) +} + +::google::protobuf::uint8* StringIntLabelMapItem::InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const { + (void)deterministic; // Unused + // @@protoc_insertion_point(serialize_to_array_start:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + cached_has_bits = _has_bits_[0]; + // optional string name = 1; + if (cached_has_bits & 0x00000001u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->name().data(), static_cast(this->name().length()), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.name"); + target = + 
::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // optional int32 id = 2; + if (cached_has_bits & 0x00000004u) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(2, this->id(), target); + } + + // optional string display_name = 3; + if (cached_has_bits & 0x00000002u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->display_name().data(), static_cast(this->display_name().length()), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.display_name"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 3, this->display_name(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:object_detection.protos.StringIntLabelMapItem) + return target; +} + +size_t StringIntLabelMapItem::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:object_detection.protos.StringIntLabelMapItem) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + if (_has_bits_[0 / 32] & 7u) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional string display_name = 3; + if (has_display_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->display_name()); + } + + // optional int32 id = 2; + if (has_id()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size( + this->id()); + } + + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = cached_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void StringIntLabelMapItem::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:object_detection.protos.StringIntLabelMapItem) + GOOGLE_DCHECK_NE(&from, this); + const StringIntLabelMapItem* source = + ::google::protobuf::internal::DynamicCastToGenerated( + &from); + if (source == NULL) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:object_detection.protos.StringIntLabelMapItem) + MergeFrom(*source); + } +} + +void StringIntLabelMapItem::MergeFrom(const StringIntLabelMapItem& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:object_detection.protos.StringIntLabelMapItem) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + cached_has_bits = from._has_bits_[0]; + if (cached_has_bits & 7u) { + if (cached_has_bits & 0x00000001u) { + set_has_name(); + name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); + } + if (cached_has_bits & 0x00000002u) { + set_has_display_name(); + 
display_name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.display_name_); + } + if (cached_has_bits & 0x00000004u) { + id_ = from.id_; + } + _has_bits_[0] |= cached_has_bits; + } +} + +void StringIntLabelMapItem::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:object_detection.protos.StringIntLabelMapItem) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void StringIntLabelMapItem::CopyFrom(const StringIntLabelMapItem& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:object_detection.protos.StringIntLabelMapItem) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool StringIntLabelMapItem::IsInitialized() const { + return true; +} + +void StringIntLabelMapItem::Swap(StringIntLabelMapItem* other) { + if (other == this) return; + InternalSwap(other); +} +void StringIntLabelMapItem::InternalSwap(StringIntLabelMapItem* other) { + using std::swap; + name_.Swap(&other->name_); + display_name_.Swap(&other->display_name_); + swap(id_, other->id_); + swap(_has_bits_[0], other->_has_bits_[0]); + _internal_metadata_.Swap(&other->_internal_metadata_); + swap(_cached_size_, other->_cached_size_); +} + +::google::protobuf::Metadata StringIntLabelMapItem::GetMetadata() const { + protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce(); + return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages]; +} + +#if PROTOBUF_INLINE_NOT_IN_HEADERS +// StringIntLabelMapItem + +// optional string name = 1; +bool StringIntLabelMapItem::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +void StringIntLabelMapItem::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +void StringIntLabelMapItem::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +void StringIntLabelMapItem::clear_name() { + name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_name(); +} +const ::std::string& StringIntLabelMapItem::name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.name) + return name_.GetNoArena(); +} +void StringIntLabelMapItem::set_name(const ::std::string& value) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.name) +} +#if LANG_CXX11 +void StringIntLabelMapItem::set_name(::std::string&& value) { + set_has_name(); + name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.name) +} +#endif +void StringIntLabelMapItem::set_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.name) +} +void StringIntLabelMapItem::set_name(const char* value, size_t size) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.name) +} +::std::string* StringIntLabelMapItem::mutable_name() { + set_has_name(); + // 
@@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.name) + return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +::std::string* StringIntLabelMapItem::release_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.name) + clear_has_name(); + return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +void StringIntLabelMapItem::set_allocated_name(::std::string* name) { + if (name != NULL) { + set_has_name(); + } else { + clear_has_name(); + } + name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.name) +} + +// optional int32 id = 2; +bool StringIntLabelMapItem::has_id() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +void StringIntLabelMapItem::set_has_id() { + _has_bits_[0] |= 0x00000004u; +} +void StringIntLabelMapItem::clear_has_id() { + _has_bits_[0] &= ~0x00000004u; +} +void StringIntLabelMapItem::clear_id() { + id_ = 0; + clear_has_id(); +} +::google::protobuf::int32 StringIntLabelMapItem::id() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.id) + return id_; +} +void StringIntLabelMapItem::set_id(::google::protobuf::int32 value) { + set_has_id(); + id_ = value; + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.id) +} + +// optional string display_name = 3; +bool StringIntLabelMapItem::has_display_name() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +void StringIntLabelMapItem::set_has_display_name() { + _has_bits_[0] |= 0x00000002u; +} +void StringIntLabelMapItem::clear_has_display_name() { + _has_bits_[0] &= ~0x00000002u; +} +void StringIntLabelMapItem::clear_display_name() { + display_name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_display_name(); +} +const ::std::string& StringIntLabelMapItem::display_name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.GetNoArena(); +} +void StringIntLabelMapItem::set_display_name(const ::std::string& value) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.display_name) +} +#if LANG_CXX11 +void StringIntLabelMapItem::set_display_name(::std::string&& value) { + set_has_display_name(); + display_name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.display_name) +} +#endif +void StringIntLabelMapItem::set_display_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.display_name) +} +void StringIntLabelMapItem::set_display_name(const char* value, size_t size) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // 
@@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.display_name) +} +::std::string* StringIntLabelMapItem::mutable_display_name() { + set_has_display_name(); + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +::std::string* StringIntLabelMapItem::release_display_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.display_name) + clear_has_display_name(); + return display_name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +void StringIntLabelMapItem::set_allocated_display_name(::std::string* display_name) { + if (display_name != NULL) { + set_has_display_name(); + } else { + clear_has_display_name(); + } + display_name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), display_name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.display_name) +} + +#endif // PROTOBUF_INLINE_NOT_IN_HEADERS + +// =================================================================== + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int StringIntLabelMap::kItemFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +StringIntLabelMap::StringIntLabelMap() + : ::google::protobuf::Message(), _internal_metadata_(NULL) { + if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + } + SharedCtor(); + // @@protoc_insertion_point(constructor:object_detection.protos.StringIntLabelMap) +} +StringIntLabelMap::StringIntLabelMap(const StringIntLabelMap& from) + : ::google::protobuf::Message(), + _internal_metadata_(NULL), + _has_bits_(from._has_bits_), + _cached_size_(0), + item_(from.item_) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + // @@protoc_insertion_point(copy_constructor:object_detection.protos.StringIntLabelMap) +} + +void StringIntLabelMap::SharedCtor() { + _cached_size_ = 0; +} + +StringIntLabelMap::~StringIntLabelMap() { + // @@protoc_insertion_point(destructor:object_detection.protos.StringIntLabelMap) + SharedDtor(); +} + +void StringIntLabelMap::SharedDtor() { +} + +void StringIntLabelMap::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* StringIntLabelMap::descriptor() { + protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce(); + return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; +} + +const StringIntLabelMap& StringIntLabelMap::default_instance() { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + return *internal_default_instance(); +} + +StringIntLabelMap* StringIntLabelMap::New(::google::protobuf::Arena* arena) const { + StringIntLabelMap* n = new StringIntLabelMap; + if (arena != NULL) { + arena->Own(n); + } + return n; +} + +void StringIntLabelMap::Clear() { +// @@protoc_insertion_point(message_clear_start:object_detection.protos.StringIntLabelMap) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + item_.Clear(); + _has_bits_.Clear(); + _internal_metadata_.Clear(); +} + +bool StringIntLabelMap::MergePartialFromCodedStream( + 
::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:object_detection.protos.StringIntLabelMap) + for (;;) { + ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_item())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:object_detection.protos.StringIntLabelMap) + return true; +failure: + // @@protoc_insertion_point(parse_failure:object_detection.protos.StringIntLabelMap) + return false; +#undef DO_ +} + +void StringIntLabelMap::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:object_detection.protos.StringIntLabelMap) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + for (unsigned int i = 0, + n = static_cast(this->item_size()); i < n; i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, this->item(static_cast(i)), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:object_detection.protos.StringIntLabelMap) +} + +::google::protobuf::uint8* StringIntLabelMap::InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const { + (void)deterministic; // Unused + // @@protoc_insertion_point(serialize_to_array_start:object_detection.protos.StringIntLabelMap) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + for (unsigned int i = 0, + n = static_cast(this->item_size()); i < n; i++) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageNoVirtualToArray( + 1, this->item(static_cast(i)), deterministic, target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:object_detection.protos.StringIntLabelMap) + return target; +} + +size_t StringIntLabelMap::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:object_detection.protos.StringIntLabelMap) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + { + unsigned int count = 
static_cast(this->item_size()); + total_size += 1UL * count; + for (unsigned int i = 0; i < count; i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->item(static_cast(i))); + } + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = cached_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void StringIntLabelMap::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:object_detection.protos.StringIntLabelMap) + GOOGLE_DCHECK_NE(&from, this); + const StringIntLabelMap* source = + ::google::protobuf::internal::DynamicCastToGenerated( + &from); + if (source == NULL) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:object_detection.protos.StringIntLabelMap) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:object_detection.protos.StringIntLabelMap) + MergeFrom(*source); + } +} + +void StringIntLabelMap::MergeFrom(const StringIntLabelMap& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:object_detection.protos.StringIntLabelMap) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + item_.MergeFrom(from.item_); +} + +void StringIntLabelMap::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:object_detection.protos.StringIntLabelMap) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void StringIntLabelMap::CopyFrom(const StringIntLabelMap& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:object_detection.protos.StringIntLabelMap) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool StringIntLabelMap::IsInitialized() const { + return true; +} + +void StringIntLabelMap::Swap(StringIntLabelMap* other) { + if (other == this) return; + InternalSwap(other); +} +void StringIntLabelMap::InternalSwap(StringIntLabelMap* other) { + using std::swap; + item_.InternalSwap(&other->item_); + swap(_has_bits_[0], other->_has_bits_[0]); + _internal_metadata_.Swap(&other->_internal_metadata_); + swap(_cached_size_, other->_cached_size_); +} + +::google::protobuf::Metadata StringIntLabelMap::GetMetadata() const { + protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce(); + return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages]; +} + +#if PROTOBUF_INLINE_NOT_IN_HEADERS +// StringIntLabelMap + +// repeated .object_detection.protos.StringIntLabelMapItem item = 1; +int StringIntLabelMap::item_size() const { + return item_.size(); +} +void StringIntLabelMap::clear_item() { + item_.Clear(); +} +const ::object_detection::protos::StringIntLabelMapItem& StringIntLabelMap::item(int index) const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMap.item) + return item_.Get(index); +} +::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::mutable_item(int index) { + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMap.item) + return item_.Mutable(index); +} +::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::add_item() { + // @@protoc_insertion_point(field_add:object_detection.protos.StringIntLabelMap.item) + return 
item_.Add(); +} +::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >* +StringIntLabelMap::mutable_item() { + // @@protoc_insertion_point(field_mutable_list:object_detection.protos.StringIntLabelMap.item) + return &item_; +} +const ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >& +StringIntLabelMap::item() const { + // @@protoc_insertion_point(field_list:object_detection.protos.StringIntLabelMap.item) + return item_; +} + +#endif // PROTOBUF_INLINE_NOT_IN_HEADERS + +// @@protoc_insertion_point(namespace_scope) + +} // namespace protos +} // namespace object_detection + +// @@protoc_insertion_point(global_scope) diff --git a/objC/Tensorflow/string_int_label_map.pb.h b/objC/Tensorflow/string_int_label_map.pb.h new file mode 100644 index 0000000..a834acd --- /dev/null +++ b/objC/Tensorflow/string_int_label_map.pb.h @@ -0,0 +1,528 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: string_int_label_map.proto + +#ifndef PROTOBUF_string_5fint_5flabel_5fmap_2eproto__INCLUDED +#define PROTOBUF_string_5fint_5flabel_5fmap_2eproto__INCLUDED + +#include + +#include + +#if GOOGLE_PROTOBUF_VERSION < 3004000 +#error This file was generated by a newer version of protoc which is +#error incompatible with your Protocol Buffer headers. Please update +#error your headers. +#endif +#if 3004000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION +#error This file was generated by an older version of protoc which is +#error incompatible with your Protocol Buffer headers. Please +#error regenerate this file with a newer version of protoc. +#endif + +#include +#include +#include +#include +#include +#include +#include +#include // IWYU pragma: export +#include // IWYU pragma: export +#include +// @@protoc_insertion_point(includes) +namespace object_detection { +namespace protos { +class StringIntLabelMap; +class StringIntLabelMapDefaultTypeInternal; +extern StringIntLabelMapDefaultTypeInternal _StringIntLabelMap_default_instance_; +class StringIntLabelMapItem; +class StringIntLabelMapItemDefaultTypeInternal; +extern StringIntLabelMapItemDefaultTypeInternal _StringIntLabelMapItem_default_instance_; +} // namespace protos +} // namespace object_detection + +namespace object_detection { +namespace protos { + +namespace protobuf_string_5fint_5flabel_5fmap_2eproto { +// Internal implementation detail -- do not call these. 
+struct TableStruct { + static const ::google::protobuf::internal::ParseTableField entries[]; + static const ::google::protobuf::internal::AuxillaryParseTableField aux[]; + static const ::google::protobuf::internal::ParseTable schema[]; + static const ::google::protobuf::uint32 offsets[]; + static const ::google::protobuf::internal::FieldMetadata field_metadata[]; + static const ::google::protobuf::internal::SerializationTable serialization_table[]; + static void InitDefaultsImpl(); +}; +void AddDescriptors(); +void InitDefaults(); +} // namespace protobuf_string_5fint_5flabel_5fmap_2eproto + +// =================================================================== + +class StringIntLabelMapItem : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:object_detection.protos.StringIntLabelMapItem) */ { + public: + StringIntLabelMapItem(); + virtual ~StringIntLabelMapItem(); + + StringIntLabelMapItem(const StringIntLabelMapItem& from); + + inline StringIntLabelMapItem& operator=(const StringIntLabelMapItem& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + StringIntLabelMapItem(StringIntLabelMapItem&& from) noexcept + : StringIntLabelMapItem() { + *this = ::std::move(from); + } + + inline StringIntLabelMapItem& operator=(StringIntLabelMapItem&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _internal_metadata_.unknown_fields(); + } + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return _internal_metadata_.mutable_unknown_fields(); + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const StringIntLabelMapItem& default_instance(); + + static inline const StringIntLabelMapItem* internal_default_instance() { + return reinterpret_cast( + &_StringIntLabelMapItem_default_instance_); + } + static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = + 0; + + void Swap(StringIntLabelMapItem* other); + friend void swap(StringIntLabelMapItem& a, StringIntLabelMapItem& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline StringIntLabelMapItem* New() const PROTOBUF_FINAL { return New(NULL); } + + StringIntLabelMapItem* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL; + void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; + void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; + void CopyFrom(const StringIntLabelMapItem& from); + void MergeFrom(const StringIntLabelMapItem& from); + void Clear() PROTOBUF_FINAL; + bool IsInitialized() const PROTOBUF_FINAL; + + size_t ByteSizeLong() const PROTOBUF_FINAL; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL; + int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const PROTOBUF_FINAL; + void InternalSwap(StringIntLabelMapItem* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return NULL; + } + inline void* MaybeArenaPtr() const { 
+ return NULL; + } + public: + + ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + bool has_name() const; + void clear_name(); + static const int kNameFieldNumber = 1; + const ::std::string& name() const; + void set_name(const ::std::string& value); + #if LANG_CXX11 + void set_name(::std::string&& value); + #endif + void set_name(const char* value); + void set_name(const char* value, size_t size); + ::std::string* mutable_name(); + ::std::string* release_name(); + void set_allocated_name(::std::string* name); + + // optional string display_name = 3; + bool has_display_name() const; + void clear_display_name(); + static const int kDisplayNameFieldNumber = 3; + const ::std::string& display_name() const; + void set_display_name(const ::std::string& value); + #if LANG_CXX11 + void set_display_name(::std::string&& value); + #endif + void set_display_name(const char* value); + void set_display_name(const char* value, size_t size); + ::std::string* mutable_display_name(); + ::std::string* release_display_name(); + void set_allocated_display_name(::std::string* display_name); + + // optional int32 id = 2; + bool has_id() const; + void clear_id(); + static const int kIdFieldNumber = 2; + ::google::protobuf::int32 id() const; + void set_id(::google::protobuf::int32 value); + + // @@protoc_insertion_point(class_scope:object_detection.protos.StringIntLabelMapItem) + private: + void set_has_name(); + void clear_has_name(); + void set_has_id(); + void clear_has_id(); + void set_has_display_name(); + void clear_has_display_name(); + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::HasBits<1> _has_bits_; + mutable int _cached_size_; + ::google::protobuf::internal::ArenaStringPtr name_; + ::google::protobuf::internal::ArenaStringPtr display_name_; + ::google::protobuf::int32 id_; + friend struct protobuf_string_5fint_5flabel_5fmap_2eproto::TableStruct; +}; +// ------------------------------------------------------------------- + +class StringIntLabelMap : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:object_detection.protos.StringIntLabelMap) */ { + public: + StringIntLabelMap(); + virtual ~StringIntLabelMap(); + + StringIntLabelMap(const StringIntLabelMap& from); + + inline StringIntLabelMap& operator=(const StringIntLabelMap& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + StringIntLabelMap(StringIntLabelMap&& from) noexcept + : StringIntLabelMap() { + *this = ::std::move(from); + } + + inline StringIntLabelMap& operator=(StringIntLabelMap&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _internal_metadata_.unknown_fields(); + } + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return _internal_metadata_.mutable_unknown_fields(); + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const StringIntLabelMap& default_instance(); + + static inline const StringIntLabelMap* internal_default_instance() { + return reinterpret_cast( + &_StringIntLabelMap_default_instance_); + } + static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = + 1; + + 
void Swap(StringIntLabelMap* other); + friend void swap(StringIntLabelMap& a, StringIntLabelMap& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline StringIntLabelMap* New() const PROTOBUF_FINAL { return New(NULL); } + + StringIntLabelMap* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL; + void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; + void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; + void CopyFrom(const StringIntLabelMap& from); + void MergeFrom(const StringIntLabelMap& from); + void Clear() PROTOBUF_FINAL; + bool IsInitialized() const PROTOBUF_FINAL; + + size_t ByteSizeLong() const PROTOBUF_FINAL; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL; + int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const PROTOBUF_FINAL; + void InternalSwap(StringIntLabelMap* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return NULL; + } + inline void* MaybeArenaPtr() const { + return NULL; + } + public: + + ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + int item_size() const; + void clear_item(); + static const int kItemFieldNumber = 1; + const ::object_detection::protos::StringIntLabelMapItem& item(int index) const; + ::object_detection::protos::StringIntLabelMapItem* mutable_item(int index); + ::object_detection::protos::StringIntLabelMapItem* add_item(); + ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >* + mutable_item(); + const ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >& + item() const; + + // @@protoc_insertion_point(class_scope:object_detection.protos.StringIntLabelMap) + private: + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::HasBits<1> _has_bits_; + mutable int _cached_size_; + ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem > item_; + friend struct protobuf_string_5fint_5flabel_5fmap_2eproto::TableStruct; +}; +// =================================================================== + + +// =================================================================== + +#if !PROTOBUF_INLINE_NOT_IN_HEADERS +#ifdef __GNUC__ + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstrict-aliasing" +#endif // __GNUC__ +// StringIntLabelMapItem + +// optional string name = 1; +inline bool StringIntLabelMapItem::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void StringIntLabelMapItem::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void StringIntLabelMapItem::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void StringIntLabelMapItem::clear_name() { + name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_name(); +} +inline 
const ::std::string& StringIntLabelMapItem::name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.name) + return name_.GetNoArena(); +} +inline void StringIntLabelMapItem::set_name(const ::std::string& value) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.name) +} +#if LANG_CXX11 +inline void StringIntLabelMapItem::set_name(::std::string&& value) { + set_has_name(); + name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.name) +} +#endif +inline void StringIntLabelMapItem::set_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.name) +} +inline void StringIntLabelMapItem::set_name(const char* value, size_t size) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.name) +} +inline ::std::string* StringIntLabelMapItem::mutable_name() { + set_has_name(); + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.name) + return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* StringIntLabelMapItem::release_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.name) + clear_has_name(); + return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void StringIntLabelMapItem::set_allocated_name(::std::string* name) { + if (name != NULL) { + set_has_name(); + } else { + clear_has_name(); + } + name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.name) +} + +// optional int32 id = 2; +inline bool StringIntLabelMapItem::has_id() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void StringIntLabelMapItem::set_has_id() { + _has_bits_[0] |= 0x00000004u; +} +inline void StringIntLabelMapItem::clear_has_id() { + _has_bits_[0] &= ~0x00000004u; +} +inline void StringIntLabelMapItem::clear_id() { + id_ = 0; + clear_has_id(); +} +inline ::google::protobuf::int32 StringIntLabelMapItem::id() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.id) + return id_; +} +inline void StringIntLabelMapItem::set_id(::google::protobuf::int32 value) { + set_has_id(); + id_ = value; + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.id) +} + +// optional string display_name = 3; +inline bool StringIntLabelMapItem::has_display_name() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void StringIntLabelMapItem::set_has_display_name() { + _has_bits_[0] |= 0x00000002u; +} +inline void StringIntLabelMapItem::clear_has_display_name() { + _has_bits_[0] &= ~0x00000002u; +} +inline void StringIntLabelMapItem::clear_display_name() { + 
display_name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_display_name(); +} +inline const ::std::string& StringIntLabelMapItem::display_name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.GetNoArena(); +} +inline void StringIntLabelMapItem::set_display_name(const ::std::string& value) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.display_name) +} +#if LANG_CXX11 +inline void StringIntLabelMapItem::set_display_name(::std::string&& value) { + set_has_display_name(); + display_name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.display_name) +} +#endif +inline void StringIntLabelMapItem::set_display_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.display_name) +} +inline void StringIntLabelMapItem::set_display_name(const char* value, size_t size) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.display_name) +} +inline ::std::string* StringIntLabelMapItem::mutable_display_name() { + set_has_display_name(); + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* StringIntLabelMapItem::release_display_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.display_name) + clear_has_display_name(); + return display_name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void StringIntLabelMapItem::set_allocated_display_name(::std::string* display_name) { + if (display_name != NULL) { + set_has_display_name(); + } else { + clear_has_display_name(); + } + display_name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), display_name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.display_name) +} + +// ------------------------------------------------------------------- + +// StringIntLabelMap + +// repeated .object_detection.protos.StringIntLabelMapItem item = 1; +inline int StringIntLabelMap::item_size() const { + return item_.size(); +} +inline void StringIntLabelMap::clear_item() { + item_.Clear(); +} +inline const ::object_detection::protos::StringIntLabelMapItem& StringIntLabelMap::item(int index) const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMap.item) + return item_.Get(index); +} +inline ::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::mutable_item(int index) { + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMap.item) + return item_.Mutable(index); +} +inline 
::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::add_item() { + // @@protoc_insertion_point(field_add:object_detection.protos.StringIntLabelMap.item) + return item_.Add(); +} +inline ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >* +StringIntLabelMap::mutable_item() { + // @@protoc_insertion_point(field_mutable_list:object_detection.protos.StringIntLabelMap.item) + return &item_; +} +inline const ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >& +StringIntLabelMap::item() const { + // @@protoc_insertion_point(field_list:object_detection.protos.StringIntLabelMap.item) + return item_; +} + +#ifdef __GNUC__ + #pragma GCC diagnostic pop +#endif // __GNUC__ +#endif // !PROTOBUF_INLINE_NOT_IN_HEADERS +// ------------------------------------------------------------------- + + +// @@protoc_insertion_point(namespace_scope) + + +} // namespace protos +} // namespace object_detection + +// @@protoc_insertion_point(global_scope) + +#endif // PROTOBUF_string_5fint_5flabel_5fmap_2eproto__INCLUDED diff --git a/objC/Tensorflow/tensorflowUtils.h b/objC/Tensorflow/tensorflowUtils.h new file mode 100644 index 0000000..1409cd0 --- /dev/null +++ b/objC/Tensorflow/tensorflowUtils.h @@ -0,0 +1,40 @@ + +#ifndef tensorflowUtils_h +#define tensorflowUtils_h + +#include "tensorflow/core/public/session.h" +#include "tensorflow/core/util/memmapped_file_system.h" +#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" +namespace object_detection +{ + namespace protos + { + class StringIntLabelMap; + } +} + +// Reads a serialized GraphDef protobuf file from the bundle, typically +// created with the freeze_graph script. Populates the session argument with a +// Session object that has the model loaded. +tensorflow::Status loadModel(NSString* file_name, + NSString* file_type, + std::unique_ptr* session); + +// Loads a model from a file that has been created using the +// convert_graphdef_memmapped_format tool. This bundles together a GraphDef +// proto together with a file that can be memory-mapped, containing the weight +// parameters for the model. This is useful because it reduces the overall +// memory pressure, since the read-only parameter regions can be easily paged +// out and don't count toward memory limits on iOS. +tensorflow::Status loadMemoryMappedModel(NSString* file_name, + NSString* file_type, + std::unique_ptr* session, + std::unique_ptr* memmapped_env); + +// Loads a text file of a label map in mscoco style. +tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap); + +// Takes a label Map and an index into it. Returns the 'DisplayName' field in the protobuf +std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index); +timespec diff(timespec start, timespec end); +#endif /* tensorflowUtils_h */ diff --git a/objC/Tensorflow/tensorflowUtils.mm b/objC/Tensorflow/tensorflowUtils.mm new file mode 100644 index 0000000..6425888 --- /dev/null +++ b/objC/Tensorflow/tensorflowUtils.mm @@ -0,0 +1,166 @@ + +#import + +#include "tensorflowUtils.h" +#include +#include +#include +#include "string_int_label_map.pb.h" + + +// Helper class borrowed from some utils that loads protobufs efficiently. 
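+// A hypothetical call site (not taken from this project's other sources) for the
+// two loaders declared in tensorflowUtils.h might look roughly like the sketch
+// below. The resource names match the graph and label-map files bundled by the
+// Xcode project; the error handling is only illustrative.
+//
+//     std::unique_ptr<tensorflow::Session> session;
+//     object_detection::protos::StringIntLabelMap labelMap;
+//     tensorflow::Status status = loadModel(@"op_inference_graph", @"pb", &session);
+//     if (status.ok())
+//     {
+//         status = loadLabels(@"mscoco_label_map", @"txt", &labelMap);
+//     }
+//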
+namespace
+{
+    class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream
+    {
+    public:
+        explicit IfstreamInputStream(const std::string& file_name) : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {}
+        ~IfstreamInputStream() { ifs_.close(); }
+
+        int Read(void *buffer, int size)
+        {
+            if (!ifs_)
+            {
+                return -1;
+            }
+            ifs_.read(static_cast<char *>(buffer), size);
+            return (int)ifs_.gcount();
+        }
+
+    private:
+        std::ifstream ifs_;
+    };
+}
+
+#pragma mark - Private
+
+NSString *filePathForResourceName(NSString *name, NSString *extension)
+{
+    NSString *filePath = [[NSBundle mainBundle] pathForResource:name ofType:extension];
+
+    if (filePath == NULL)
+    {
+        LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "." << [extension UTF8String] << "' in bundle.";
+        return nullptr;
+    }
+    return filePath;
+}
+
+bool PortableReadFileToProto(const std::string& fileName, ::google::protobuf::MessageLite *proto)
+{
+    ::google::protobuf::io::CopyingInputStreamAdaptor stream(new IfstreamInputStream(fileName));
+    stream.SetOwnsCopyingStream(true);
+    ::google::protobuf::io::CodedInputStream codedStream(&stream);
+
+    // Total bytes hard limit / warning limit are set to 1GB and 512MB
+    // respectively.
+    codedStream.SetTotalBytesLimit(1024LL << 20, 512LL << 20);
+    return proto->ParseFromCodedStream(&codedStream);
+}
+
+#pragma mark - Public
+
+tensorflow::Status loadModel(NSString *fileName, NSString *fileType, std::unique_ptr<tensorflow::Session> *session)
+{
+    tensorflow::SessionOptions options;
+
+    tensorflow::Session *sessionPointer = nullptr;
+    tensorflow::Status sessionStatus = tensorflow::NewSession(options, &sessionPointer);
+
+    if (!sessionStatus.ok())
+    {
+        LOG(ERROR) << "Could not create TensorFlow Session: " << sessionStatus;
+        return sessionStatus;
+    }
+    session->reset(sessionPointer);
+
+    tensorflow::GraphDef tensorflowGraph;
+
+    NSString *modelPath = filePathForResourceName(fileName, fileType);
+
+    if (!modelPath)
+    {
+        LOG(ERROR) << "Failed to find model proto at " << [fileName UTF8String] << [fileType UTF8String];
+        return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]);
+    }
+
+    const bool readProtoSucceeded = PortableReadFileToProto([modelPath UTF8String], &tensorflowGraph);
+
+    if (!readProtoSucceeded)
+    {
+        LOG(ERROR) << "Failed to load model proto from " << [modelPath UTF8String];
+        return tensorflow::errors::NotFound([modelPath UTF8String]);
+    }
+
+    tensorflow::Status create_status = (*session)->Create(tensorflowGraph);
+
+    if (!create_status.ok())
+    {
+        LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status;
+        return create_status;
+    }
+
+    return tensorflow::Status::OK();
+}
+
+tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap)
+{
+    // Read the label list
+    NSString *labelsPath = filePathForResourceName(fileName, fileType);
+
+    if (!labelsPath)
+    {
+        LOG(ERROR) << "Failed to find label map at " << [fileName UTF8String] << [fileType UTF8String];
+        return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]);
+    }
+
+    int fileDescriptor = open([labelsPath UTF8String], O_RDONLY);
+    if (fileDescriptor >= 0)
+    {
+        google::protobuf::io::FileInputStream fileInput(fileDescriptor);
+        fileInput.SetCloseOnDelete( true );
+
+        if (!google::protobuf::TextFormat::Parse(&fileInput, labelMap))
+        {
+            LOG(ERROR) << "Failed to parse label file.\n";
+            return tensorflow::errors::Aborted([fileName UTF8String], [fileType UTF8String]);
+        }
+    }
+
+    return
tensorflow::Status::OK(); +} + +std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index) +{ + for (int i = 0; i < labels->item_size(); ++i) + { + const object_detection::protos::StringIntLabelMapItem& item = labels->item(i); + if (index == item.id()) + { + return item.display_name(); + } + } + + return ""; +} + +// +// Calculate and return elapsed time between to struct timespecs +// +timespec diff(timespec start, timespec end) +{ + timespec temp; + if ((end.tv_nsec-start.tv_nsec)<0) + { + temp.tv_sec = end.tv_sec-start.tv_sec-1; + temp.tv_nsec = 1000000000+end.tv_nsec-start.tv_nsec; + } + else + { + temp.tv_sec = end.tv_sec-start.tv_sec; + temp.tv_nsec = end.tv_nsec-start.tv_nsec; + } + return temp; +} + + diff --git a/objC/tensorflowiOS.xcodeproj/project.pbxproj b/objC/tensorflowiOS.xcodeproj/project.pbxproj new file mode 100644 index 0000000..81ca486 --- /dev/null +++ b/objC/tensorflowiOS.xcodeproj/project.pbxproj @@ -0,0 +1,526 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 46; + objects = { + +/* Begin PBXBuildFile section */ + 2206265F1A1E330400A45150 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 2206265E1A1E330400A45150 /* main.m */; }; + 220626881A1E345E00A45150 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 220626831A1E345E00A45150 /* AppDelegate.m */; }; + 220626891A1E345E00A45150 /* CameraPreviewView.m in Sources */ = {isa = PBXBuildFile; fileRef = 220626851A1E345E00A45150 /* CameraPreviewView.m */; }; + 2206268A1A1E345E00A45150 /* ViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 220626871A1E345E00A45150 /* ViewController.mm */; }; + 5317E2541F940FB700514D13 /* op_inference_graph.pb in Resources */ = {isa = PBXBuildFile; fileRef = 5317E2531F940F7800514D13 /* op_inference_graph.pb */; }; + 5317E2551F940FB900514D13 /* op_inference_graph_inv2.pb in Resources */ = {isa = PBXBuildFile; fileRef = 5317E2521F940F7100514D13 /* op_inference_graph_inv2.pb */; }; + 5317E2561F940FBD00514D13 /* op_inference_graph_rcnn.pb in Resources */ = {isa = PBXBuildFile; fileRef = 5317E2501F940F6100514D13 /* op_inference_graph_rcnn.pb */; }; + 5317E2571F940FC100514D13 /* mscoco_label_map.txt in Resources */ = {isa = PBXBuildFile; fileRef = 5317E2511F940F6900514D13 /* mscoco_label_map.txt */; }; + 53341A221FA44770003D2A2D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 53341A201FA44770003D2A2D /* LaunchScreen.storyboard */; }; + 53341A241FA44935003D2A2D /* Default-568h@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 53341A231FA44935003D2A2D /* Default-568h@2x.png */; }; + 5353B3DC1F8B30A300493AB0 /* string_int_label_map.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5353B3DA1F8B30A200493AB0 /* string_int_label_map.pb.cc */; }; + 635E14501F82E28D0025E912 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 635E144F1F82E28D0025E912 /* Accelerate.framework */; }; + 635E14571F82E4330025E912 /* libprotobuf-lite.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 635E14551F82E4330025E912 /* libprotobuf-lite.a */; }; + 635E14581F82E4330025E912 /* libprotobuf.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 635E14561F82E4330025E912 /* libprotobuf.a */; }; + 635E14591F82E65C0025E912 /* libtensorflow-core.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 635E14531F82E3CE0025E912 /* libtensorflow-core.a */; }; + 635E14611F82FEF90025E912 /* tensorflowUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = 635E14601F82FEF80025E912 /* 
tensorflowUtils.mm */; }; + 635E14641F82FF380025E912 /* constants.m in Sources */ = {isa = PBXBuildFile; fileRef = 635E14631F82FF380025E912 /* constants.m */; }; + 635E14681F8301840025E912 /* libstdc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 635E14671F8301840025E912 /* libstdc++.tbd */; }; + 637BC7851F804F920029265F /* TensorflowGraph.mm in Sources */ = {isa = PBXBuildFile; fileRef = 637BC7841F804F920029265F /* TensorflowGraph.mm */; }; + 638820541F8C66880009B2DE /* TensorflowPrediction.m in Sources */ = {isa = PBXBuildFile; fileRef = 638820531F8C66880009B2DE /* TensorflowPrediction.m */; }; + 638820591F8C72490009B2DE /* BoundingBoxView.m in Sources */ = {isa = PBXBuildFile; fileRef = 638820581F8C72490009B2DE /* BoundingBoxView.m */; }; + 7A74447C1CEE6B4B00C70C83 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7A74447B1CEE6B4B00C70C83 /* Assets.xcassets */; }; + 7A74447E1CEE6B5900C70C83 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 7A74447D1CEE6B5900C70C83 /* Main.storyboard */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 220626591A1E330400A45150 /* tensorflowiOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = tensorflowiOS.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 2206265D1A1E330400A45150 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 2206265E1A1E330400A45150 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; + 220626821A1E345E00A45150 /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; + 220626831A1E345E00A45150 /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; + 220626841A1E345E00A45150 /* CameraPreviewView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CameraPreviewView.h; sourceTree = ""; }; + 220626851A1E345E00A45150 /* CameraPreviewView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CameraPreviewView.m; sourceTree = ""; }; + 220626861A1E345E00A45150 /* ViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; + 220626871A1E345E00A45150 /* ViewController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ViewController.mm; sourceTree = ""; }; + 5317E2501F940F6100514D13 /* op_inference_graph_rcnn.pb */ = {isa = PBXFileReference; lastKnownFileType = file; name = op_inference_graph_rcnn.pb; path = Models/faster_rcnn_resnet101_coco_11_06_2017/op_inference_graph_rcnn.pb; sourceTree = SOURCE_ROOT; }; + 5317E2511F940F6900514D13 /* mscoco_label_map.txt */ = {isa = PBXFileReference; lastKnownFileType = text; name = mscoco_label_map.txt; path = Models/mscoco_label_map.txt; sourceTree = SOURCE_ROOT; }; + 5317E2521F940F7100514D13 /* op_inference_graph_inv2.pb */ = {isa = PBXFileReference; lastKnownFileType = file; name = op_inference_graph_inv2.pb; path = Models/ssd_inception_v2_coco_11_06_2017/op_inference_graph_inv2.pb; sourceTree = SOURCE_ROOT; }; + 5317E2531F940F7800514D13 /* op_inference_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; name = op_inference_graph.pb; path = 
Models/ssd_mobilenet_v1_coco_11_06_2017/op_inference_graph.pb; sourceTree = SOURCE_ROOT; }; + 53341A211FA44770003D2A2D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 53341A231FA44935003D2A2D /* Default-568h@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Default-568h@2x.png"; sourceTree = ""; }; + 5353B3DA1F8B30A200493AB0 /* string_int_label_map.pb.cc */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.cpp; path = string_int_label_map.pb.cc; sourceTree = ""; }; + 5353B3DB1F8B30A300493AB0 /* string_int_label_map.pb.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = string_int_label_map.pb.h; sourceTree = ""; }; + 53AA6BD91FC5F9FF00074E49 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; + 635E144F1F82E28D0025E912 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; + 635E14511F82E37E0025E912 /* libtensorflow-core-arm64.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libtensorflow-core-arm64.a"; path = "../../tensorflow/tensorflow/contrib/makefile/gen/lib/ios_ARM64/libtensorflow-core-arm64.a"; sourceTree = ""; }; + 635E14531F82E3CE0025E912 /* libtensorflow-core.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libtensorflow-core.a"; path = "../../tensorflow/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a"; sourceTree = ""; }; + 635E14551F82E4330025E912 /* libprotobuf-lite.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = "libprotobuf-lite.a"; path = "../../tensorflow/tensorflow/contrib/makefile/gen/protobuf_ios/lib/libprotobuf-lite.a"; sourceTree = ""; }; + 635E14561F82E4330025E912 /* libprotobuf.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libprotobuf.a; path = ../../tensorflow/tensorflow/contrib/makefile/gen/protobuf_ios/lib/libprotobuf.a; sourceTree = ""; }; + 635E145A1F82E9250025E912 /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "usr/lib/libc++.tbd"; sourceTree = SDKROOT; }; + 635E145F1F82FEF80025E912 /* tensorflowUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = tensorflowUtils.h; sourceTree = ""; }; + 635E14601F82FEF80025E912 /* tensorflowUtils.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = tensorflowUtils.mm; sourceTree = ""; }; + 635E14621F82FF370025E912 /* constants.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = constants.h; sourceTree = ""; }; + 635E14631F82FF380025E912 /* constants.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = constants.m; sourceTree = ""; }; + 635E14651F83015C0025E912 /* libstdc++.6.0.9.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libstdc++.6.0.9.tbd"; path = "usr/lib/libstdc++.6.0.9.tbd"; sourceTree = SDKROOT; }; + 635E14671F8301840025E912 /* libstdc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libstdc++.tbd"; path = "usr/lib/libstdc++.tbd"; sourceTree = SDKROOT; }; + 637BC7831F804F920029265F /* TensorflowGraph.h */ = 
{isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TensorflowGraph.h; sourceTree = ""; }; + 637BC7841F804F920029265F /* TensorflowGraph.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = TensorflowGraph.mm; sourceTree = ""; }; + 638820521F8C66880009B2DE /* TensorflowPrediction.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = TensorflowPrediction.h; sourceTree = ""; }; + 638820531F8C66880009B2DE /* TensorflowPrediction.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = TensorflowPrediction.m; sourceTree = ""; }; + 638820571F8C72490009B2DE /* BoundingBoxView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = BoundingBoxView.h; sourceTree = ""; }; + 638820581F8C72490009B2DE /* BoundingBoxView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = BoundingBoxView.m; sourceTree = ""; }; + 63E6BBB11F8BDB74003DB62B /* tensorflow.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = tensorflow.xcconfig; sourceTree = ""; }; + 7A74447B1CEE6B4B00C70C83 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 7A74447D1CEE6B5900C70C83 /* Main.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; path = Main.storyboard; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 220626561A1E330400A45150 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 635E14681F8301840025E912 /* libstdc++.tbd in Frameworks */, + 635E14591F82E65C0025E912 /* libtensorflow-core.a in Frameworks */, + 635E14571F82E4330025E912 /* libprotobuf-lite.a in Frameworks */, + 635E14581F82E4330025E912 /* libprotobuf.a in Frameworks */, + 635E14501F82E28D0025E912 /* Accelerate.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 220626501A1E330400A45150 = { + isa = PBXGroup; + children = ( + 53AA6BD91FC5F9FF00074E49 /* README.md */, + 537273F71FA4263700BAC5A8 /* App */, + 2206265B1A1E330400A45150 /* Tensorflow */, + 5317E24C1F940F1600514D13 /* Models */, + 537273F61FA2E37900BAC5A8 /* Assets */, + 537273F51FA2E34500BAC5A8 /* SupportingFiles */, + 2206265A1A1E330400A45150 /* Products */, + 635E144E1F82E28C0025E912 /* Frameworks */, + ); + sourceTree = ""; + }; + 2206265A1A1E330400A45150 /* Products */ = { + isa = PBXGroup; + children = ( + 220626591A1E330400A45150 /* tensorflowiOS.app */, + ); + name = Products; + sourceTree = ""; + }; + 2206265B1A1E330400A45150 /* Tensorflow */ = { + isa = PBXGroup; + children = ( + 637BC7831F804F920029265F /* TensorflowGraph.h */, + 637BC7841F804F920029265F /* TensorflowGraph.mm */, + 638820521F8C66880009B2DE /* TensorflowPrediction.h */, + 638820531F8C66880009B2DE /* TensorflowPrediction.m */, + 5353B3DA1F8B30A200493AB0 /* string_int_label_map.pb.cc */, + 5353B3DB1F8B30A300493AB0 /* string_int_label_map.pb.h */, + 635E145F1F82FEF80025E912 /* tensorflowUtils.h */, + 635E14601F82FEF80025E912 /* tensorflowUtils.mm */, + 635E14621F82FF370025E912 /* constants.h */, + 635E14631F82FF380025E912 /* constants.m */, + ); + path = Tensorflow; + sourceTree = ""; + }; + 5317E24C1F940F1600514D13 /* Models */ = { + isa = PBXGroup; + children = ( + 5317E2511F940F6900514D13 /* mscoco_label_map.txt */, + 5317E24F1F940F3E00514D13 /* 
SSD-MobileNet */, + 5317E24E1F940F3600514D13 /* SSD-Inception */, + 5317E24D1F940F2B00514D13 /* Faster-RCNN */, + ); + path = Models; + sourceTree = ""; + }; + 5317E24D1F940F2B00514D13 /* Faster-RCNN */ = { + isa = PBXGroup; + children = ( + 5317E2501F940F6100514D13 /* op_inference_graph_rcnn.pb */, + ); + name = "Faster-RCNN"; + path = faster_rcnn_resnet101_coco_11_06_2017; + sourceTree = ""; + }; + 5317E24E1F940F3600514D13 /* SSD-Inception */ = { + isa = PBXGroup; + children = ( + 5317E2521F940F7100514D13 /* op_inference_graph_inv2.pb */, + ); + name = "SSD-Inception"; + path = ssd_inception_v2_coco_11_06_2017; + sourceTree = ""; + }; + 5317E24F1F940F3E00514D13 /* SSD-MobileNet */ = { + isa = PBXGroup; + children = ( + 5317E2531F940F7800514D13 /* op_inference_graph.pb */, + ); + name = "SSD-MobileNet"; + path = ssd_mobilenet_v1_coco_11_06_2017; + sourceTree = ""; + }; + 537273F51FA2E34500BAC5A8 /* SupportingFiles */ = { + isa = PBXGroup; + children = ( + 2206265D1A1E330400A45150 /* Info.plist */, + 63E6BBB11F8BDB74003DB62B /* tensorflow.xcconfig */, + 2206265E1A1E330400A45150 /* main.m */, + ); + path = SupportingFiles; + sourceTree = ""; + }; + 537273F61FA2E37900BAC5A8 /* Assets */ = { + isa = PBXGroup; + children = ( + 53341A231FA44935003D2A2D /* Default-568h@2x.png */, + 7A74447D1CEE6B5900C70C83 /* Main.storyboard */, + 7A74447B1CEE6B4B00C70C83 /* Assets.xcassets */, + 53341A201FA44770003D2A2D /* LaunchScreen.storyboard */, + ); + path = Assets; + sourceTree = ""; + }; + 537273F71FA4263700BAC5A8 /* App */ = { + isa = PBXGroup; + children = ( + 220626831A1E345E00A45150 /* AppDelegate.m */, + 220626821A1E345E00A45150 /* AppDelegate.h */, + 220626861A1E345E00A45150 /* ViewController.h */, + 220626871A1E345E00A45150 /* ViewController.mm */, + 220626841A1E345E00A45150 /* CameraPreviewView.h */, + 220626851A1E345E00A45150 /* CameraPreviewView.m */, + 638820571F8C72490009B2DE /* BoundingBoxView.h */, + 638820581F8C72490009B2DE /* BoundingBoxView.m */, + ); + path = App; + sourceTree = ""; + }; + 635E144E1F82E28C0025E912 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 635E14671F8301840025E912 /* libstdc++.tbd */, + 635E14651F83015C0025E912 /* libstdc++.6.0.9.tbd */, + 635E145A1F82E9250025E912 /* libc++.tbd */, + 635E14551F82E4330025E912 /* libprotobuf-lite.a */, + 635E14561F82E4330025E912 /* libprotobuf.a */, + 635E14531F82E3CE0025E912 /* libtensorflow-core.a */, + 635E14511F82E37E0025E912 /* libtensorflow-core-arm64.a */, + 635E144F1F82E28D0025E912 /* Accelerate.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 220626581A1E330400A45150 /* tensorflowiOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = 2206267C1A1E330400A45150 /* Build configuration list for PBXNativeTarget "tensorflowiOS" */; + buildPhases = ( + 220626551A1E330400A45150 /* Sources */, + 220626561A1E330400A45150 /* Frameworks */, + 220626571A1E330400A45150 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = tensorflowiOS; + productName = tensorflowiOS; + productReference = 220626591A1E330400A45150 /* tensorflowiOS.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 220626511A1E330400A45150 /* Project object */ = { + isa = PBXProject; + attributes = { + LastUpgradeCheck = 0900; + ORGANIZATIONNAME = Apple; + TargetAttributes = { + 220626581A1E330400A45150 = { + CreatedOnToolsVersion = 6.1; + DevelopmentTeam = G8WMBH6ZQE; 
+ ProvisioningStyle = Automatic; + }; + }; + }; + buildConfigurationList = 220626541A1E330400A45150 /* Build configuration list for PBXProject "tensorflowiOS" */; + compatibilityVersion = "Xcode 3.2"; + developmentRegion = English; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 220626501A1E330400A45150; + productRefGroup = 2206265A1A1E330400A45150 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 220626581A1E330400A45150 /* tensorflowiOS */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 220626571A1E330400A45150 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 5317E2561F940FBD00514D13 /* op_inference_graph_rcnn.pb in Resources */, + 7A74447E1CEE6B5900C70C83 /* Main.storyboard in Resources */, + 5317E2571F940FC100514D13 /* mscoco_label_map.txt in Resources */, + 7A74447C1CEE6B4B00C70C83 /* Assets.xcassets in Resources */, + 53341A221FA44770003D2A2D /* LaunchScreen.storyboard in Resources */, + 53341A241FA44935003D2A2D /* Default-568h@2x.png in Resources */, + 5317E2551F940FB900514D13 /* op_inference_graph_inv2.pb in Resources */, + 5317E2541F940FB700514D13 /* op_inference_graph.pb in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 220626551A1E330400A45150 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 220626881A1E345E00A45150 /* AppDelegate.m in Sources */, + 2206265F1A1E330400A45150 /* main.m in Sources */, + 638820591F8C72490009B2DE /* BoundingBoxView.m in Sources */, + 635E14641F82FF380025E912 /* constants.m in Sources */, + 635E14611F82FEF90025E912 /* tensorflowUtils.mm in Sources */, + 220626891A1E345E00A45150 /* CameraPreviewView.m in Sources */, + 637BC7851F804F920029265F /* TensorflowGraph.mm in Sources */, + 5353B3DC1F8B30A300493AB0 /* string_int_label_map.pb.cc in Sources */, + 2206268A1A1E345E00A45150 /* ViewController.mm in Sources */, + 638820541F8C66880009B2DE /* TensorflowPrediction.m in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXVariantGroup section */ + 53341A201FA44770003D2A2D /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 53341A211FA44770003D2A2D /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 2206267A1A1E330400A45150 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + 
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_SYMBOLS_PRIVATE_EXTERN = NO; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 2206267B1A1E330400A45150 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = YES; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 2206267D1A1E330400A45150 /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 63E6BBB11F8BDB74003DB62B /* tensorflow.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CODE_SIGN_IDENTITY = "iPhone Developer"; + DEVELOPMENT_TEAM = G8WMBH6ZQE; + ENABLE_BITCODE = NO; + GCC_INPUT_FILETYPE = automatic; + HEADER_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/eigen", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/protobuf/src", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/proto", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/public/", + ); + INFOPLIST_FILE = SupportingFiles/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/protobuf_ios/lib/", + ); + OTHER_LDFLAGS = ( + "-force_load", + 
"$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a", + "-lprotobuf-lite", + "-lprotobuf", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/builds/${CURRENT_ARCH}.ios.c++11/nsync.a", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.username.tensorflowiOS; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SDKROOT = iphoneos; + }; + name = Debug; + }; + 2206267E1A1E330400A45150 /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 63E6BBB11F8BDB74003DB62B /* tensorflow.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CODE_SIGN_IDENTITY = "iPhone Developer"; + DEVELOPMENT_TEAM = G8WMBH6ZQE; + ENABLE_BITCODE = NO; + GCC_INPUT_FILETYPE = automatic; + HEADER_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/eigen", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/protobuf/src", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/proto", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/public/", + ); + INFOPLIST_FILE = SupportingFiles/Info.plist; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/protobuf_ios/lib/", + ); + OTHER_LDFLAGS = ( + "-force_load", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a", + "-lprotobuf-lite", + "-lprotobuf", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/builds/${CURRENT_ARCH}.ios.c++11/nsync.a", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.username.tensorflowiOS; + PRODUCT_NAME = "$(TARGET_NAME)"; + PROVISIONING_PROFILE_SPECIFIER = ""; + SDKROOT = iphoneos; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 220626541A1E330400A45150 /* Build configuration list for PBXProject "tensorflowiOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 2206267A1A1E330400A45150 /* Debug */, + 2206267B1A1E330400A45150 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 2206267C1A1E330400A45150 /* Build configuration list for PBXNativeTarget "tensorflowiOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 2206267D1A1E330400A45150 /* Debug */, + 2206267E1A1E330400A45150 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 220626511A1E330400A45150 /* Project object */; +} diff --git a/objC/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/objC/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000..919434a --- /dev/null +++ b/objC/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/swift/App/AppDelegate.swift b/swift/App/AppDelegate.swift new file mode 100644 index 0000000..9412bf0 --- /dev/null +++ b/swift/App/AppDelegate.swift @@ -0,0 +1,46 @@ +// +// AppDelegate.swift +// tensorflowiOS +// +// Created by Chris Sharp on 11/10/17. +// Copyright © 2017 Chris Sharp. All rights reserved. 
+// + +import UIKit + +@UIApplicationMain +class AppDelegate: UIResponder, UIApplicationDelegate { + + var window: UIWindow? + + + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { + // Override point for customization after application launch. + return true + } + + func applicationWillResignActive(_ application: UIApplication) { + // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. + // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. + } + + func applicationDidEnterBackground(_ application: UIApplication) { + // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. + // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. + } + + func applicationWillEnterForeground(_ application: UIApplication) { + // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. + } + + func applicationDidBecomeActive(_ application: UIApplication) { + // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. + } + + func applicationWillTerminate(_ application: UIApplication) { + // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. + } + + +} + diff --git a/swift/App/BoundingBoxView.swift b/swift/App/BoundingBoxView.swift new file mode 100644 index 0000000..bc597ef --- /dev/null +++ b/swift/App/BoundingBoxView.swift @@ -0,0 +1,125 @@ +// +// BoundingBoxView.swift +// tensorflowiOS +// +// Created by Chris Sharp on 11/11/17. +// Copyright © 2017 Chris Sharp. All rights reserved. +// + +import UIKit + +class BoundingBoxView: UIView +{ + let BoundingBoxLineWidth = 3.5 + var boxesToBeErased : [TensorflowPrediction] = [] + var boxesToBeDrawn : [TensorflowPrediction] = [] + var labels : [UILabel] = [] + + // + // in drawRect we have a clear UIView that we draw green bounding boxes in. + // As a new list of boundingboxes comes in we erase the old boxes and draw the new ones. + // Since this view is just a layer over the videoPreview the bounding boxes could be a few + // frames behind and the box will not align with the object underneath it. This will likely + // be an issue until Tensorflow processing is as fast as the video preview's frame capture. + // + override func draw(_ rect: CGRect) + { + // + // Our drawing context + // + if let context:CGContext = UIGraphicsGetCurrentContext() { + + // + // The width of the bounding box lines. 
+ // + context.setLineWidth(CGFloat(BoundingBoxLineWidth)); + + // + // The fill color of the bounding box is always clear + // + context.setFillColor(red: 1.0, green: 1.0, blue: 1.0, alpha: 0.0) + + // + // Erase boxes from the previous frame + // + if (!self.boxesToBeErased.isEmpty) + { + for pred:TensorflowPrediction in self.boxesToBeErased + { + // Erase the previous bounding box by using a clear stroke color + context.setStrokeColor(red: 1.0, green: 1.0, blue: 1.0, alpha: 0.0) + + // Calculate box dimensions of box to be erased. + let x = CGFloat(pred.left) * self.frame.size.width + let y = CGFloat(pred.top) * self.frame.size.height + let w = (CGFloat(pred.right) * self.frame.size.width) - x + let h = (CGFloat(pred.bottom) * self.frame.size.height) - y + let rectangle:CGRect = CGRect(x: x, y: y, width: w, height: h) + + //Erase it. (draw clear pixels over the green) + context.fill(rectangle) + context.stroke(rectangle) + } + + // + // Remove existing labels too. + // + for label in labels + { + label.removeFromSuperview() + } + + self.labels.removeAll() + self.boxesToBeErased.removeAll() + } + + // + // Draw newly predicted boxes + // + for pred:TensorflowPrediction in boxesToBeDrawn { + // + // Calculate the box dimensions. The box dimensions are given + // as normalized values. Because this view has the same dimensions + // as the original image multiplying by width and height gives the + // correct location for the bounding box. + // + let x = CGFloat(pred.left) * self.frame.size.width; + let y = CGFloat(pred.top) * self.frame.size.height; + let w = (CGFloat(pred.right) * self.frame.size.width) - x; + let h = (CGFloat(pred.bottom) * self.frame.size.height) - y; + let rectangle = CGRect(x: x, y: y, width: w, height: h) + + // Draw with a green stroke. + context.setStrokeColor(red: 0.0, green: 1.0, blue: 0.0, alpha: 0.75) + context.fill(rectangle) + context.stroke(rectangle) + + // Add the label to the upper left of the bounding box + let label:UILabel = UILabel(frame: CGRect(x: x, y: y, width: 75, height: 35)) + label.backgroundColor = UIColor.white + label.textColor = UIColor.orange + label.text = pred.label + self.addSubview(label) + + // + // Keep a list of labels so we can easily remove from superview. + // + labels.append(label) + } + } + } + + func updateBoundingBoxes(_ boxes:[TensorflowPrediction]) + { + // + // flag the old boxes to be erased and flag the new to be drawn. + // + self.boxesToBeErased = self.boxesToBeDrawn; + self.boxesToBeDrawn = boxes; + + // + // trigger a drawRect call next frame + // + self.setNeedsDisplay() + } +} diff --git a/swift/App/CameraPreviewView.swift b/swift/App/CameraPreviewView.swift new file mode 100644 index 0000000..cabc0f0 --- /dev/null +++ b/swift/App/CameraPreviewView.swift @@ -0,0 +1,288 @@ +// +// CameraPreviewView.swift +// tensorflowiOS +// +// Created by Chris Sharp on 11/11/17. +// Copyright © 2017 Chris Sharp. All rights reserved. +// + +import UIKit +import AVFoundation + +class CameraPreviewView: UIView +{ + private enum SessionSetupResult + { + case success + case notAuthorized + case configurationFailed + } + + private var cameraSetupResult: SessionSetupResult = .success + private let avSession = AVCaptureSession() + private var isSessionRunning = false + + // Communicate with the session and other session objects on this queue. 
+ private let previewSessionQueue = DispatchQueue(label: "PreviewSessionQueue") + + // We use a serial queue for the video frames so that they are dispatched in the order that they are captured + private let videoSessionQueue = DispatchQueue(label: "VideoFrameQueue") + + private let videoOutput:AVCaptureVideoDataOutput = AVCaptureVideoDataOutput() + + private var keyValueObservations = [NSKeyValueObservation]() + + required init?(coder aDecoder: NSCoder) + { + super.init(coder: aDecoder) + setupSession() + } + + //////////////////////////////////////// + // MARK: - Video Session Setup and Configuration + + func setupSession() + { + self.videoPreviewLayer.session = avSession + + switch AVCaptureDevice.authorizationStatus(for: .video) + { + case .authorized: + // The user has previously granted access to the camera. + break + + case .notDetermined: + /* + The user has not yet been presented with the option to grant + video access. We suspend the session queue to delay session + setup until the access request has completed. + */ + previewSessionQueue.suspend() + AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in + if !granted + { + self.cameraSetupResult = .notAuthorized + } + self.previewSessionQueue.resume() + }) + + default: + // The user has previously denied access. + cameraSetupResult = .notAuthorized + } + } + + + func configureSession(delegate:AVCaptureVideoDataOutputSampleBufferDelegate ) + { + previewSessionQueue.async { + + if (self.cameraSetupResult != .success) + { + return + } + + self.avSession.beginConfiguration() + + // Add video input. + do + { + var defaultVideoDevice: AVCaptureDevice? + + // Choose the back dual camera if available, otherwise default to a wide angle camera. + if let dualCameraDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) + { + defaultVideoDevice = dualCameraDevice + } + else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) + { + // If the back dual camera is not available, default to the back wide angle camera. + defaultVideoDevice = backCameraDevice + } + else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + { + /* + In some cases where users break their phones, the back wide angle camera is not available. + In this case, we should default to the front wide angle camera. + */ + defaultVideoDevice = frontCameraDevice + } + + let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice!) + + if self.avSession.canAddInput(videoDeviceInput) + { + self.avSession.addInput(videoDeviceInput) + + DispatchQueue.main.async { + // Dispatching this to the main queue because a UIView (CameraPreviewView) can only be + // changed on the main thread. 
+ let statusBarOrientation = UIApplication.shared.statusBarOrientation + var initialVideoOrientation: AVCaptureVideoOrientation = .portrait + if (statusBarOrientation != .unknown) + { + if let videoOrientation = AVCaptureVideoOrientation(interfaceOrientation: statusBarOrientation) { + initialVideoOrientation = videoOrientation + } + } + + self.videoPreviewLayer.connection?.videoOrientation = initialVideoOrientation + } + } + else + { + print("Could not add video device input to the session") + self.cameraSetupResult = .configurationFailed + self.avSession.commitConfiguration() + return + } + } + catch + { + print("Could not create video device input: \(error)") + self.cameraSetupResult = .configurationFailed + self.avSession.commitConfiguration() + return + } + + // + // let's not forget that we need video output too. + // + self.addVideoOutput(delegate) + + self.avSession.commitConfiguration() + } // previewSessionQueue.async() + } + + private func addVideoOutput(_ delegate:AVCaptureVideoDataOutputSampleBufferDelegate) + { + // + // We use the 32 bit BGRA pixel format type. That way we can just pass the data to + // Tensorflow without pre-processing. + // + let newSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_32BGRA] + videoOutput.videoSettings = newSettings; + videoOutput.alwaysDiscardsLateVideoFrames = true + videoOutput.setSampleBufferDelegate(delegate, queue: videoSessionQueue) + + // + // Add the videoOutput to our AVSession + // + if avSession.canAddOutput(videoOutput) + { + avSession.beginConfiguration() + avSession.addOutput(videoOutput) + avSession.sessionPreset = AVCaptureSession.Preset.high; + let connection:AVCaptureConnection = videoOutput.connection(with: AVMediaType.video)! + if ( connection.isVideoStabilizationSupported ) + { + connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto + } + + avSession.commitConfiguration() + } + } + + /////////////////////////////////////////////////////////////////////// + // MARK: - UIView and Session life cycle + var videoPreviewLayer: AVCaptureVideoPreviewLayer + { + guard let layer = layer as? AVCaptureVideoPreviewLayer else { + fatalError("Expected `AVCaptureVideoPreviewLayer` type for layer. Check PreviewView.layerClass implementation.") + } + + return layer + } + + var session: AVCaptureSession? { + get { + return videoPreviewLayer.session + } + set { + videoPreviewLayer.session = newValue + } + } + + override class var layerClass: AnyClass + { + return AVCaptureVideoPreviewLayer.self + } + + private func addObservers() + { + NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError(notification:)), + name: .AVCaptureSessionRuntimeError, + object: avSession) + } + + private func removeObservers() + { + NotificationCenter.default.removeObserver(self) + + for keyValueObservation in keyValueObservations + { + keyValueObservation.invalidate() + } + keyValueObservations.removeAll() + } + + func startSession() + { + previewSessionQueue.async { + switch ( self.cameraSetupResult ) + { + case SessionSetupResult.success: + // if setup succeeded we can add Observers and the frame delegate and run the session. + self.addObservers() + + self.avSession.startRunning() + self.isSessionRunning = self.avSession.isRunning; + + // Let everyone know we have a session. 
+ NotificationCenter.default.post(name: NSNotification.Name(rawValue: kAVSessionStarted), object:nil) + + case .notAuthorized: + NotificationCenter.default.post(name: NSNotification.Name(rawValue: kSetupResultCameraNotAuthorized), object: nil) + + case .configurationFailed: + NotificationCenter.default.post(name: NSNotification.Name(rawValue: kSetupResultSessionConfigurationFailed), object: nil) + } + } + } + + func stopSession() + { + previewSessionQueue.async { + if ( self.cameraSetupResult == .success ) + { + self.avSession.stopRunning() + self.removeObservers() + } + } + } + + + @objc func sessionRuntimeError(notification: NSNotification) + { + guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { return } + + print("Capture session runtime error: \(error)") + + /* + Automatically try to restart the session running if media services were + reset and the last start running succeeded. Otherwise, enable the user + to try to resume the session running. + */ + if error.code == .mediaServicesWereReset + { + previewSessionQueue.async { + if self.isSessionRunning + { + self.avSession.startRunning() + self.isSessionRunning = self.avSession.isRunning + } + } + } + } +} + diff --git a/swift/App/ViewController.swift b/swift/App/ViewController.swift new file mode 100644 index 0000000..cf544b9 --- /dev/null +++ b/swift/App/ViewController.swift @@ -0,0 +1,225 @@ + +// +// ViewController.swift +// tensorflowiOS +// +// Created by Chris Sharp on 11/10/17. +// Copyright © 2017 Chris Sharp. All rights reserved. +// + +import UIKit +import AVFoundation + + +class ViewController:UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate +{ + @IBOutlet weak var cameraUnavailableLabel : UILabel! + @IBOutlet weak var boundingBoxView : BoundingBoxView! + @IBOutlet weak var cameraPreviewView : CameraPreviewView! + var tensorflowGraph:TensorflowGraph? = nil + + override func viewDidLoad() + { + super.viewDidLoad() + + // + // Configure the video preview. We will grab frames + // from the video preview and feed them into the tensorflow graph. + // Then bounding boxes can be rendered onto the boundingBoxView. + // + cameraPreviewView.configureSession(delegate: self) + } + + + override func viewWillAppear(_ animated: Bool) + { + super.viewWillAppear(animated) + + // + // Listen for the start of the AVSession. This will signal the start + // of the delivery of video frames and will trigger the + // initialization of the tensorflow graph + // + NotificationCenter.default.addObserver(self, selector: #selector(OnAvSessionStarted(notification:)), + name: NSNotification.Name(rawValue: kAVSessionStarted), + object: nil) + + // + // Also Listen for Session initialization failure or for when + // the user doesn't authorize the use of the camera + // + NotificationCenter.default.addObserver(self, selector: #selector(OnSetupResultCameraNotAuthorized(notification:)), + name: Notification.Name(kSetupResultCameraNotAuthorized), + object:nil) + + NotificationCenter.default.addObserver(self, selector: #selector(OnSetupResultSessionConfigurationFailed(notification:)), + name: Notification.Name(kSetupResultSessionConfigurationFailed), + object:nil) + + // + // Respond to the tensorflow graph's update of predictions. This will + // trigger the redrawing of the bounding boxes. + // + NotificationCenter.default.addObserver(self, selector: #selector(OnPredictionsUpdated(notification:)), + name: Notification.Name(kPredictionsUpdated), + object:nil) + // + // Start the AV Session. 
This will prompt the user for
+        // permission to use the camera to present a video preview.
+        //
+        cameraPreviewView.startSession()
+    }
+
+    //
+    // When the view disappears we shut down the session. It will be restarted in viewWillAppear.
+    //
+    override func viewWillDisappear(_ animated: Bool)
+    {
+        super.viewWillDisappear(animated)
+        cameraPreviewView.stopSession()
+    }
+
+    //
+    // Yes, please autorotate, but we will have to change the orientation of the pixel buffer when we run the graph.
+    //
+    override var shouldAutorotate: Bool
+    {
+        return true
+    }
+
+    //
+    // Supporting only landscape.
+    //
+    override var supportedInterfaceOrientations: UIInterfaceOrientationMask
+    {
+        return .landscape
+    }
+
+    //
+    // Override viewWillTransitionToSize so that we can update the videoPreviewLayer with the new orientation.
+    //
+    override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator)
+    {
+        //
+        // call super so the coordinator can be passed on.
+        //
+        super.viewWillTransition(to: size, with: coordinator)
+
+        if let videoPreviewLayerConnection = cameraPreviewView.videoPreviewLayer.connection
+        {
+            //
+            // ignore everything but landscape orientation changes.
+            //
+            let deviceOrientation = UIDevice.current.orientation
+            guard let newVideoOrientation = AVCaptureVideoOrientation(deviceOrientation: deviceOrientation), deviceOrientation.isLandscape else {
+                return
+            }
+
+            videoPreviewLayerConnection.videoOrientation = newVideoOrientation
+        }
+    }
+
+    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
+    {
+        let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
+
+        if tensorflowGraph != nil
+        {
+            tensorflowGraph?.runModel(on: pixelBuffer, orientation: UIDevice.current.orientation)
+        }
+    }
+
+    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection)
+    {
+        // do something with dropped frames here
+    }
+
+    ////////////////////////////////////////////////////////////////////////////////////////////////////////////
+    // MARK: - Notification Handlers
+
+    @objc func OnAvSessionStarted(notification: NSNotification)
+    {
+        // Now that the user has granted permission to the camera
+        // and we have a video session we can initialize our graph.
+ tensorflowGraph = TensorflowGraph() + } + + @objc func OnSetupResultCameraNotAuthorized(notification: NSNotification) + { + DispatchQueue.main.async { + let changePrivacySetting = "Please grant permission to use the camera in Settings" + let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when we have no access to the camera") + let alertController = UIAlertController(title: "TensorflowiOS", message: message, preferredStyle: .alert) + + alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), + style: .cancel, + handler: nil)) + + alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Button to open Settings"), + style: .`default`, + handler: { _ in + UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil) + })) + + self.present(alertController, animated: true, completion: nil) + } + } + + @objc func OnSetupResultSessionConfigurationFailed(notification: NSNotification) + { + DispatchQueue.main.async { + let alertMsg = "Something went wrong during capture session configuration" + let message = NSLocalizedString("Unable to capture media", comment: alertMsg) + let alertController = UIAlertController(title: "TensorflowiOS", message: message, preferredStyle: .alert) + + alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "OK button"), + style: .cancel, + handler: nil)) + + self.present(alertController, animated: true, completion: nil) + } + } + + @objc func OnPredictionsUpdated(notification: NSNotification) + { + DispatchQueue.main.async { + if let userinfo = notification.userInfo { + if let predictions:[TensorflowPrediction] = userinfo["predictions"] as? [TensorflowPrediction] { + // Update the Bounding boxes and labels from the + // new predictions coming out of the graph. 
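Each TensorflowPrediction carries coordinates normalized to the 0 to 1 range, so they have to be scaled by the destination view's size before drawing. A minimal sketch of that conversion (a hypothetical helper, assuming the TensorflowPrediction properties declared later in this patch):

```swift
import UIKit

// Hypothetical helper: map a prediction's normalized bounding box into a view's coordinate space.
func boundingBoxRect(for prediction: TensorflowPrediction, in view: UIView) -> CGRect {
    let x = CGFloat(prediction.left) * view.bounds.width
    let y = CGFloat(prediction.top) * view.bounds.height
    let width = CGFloat(prediction.right) * view.bounds.width - x
    let height = CGFloat(prediction.bottom) * view.bounds.height - y
    return CGRect(x: x, y: y, width: width, height: height)
}
```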
+ self.boundingBoxView.updateBoundingBoxes(predictions) + } + } + } + } +} + +//////////////////////////////////////////////////////////////////// +// MARK: - AVCaptureVideoOrientation extension + +extension AVCaptureVideoOrientation { + init?(deviceOrientation: UIDeviceOrientation) { + switch deviceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeRight + case .landscapeRight: self = .landscapeLeft + default: return nil + } + } + + init?(interfaceOrientation: UIInterfaceOrientation) { + switch interfaceOrientation { + case .portrait: self = .portrait + case .portraitUpsideDown: self = .portraitUpsideDown + case .landscapeLeft: self = .landscapeLeft + case .landscapeRight: self = .landscapeRight + default: return nil + } + } +} + diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json b/swift/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..a218566 --- /dev/null +++ b/swift/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,154 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "2x" + }, + { + "idiom" : "iphone", + "size" : "20x20", + "scale" : "3x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_29x29-1.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_29x29@2x-1.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_29x29@3x.png", + "scale" : "3x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_40x40@2x-1.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_40x40@3x.png", + "scale" : "3x" + }, + { + "size" : "57x57", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_57x57.png", + "scale" : "1x" + }, + { + "size" : "57x57", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_57x57@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_60x60@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "tensorflowiOS_Icon_60x60@3x.png", + "scale" : "3x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "1x" + }, + { + "idiom" : "ipad", + "size" : "20x20", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_29x29.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_40x40.png", + "scale" : "1x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "50x50", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_50x50.png", + "scale" : "1x" + }, + { + "size" : "50x50", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_50x50@2x.png", + "scale" : "2x" + }, + { + "size" : "72x72", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_72x72.png", + "scale" : "1x" + }, + { + "size" : "72x72", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_72x72@2x.png", + "scale" : "2x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_76x76.png", + "scale" : "1x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : 
"tensorflowiOS_Icon_76x76@2x.png", + "scale" : "2x" + }, + { + "size" : "83.5x83.5", + "idiom" : "ipad", + "filename" : "tensorflowiOS_Icon_83.5x83.5.png", + "scale" : "2x" + }, + { + "size" : "1024x1024", + "idiom" : "ios-marketing", + "filename" : "tensorflowiOS_Icon_1024x1024-1.png", + "scale" : "1x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png new file mode 100644 index 0000000..447f367 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png new file mode 100644 index 0000000..bf647a7 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png new file mode 100644 index 0000000..bf647a7 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png new file mode 100644 index 0000000..a585b4f Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png new file mode 100644 index 0000000..a585b4f Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png new file mode 100644 index 0000000..93bf8ec Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png new file mode 100644 index 0000000..93c35cd Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png new file mode 100644 index 0000000..b7072e8 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png new file mode 100644 index 0000000..b7072e8 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png 
b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png new file mode 100644 index 0000000..90db855 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png new file mode 100644 index 0000000..a659364 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png new file mode 100644 index 0000000..864bcb6 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png new file mode 100644 index 0000000..5e77b32 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png new file mode 100644 index 0000000..1b29107 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png new file mode 100644 index 0000000..90db855 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png new file mode 100644 index 0000000..f3211ca Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png new file mode 100644 index 0000000..bc25098 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png new file mode 100644 index 0000000..5cc1e60 Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png new file mode 100644 index 0000000..e1e4b5c Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png new file mode 100644 index 0000000..4e1d83d Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png 
differ diff --git a/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png new file mode 100644 index 0000000..382373f Binary files /dev/null and b/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png differ diff --git a/swift/Assets/Assets.xcassets/Contents.json b/swift/Assets/Assets.xcassets/Contents.json new file mode 100644 index 0000000..da4a164 --- /dev/null +++ b/swift/Assets/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/swift/Assets/Base.lproj/LaunchScreen.storyboard b/swift/Assets/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 0000000..5dc6a62 --- /dev/null +++ b/swift/Assets/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/swift/Assets/Base.lproj/Main.storyboard b/swift/Assets/Base.lproj/Main.storyboard new file mode 100644 index 0000000..200f4cf --- /dev/null +++ b/swift/Assets/Base.lproj/Main.storyboard @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/swift/Assets/Default-568h@2x.png b/swift/Assets/Default-568h@2x.png new file mode 100644 index 0000000..0891b7a Binary files /dev/null and b/swift/Assets/Default-568h@2x.png differ diff --git a/swift/Models/faster_rcnn_resnet101_coco_11_06_2017/op_inference_graph_rcnn.pb b/swift/Models/faster_rcnn_resnet101_coco_11_06_2017/op_inference_graph_rcnn.pb new file mode 100644 index 0000000..89e45da Binary files /dev/null and b/swift/Models/faster_rcnn_resnet101_coco_11_06_2017/op_inference_graph_rcnn.pb differ diff --git a/swift/Models/mscoco_label_map.txt b/swift/Models/mscoco_label_map.txt new file mode 100644 index 0000000..0f1be65 --- /dev/null +++ b/swift/Models/mscoco_label_map.txt @@ -0,0 +1,400 @@ +item { + name: "/m/01g317" + id: 1 + display_name: "person" +} +item { + name: "/m/0199g" + id: 2 + display_name: "bicycle" +} +item { + name: "/m/0k4j" + id: 3 + display_name: "car" +} +item { + name: "/m/04_sv" + id: 4 + display_name: "motorcycle" +} +item { + name: "/m/05czz6l" + id: 5 + display_name: "airplane" +} +item { + name: "/m/01bjv" + id: 6 + display_name: "bus" +} +item { + name: "/m/07jdr" + id: 7 + display_name: "train" +} +item { + name: "/m/07r04" + id: 8 + display_name: "truck" +} +item { + name: "/m/019jd" + id: 9 + display_name: "boat" +} +item { + name: "/m/015qff" + id: 10 + display_name: "traffic light" +} +item { + name: "/m/01pns0" + id: 11 + display_name: "fire hydrant" +} +item { + name: "/m/02pv19" + id: 13 + display_name: "stop sign" +} +item { + name: "/m/015qbp" + id: 14 + display_name: "parking meter" +} +item { + name: "/m/0cvnqh" + id: 15 + display_name: "bench" +} +item { + name: "/m/015p6" + id: 16 + display_name: "bird" +} +item { + name: "/m/01yrx" + id: 17 + display_name: "cat" +} +item { + name: "/m/0bt9lr" + id: 18 + display_name: "dog" +} +item { + name: "/m/03k3r" + id: 19 + display_name: "horse" +} +item { + name: "/m/07bgp" + id: 20 + display_name: "sheep" +} +item { + name: "/m/01xq0k1" + id: 21 + display_name: "cow" +} +item { + name: "/m/0bwd_0j" + id: 22 + display_name: "elephant" +} +item { + name: "/m/01dws" + id: 23 + display_name: "bear" +} +item { + name: "/m/0898b" + id: 24 + display_name: "zebra" +} +item { + name: "/m/03bk1" + id: 25 + display_name: 
"giraffe" +} +item { + name: "/m/01940j" + id: 27 + display_name: "backpack" +} +item { + name: "/m/0hnnb" + id: 28 + display_name: "umbrella" +} +item { + name: "/m/080hkjn" + id: 31 + display_name: "handbag" +} +item { + name: "/m/01rkbr" + id: 32 + display_name: "tie" +} +item { + name: "/m/01s55n" + id: 33 + display_name: "suitcase" +} +item { + name: "/m/02wmf" + id: 34 + display_name: "frisbee" +} +item { + name: "/m/071p9" + id: 35 + display_name: "skis" +} +item { + name: "/m/06__v" + id: 36 + display_name: "snowboard" +} +item { + name: "/m/018xm" + id: 37 + display_name: "sports ball" +} +item { + name: "/m/02zt3" + id: 38 + display_name: "kite" +} +item { + name: "/m/03g8mr" + id: 39 + display_name: "baseball bat" +} +item { + name: "/m/03grzl" + id: 40 + display_name: "baseball glove" +} +item { + name: "/m/06_fw" + id: 41 + display_name: "skateboard" +} +item { + name: "/m/019w40" + id: 42 + display_name: "surfboard" +} +item { + name: "/m/0dv9c" + id: 43 + display_name: "tennis racket" +} +item { + name: "/m/04dr76w" + id: 44 + display_name: "bottle" +} +item { + name: "/m/09tvcd" + id: 46 + display_name: "wine glass" +} +item { + name: "/m/08gqpm" + id: 47 + display_name: "cup" +} +item { + name: "/m/0dt3t" + id: 48 + display_name: "fork" +} +item { + name: "/m/04ctx" + id: 49 + display_name: "knife" +} +item { + name: "/m/0cmx8" + id: 50 + display_name: "spoon" +} +item { + name: "/m/04kkgm" + id: 51 + display_name: "bowl" +} +item { + name: "/m/09qck" + id: 52 + display_name: "banana" +} +item { + name: "/m/014j1m" + id: 53 + display_name: "apple" +} +item { + name: "/m/0l515" + id: 54 + display_name: "sandwich" +} +item { + name: "/m/0cyhj_" + id: 55 + display_name: "orange" +} +item { + name: "/m/0hkxq" + id: 56 + display_name: "broccoli" +} +item { + name: "/m/0fj52s" + id: 57 + display_name: "carrot" +} +item { + name: "/m/01b9xk" + id: 58 + display_name: "hot dog" +} +item { + name: "/m/0663v" + id: 59 + display_name: "pizza" +} +item { + name: "/m/0jy4k" + id: 60 + display_name: "donut" +} +item { + name: "/m/0fszt" + id: 61 + display_name: "cake" +} +item { + name: "/m/01mzpv" + id: 62 + display_name: "chair" +} +item { + name: "/m/02crq1" + id: 63 + display_name: "couch" +} +item { + name: "/m/03fp41" + id: 64 + display_name: "potted plant" +} +item { + name: "/m/03ssj5" + id: 65 + display_name: "bed" +} +item { + name: "/m/04bcr3" + id: 67 + display_name: "dining table" +} +item { + name: "/m/09g1w" + id: 70 + display_name: "toilet" +} +item { + name: "/m/07c52" + id: 72 + display_name: "tv" +} +item { + name: "/m/01c648" + id: 73 + display_name: "laptop" +} +item { + name: "/m/020lf" + id: 74 + display_name: "mouse" +} +item { + name: "/m/0qjjc" + id: 75 + display_name: "remote" +} +item { + name: "/m/01m2v" + id: 76 + display_name: "keyboard" +} +item { + name: "/m/050k8" + id: 77 + display_name: "cell phone" +} +item { + name: "/m/0fx9l" + id: 78 + display_name: "microwave" +} +item { + name: "/m/029bxz" + id: 79 + display_name: "oven" +} +item { + name: "/m/01k6s3" + id: 80 + display_name: "toaster" +} +item { + name: "/m/0130jx" + id: 81 + display_name: "sink" +} +item { + name: "/m/040b_t" + id: 82 + display_name: "refrigerator" +} +item { + name: "/m/0bt_c3" + id: 84 + display_name: "book" +} +item { + name: "/m/01x3z" + id: 85 + display_name: "clock" +} +item { + name: "/m/02s195" + id: 86 + display_name: "vase" +} +item { + name: "/m/01lsmm" + id: 87 + display_name: "scissors" +} +item { + name: "/m/0kmg4" + id: 88 + display_name: "teddy bear" +} +item { 
+ name: "/m/03wvsk" + id: 89 + display_name: "hair drier" +} +item { + name: "/m/012xff" + id: 90 + display_name: "toothbrush" +} \ No newline at end of file diff --git a/swift/Models/ssd_inception_v2_coco_11_06_2017/op_inference_graph_inv2.pb b/swift/Models/ssd_inception_v2_coco_11_06_2017/op_inference_graph_inv2.pb new file mode 100644 index 0000000..54c686c Binary files /dev/null and b/swift/Models/ssd_inception_v2_coco_11_06_2017/op_inference_graph_inv2.pb differ diff --git a/swift/Models/ssd_mobilenet_v1_coco_11_06_2017/op_inference_graph.pb b/swift/Models/ssd_mobilenet_v1_coco_11_06_2017/op_inference_graph.pb new file mode 100644 index 0000000..e315d10 Binary files /dev/null and b/swift/Models/ssd_mobilenet_v1_coco_11_06_2017/op_inference_graph.pb differ diff --git a/swift/SupportingFiles/Constants.swift b/swift/SupportingFiles/Constants.swift new file mode 100644 index 0000000..ba2d675 --- /dev/null +++ b/swift/SupportingFiles/Constants.swift @@ -0,0 +1,15 @@ +// +// Constants.swift +// tensorflowiOS +// +// Created by Chris Sharp on 11/11/17. +// Copyright © 2017 Chris Sharp. All rights reserved. +// + +import Foundation + +let kAVSessionStarted:String = "kAVSessionStarted"; +let kSetupResultCameraNotAuthorized:String = "kSetupResultCameraNotAuthorized"; +let kSetupResultSessionConfigurationFailed:String = "SetupResultSessionConfigurationFailed"; +let kPredictionsUpdated:String = "kPredictionsUpdated"; + diff --git a/swift/SupportingFiles/Info.plist b/swift/SupportingFiles/Info.plist new file mode 100644 index 0000000..d24c0f2 --- /dev/null +++ b/swift/SupportingFiles/Info.plist @@ -0,0 +1,46 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + $(PRODUCT_NAME) + CFBundlePackageType + APPL + CFBundleShortVersionString + 1.0 + CFBundleVersion + 1 + NSCameraUsageDescription + For the Video Preview + LSRequiresIPhoneOS + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIRequiredDeviceCapabilities + + armv7 + + UISupportedInterfaceOrientations + + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + + diff --git a/swift/SupportingFiles/tensorflow.xcconfig b/swift/SupportingFiles/tensorflow.xcconfig new file mode 100644 index 0000000..298eee8 --- /dev/null +++ b/swift/SupportingFiles/tensorflow.xcconfig @@ -0,0 +1,9 @@ +// +// tensorflow.xcconfig +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. 
+// + +TENSORFLOW_ROOT = /Users/username/Development/tensorflow diff --git a/swift/Tensorflow/TensorflowGraph.h b/swift/Tensorflow/TensorflowGraph.h new file mode 100644 index 0000000..22ad294 --- /dev/null +++ b/swift/Tensorflow/TensorflowGraph.h @@ -0,0 +1,12 @@ + +#import +#import +#include + + +@interface TensorflowGraph : NSObject + +- (id) init; +- (void)runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuf orientation: (UIDeviceOrientation) orientation; + +@end diff --git a/swift/Tensorflow/TensorflowGraph.mm b/swift/Tensorflow/TensorflowGraph.mm new file mode 100644 index 0000000..73c855f --- /dev/null +++ b/swift/Tensorflow/TensorflowGraph.mm @@ -0,0 +1,307 @@ + +#import "TensorflowGraph.h" +#import +#import "TensorflowUtils.h" +#import "TensorflowPrediction.h" +#include +#import "tensorflow/core/public/session.h" +#import "tensorflow/core/util/memmapped_file_system.h" +#include "string_int_label_map.pb.h" + + +const int kGraphChannels = 3; // BGR. +const int kGraphImageWidth = 480; // The width of the pixels going into the graph. +const int kGraphImageHeight = 270; // the height of the pixels going into the graph. +const float kPredictionThreshold = 0.50; // Prediction percentages lower than this will be discarded. +const int kGraphMaxPredictions = 15; // After this many predictions we move on. +const int kAverageEveryXFrames = 50; // Output average processing time every X frames + +@interface TensorflowGraph() +{ + std::unique_ptr tfSession; + object_detection::protos::StringIntLabelMap labelMap; +} + +// processingTime and framesProcessed are used for keeping an average time to make predictions. +@property (nonatomic) double processingTime; +@property (nonatomic) int framesProcessed; + +// Keep a load status - if loading fails we don't want to attempt to run +// anything through a non-existent graph. +@property (nonatomic) tensorflow::Status loadStatus; +@property (nonatomic) tensorflow::Status labelStatus; +@property (nonatomic) BOOL isProcessingFrame; + +@end + + +@implementation TensorflowGraph + +- (id) init +{ + self = [super init]; + if (self) + { + // change model name here to use one of the other models. + NSString *model = @"op_inference_graph"; + NSString *label = @"mscoco_label_map"; + + if (![self loadModelWithFileName:model modelFileType:@"pb"]) + { + NSLog(@"Failed to load model"); + } + + if (![self loadLabelsWithFileName:label labelsFileType:@"txt"]) + { + NSLog(@"Failed to load labels"); + } + } + return self; +} + +- (BOOL)loadModelWithFileName:(NSString *)modelFileName modelFileType:(NSString *)modelFileType +{ + self.loadStatus = loadModel(modelFileName, modelFileType, &tfSession); + return self.loadStatus.ok(); +} + +- (BOOL)loadLabelsWithFileName:(NSString *)labelsFileName labelsFileType:(NSString *)labelsFileType +{ + // + // load the labels from the file. labelMap is populated by calling loadLabels. + self.labelStatus = loadLabels(labelsFileName, labelsFileType, &labelMap); + return self.labelStatus.ok(); +} + +- (BOOL) canProcessFrame +{ + return (!self.isProcessingFrame); +} + + +- (CGImageRef) copyPixelBuffer: (CVImageBufferRef) pixelBuffer +{ + // + // alloc a CIImage with the pixel buffer. 
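For readers following the Swift target, the downscale this method performs amounts to the following rough, illustrative Swift equivalent of the same Core Image transform (the 480x270 defaults mirror kGraphImageWidth and kGraphImageHeight above):

```swift
import CoreImage
import CoreVideo

// Shrink a camera frame to the graph's expected input size by applying a scale
// transform to a CIImage and rendering the result to a CGImage.
func scaledImage(from pixelBuffer: CVPixelBuffer,
                 width: CGFloat = 480,
                 height: CGFloat = 270) -> CGImage? {
    let image = CIImage(cvPixelBuffer: pixelBuffer)
    let scaleX = width / CGFloat(CVPixelBufferGetWidth(pixelBuffer))
    let scaleY = height / CGFloat(CVPixelBufferGetHeight(pixelBuffer))
    let resized = image.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))
    return CIContext().createCGImage(resized, from: resized.extent)
}
```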
+ CIImage* ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer]; + + const int pixelBufHeight = (int) CVPixelBufferGetHeight(pixelBuffer); + const int pixelBufWidth = (int) CVPixelBufferGetWidth(pixelBuffer); + CGAffineTransform scale = CGAffineTransformMakeScale(float(kGraphImageWidth)/pixelBufWidth, + float(kGraphImageHeight)/pixelBufHeight); + CIImage* resized = [ciImage imageByApplyingTransform:scale]; + + // + // Create a cgImage from the frame pixels + // + CIContext *context = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [context createCGImage:resized fromRect:resized.extent]; + + return cgImage; +} + + +// +// Takes a pixel buffer coming from the Camera preview session and obtains predictions w/bounding boxes from +// a tensorflow graph. +// +- (void)runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuffer orientation: (UIDeviceOrientation) orientation +{ + // + // if the graph hasn't loaded we can't do anything yet. + // + if (!self.loadStatus.ok() || self.isProcessingFrame) + { + return; + } + + // + // Retain the pixel buffer, copy and make a CGImage out of it. + // + CFRetain(pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); + CGImageRef cgImage = [self copyPixelBuffer:pixelBuffer]; + CFRelease(pixelBuffer); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + // + // mark the graph as busy + // + self.isProcessingFrame = YES; + + // + // Create a tensor for running through the graph. + // + tensorflow::Tensor imageTensor(tensorflow::DT_UINT8, tensorflow::TensorShape({1, kGraphImageHeight, kGraphImageWidth, kGraphChannels})); + auto imageTensorDimensioned = imageTensor.tensor(); + + // + // Gather needed dimensions of the CGImage + // + const int srcHeight = (int) CGImageGetHeight(cgImage); + const int srcWidth = (int) CGImageGetWidth(cgImage); + const int bytesPerRow = (int) CGImageGetBytesPerRow(cgImage); + const int srcChannels = (int) bytesPerRow / srcWidth; + + // + // Scale the pixel data down, drop the alpha channel, and populate the image_tensor. + // The source pointer iterates through the pixelBuffer and the destination pointer + // writes pixel data into the reshaped image tensor. Changing the GraphInputWidth and Height + // may increase (or decrease) speed and/or accuracy. + // + CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage)); + unsigned char *srcStartAddress = (unsigned char*) CFDataGetBytePtr(pixelData); + + // + // if the orientation is landscape-right the source pixels start at the end of the pixel buffer + // and read backwards. dest pixel still ends up in the same row, col. + // + if (orientation == UIDeviceOrientationLandscapeRight) + { + srcStartAddress += (bytesPerRow * srcHeight); + } + + // + // Scale the buffer down to the expected size and shape of the input tensor for the TF graph + // also, drop the alpha component as the pixel format going in is BGA. 
+ // + unsigned char *destStartAddress = imageTensorDimensioned.data(); + for (int row = 0; row < kGraphImageHeight; ++row) + { + unsigned char *destRow = destStartAddress + (row * kGraphImageWidth * kGraphChannels); + for (int col = 0; col < kGraphImageWidth; ++col) + { + const int srcRow = (int) (row * (srcHeight / kGraphImageHeight)); + const int srcCol = (int) (col * (srcWidth / kGraphImageWidth)); + unsigned char* srcPixel; + + if (orientation == UIDeviceOrientationLandscapeRight) + { + // landscape right - we start at the end of the buffer and read backwards + srcPixel = srcStartAddress - (srcRow * bytesPerRow) - (srcCol * srcChannels); + } + else + { + // landscape left - we start at the beginning of the buffer and read forward + srcPixel = srcStartAddress + (srcRow * bytesPerRow) + (srcCol * srcChannels); + } + + unsigned char* destPixel = destRow + (col * kGraphChannels); + for (int c = 0; c < kGraphChannels; ++c) + { + destPixel[c] = srcPixel[c]; + } + } + } + + // we are done with the CFDataRef + CFRelease(pixelData); + + // + // Move the tensorflow processing to another thread. Not only are there limited pixelBuffers + // but if the thread running the videoPreview gets blocked we will get Late Frame warninigs. + // Running the graph on a background thread keeps things moving. + // + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ + + // + // Get a start time. We will clock the tensorflow processing time. + // + struct timespec ts_start; + clock_gettime(CLOCK_MONOTONIC, &ts_start); + + if (tfSession.get()) + { + // Run through the graph. + std::vector outputs; + tensorflow::Status runStatus = tfSession->Run({{"image_tensor", imageTensor}}, {"detection_boxes", "detection_scores", "detection_classes", "num_detections"}, {}, &outputs); + + if (!runStatus.ok()) + { + LOG(FATAL) << "Error: " << runStatus; + } + else + { + // + // Generate our list of predictions and bounding boxes + // + auto boundingBoxesFlat = outputs[0].flat(); + tensorflow::TTypes::Flat scores_flat = outputs[1].flat(); + tensorflow::TTypes::Flat indices_flat = outputs[2].flat(); + + NSMutableArray * predictions = [[NSMutableArray alloc] init]; + for (int i = 0; i < kGraphMaxPredictions; ++i) + { + // + // once the prediction score falls below our threshold don't bother + // processing any more predictions. 
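This early exit leans on the detection scores coming back ordered from highest to lowest, so everything after the first low score can be skipped. A minimal Swift-side sketch of the same filtering rule (hypothetical, using the 15-prediction cap and 0.50 threshold defined above):

```swift
// Keep at most `limit` detections and stop at the first score under the
// threshold, assuming the predictions arrive sorted by score, highest first.
func topPredictions(_ all: [TensorflowPrediction],
                    limit: Int = 15,
                    threshold: Float = 0.50) -> [TensorflowPrediction] {
    return Array(all.prefix(limit).prefix(while: { $0.score >= threshold }))
}
```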
+ // + const float score = scores_flat(i); + if (score < kPredictionThreshold) + { + break; + } + + // + // Keep an array of predictions + // + TensorflowPrediction * prediction = [[TensorflowPrediction alloc] init]; + prediction.score = score; + const int label_index = (tensorflow::int32)indices_flat(i); + prediction.label = [NSString stringWithUTF8String:GetDisplayName(&labelMap, label_index).c_str()]; + prediction.top = boundingBoxesFlat(i * 4 + 0); + prediction.left = boundingBoxesFlat(i * 4 + 1); + prediction.bottom = boundingBoxesFlat(i * 4 + 2); + prediction.right = boundingBoxesFlat(i * 4 + 3); + + // + // Crop the pixels out of the bounding box and put the cropped + // image into the prediction object + // + const int x = prediction.left * kGraphImageWidth; + const int y = prediction.top * kGraphImageHeight; + const int w = (prediction.right * kGraphImageWidth) - x; + const int h = (prediction.bottom * kGraphImageHeight) - y; + CGRect croppedArea = CGRectMake(x, y, w, h); + CGImageRef cropped = CGImageCreateWithImageInRect(cgImage, croppedArea); + prediction.image = [UIImage imageWithCGImage:cropped]; + CGImageRelease(cropped); + + [predictions addObject:prediction]; + } + + // + // Now that predictions are done calculate the amount of time elapsed since the start of processing. + // + struct timespec ts_end; + clock_gettime(CLOCK_MONOTONIC, &ts_end); + struct timespec elapsed = diff(ts_start, ts_end); + + // + // Calculate an average time and output every X frames. + // + self.processingTime += elapsed.tv_sec; + self.processingTime += (elapsed.tv_nsec / 1000000000.0f); + self.framesProcessed += 1; + if (self.framesProcessed % kAverageEveryXFrames == 0) + { + printf("Avg. prediction time: %f\n", self.processingTime / self.framesProcessed); + } + + // + // Notify the UI that we have new predictions. Another class will receive this + // and use the data to draw bounding boxes. + // + dispatch_async(dispatch_get_main_queue(), ^(void) { + [[NSNotificationCenter defaultCenter] postNotificationName:@"kPredictionsUpdated" object:nil userInfo:@{@"predictions" : predictions}]; + }); + + CGImageRelease(cgImage); + } + + self.isProcessingFrame = NO; + } // end --- if (tfSession.get) + }); // end --- dispatch_async +} // end --- runModelOnPixelBuffer() + +@end diff --git a/swift/Tensorflow/TensorflowPrediction.h b/swift/Tensorflow/TensorflowPrediction.h new file mode 100644 index 0000000..c0603ef --- /dev/null +++ b/swift/Tensorflow/TensorflowPrediction.h @@ -0,0 +1,20 @@ +// +// TensorflowPrediction.h +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. +// + +#import +#import + +@interface TensorflowPrediction : NSObject +@property (nonatomic) NSString *label; +@property (nonatomic) UIImage *image; +@property (nonatomic) float score; +@property (nonatomic) float top; +@property (nonatomic) float left; +@property (nonatomic) float right; +@property (nonatomic) float bottom; +@end diff --git a/swift/Tensorflow/TensorflowPrediction.m b/swift/Tensorflow/TensorflowPrediction.m new file mode 100644 index 0000000..63cddb1 --- /dev/null +++ b/swift/Tensorflow/TensorflowPrediction.m @@ -0,0 +1,13 @@ +// +// TensorflowPrediction.m +// tensorflowiOS +// +// Created by Sharp, Chris T on 10/9/17. +// Copyright © 2017 Apple. All rights reserved. 
+// + +#import "TensorflowPrediction.h" + +@implementation TensorflowPrediction + +@end diff --git a/swift/Tensorflow/TensorflowUtils.h b/swift/Tensorflow/TensorflowUtils.h new file mode 100644 index 0000000..1409cd0 --- /dev/null +++ b/swift/Tensorflow/TensorflowUtils.h @@ -0,0 +1,40 @@ + +#ifndef tensorflowUtils_h +#define tensorflowUtils_h + +#include "tensorflow/core/public/session.h" +#include "tensorflow/core/util/memmapped_file_system.h" +#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" +namespace object_detection +{ + namespace protos + { + class StringIntLabelMap; + } +} + +// Reads a serialized GraphDef protobuf file from the bundle, typically +// created with the freeze_graph script. Populates the session argument with a +// Session object that has the model loaded. +tensorflow::Status loadModel(NSString* file_name, + NSString* file_type, + std::unique_ptr* session); + +// Loads a model from a file that has been created using the +// convert_graphdef_memmapped_format tool. This bundles together a GraphDef +// proto together with a file that can be memory-mapped, containing the weight +// parameters for the model. This is useful because it reduces the overall +// memory pressure, since the read-only parameter regions can be easily paged +// out and don't count toward memory limits on iOS. +tensorflow::Status loadMemoryMappedModel(NSString* file_name, + NSString* file_type, + std::unique_ptr* session, + std::unique_ptr* memmapped_env); + +// Loads a text file of a label map in mscoco style. +tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap); + +// Takes a label Map and an index into it. Returns the 'DisplayName' field in the protobuf +std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index); +timespec diff(timespec start, timespec end); +#endif /* tensorflowUtils_h */ diff --git a/swift/Tensorflow/TensorflowUtils.mm b/swift/Tensorflow/TensorflowUtils.mm new file mode 100644 index 0000000..36ca157 --- /dev/null +++ b/swift/Tensorflow/TensorflowUtils.mm @@ -0,0 +1,166 @@ + +#import + +#include "TensorflowUtils.h" +#include +#include +#include +#include "string_int_label_map.pb.h" + + +// Helper class borrowed from some utils that loads protobufs efficiently. +namespace +{ + class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream + { + public: + explicit IfstreamInputStream(const std::string& file_name) : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {} + ~IfstreamInputStream() { ifs_.close(); } + + int Read(void *buffer, int size) + { + if (!ifs_) + { + return -1; + } + ifs_.read(static_cast(buffer), size); + return (int)ifs_.gcount(); + } + + private: + std::ifstream ifs_; + }; +} + +#pragma mark - Private + +NSString *filePathForResourceName(NSString *name, NSString *extension) +{ + NSString *filePath = [[NSBundle mainBundle] pathForResource:name ofType:extension]; + + if (filePath == NULL) + { + LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "." 
<< [extension UTF8String] << "' in bundle."; + return nullptr; + } + return filePath; +} + +bool PortableReadFileToProto(const std::string& fileName, ::google::protobuf::MessageLite *proto) +{ + ::google::protobuf::io::CopyingInputStreamAdaptor stream(new IfstreamInputStream(fileName)); + stream.SetOwnsCopyingStream(true); + ::google::protobuf::io::CodedInputStream codedStream(&stream); + + // Total bytes hard limit / warning limit are set to 1GB and 512MB + // respectively. + codedStream.SetTotalBytesLimit(1024LL << 20, 512LL << 20); + return proto->ParseFromCodedStream(&codedStream); +} + +#pragma mark - Public + +tensorflow::Status loadModel(NSString *fileName, NSString *fileType, std::unique_ptr *session) +{ + tensorflow::SessionOptions options; + + tensorflow::Session *sessionPointer = nullptr; + tensorflow::Status sessionStatus = tensorflow::NewSession(options, &sessionPointer); + + if (!sessionStatus.ok()) + { + LOG(ERROR) << "Could not create TensorFlow Session: " << sessionStatus; + return sessionStatus; + } + session->reset(sessionPointer); + + tensorflow::GraphDef tensorflowGraph; + + NSString *modelPath = filePathForResourceName(fileName, fileType); + + if (!modelPath) + { + LOG(ERROR) << "Failed to find model proto at" << [fileName UTF8String] << [fileType UTF8String]; + return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]); + } + + const bool readProtoSucceeded = PortableReadFileToProto([modelPath UTF8String], &tensorflowGraph); + + if (!readProtoSucceeded) + { + LOG(ERROR) << "Failed to load model proto from" << [modelPath UTF8String]; + return tensorflow::errors::NotFound([modelPath UTF8String]); + } + + tensorflow::Status create_status = (*session)->Create(tensorflowGraph); + + if (!create_status.ok()) + { + LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status; + return create_status; + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap) +{ + // Read the label list + NSString *labelsPath = filePathForResourceName(fileName, fileType); + + if (!labelsPath) + { + LOG(ERROR) << "Failed to find model proto at" << [fileName UTF8String] << [fileType UTF8String]; + return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]); + } + + int fileDescriptor = open([labelsPath UTF8String], O_RDONLY); + if (fileDescriptor >= 0) + { + google::protobuf::io::FileInputStream fileInput(fileDescriptor); + fileInput.SetCloseOnDelete( true ); + + if (!google::protobuf::TextFormat::Parse(&fileInput, labelMap)) + { + LOG(ERROR) << "Failed to parse label file.\n"; + return tensorflow::errors::Aborted([fileName UTF8String], [fileType UTF8String]); + } + } + + return tensorflow::Status::OK(); +} + +std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index) +{ + for (int i = 0; i < labels->item_size(); ++i) + { + const object_detection::protos::StringIntLabelMapItem& item = labels->item(i); + if (index == item.id()) + { + return item.display_name(); + } + } + + return ""; +} + +// +// Calculate and return elapsed time between to struct timespecs +// +timespec diff(timespec start, timespec end) +{ + timespec temp; + if ((end.tv_nsec-start.tv_nsec)<0) + { + temp.tv_sec = end.tv_sec-start.tv_sec-1; + temp.tv_nsec = 1000000000+end.tv_nsec-start.tv_nsec; + } + else + { + temp.tv_sec = end.tv_sec-start.tv_sec; + temp.tv_nsec = end.tv_nsec-start.tv_nsec; + } + return temp; +} + + diff --git 
a/swift/Tensorflow/string_int_label_map.pb.cc b/swift/Tensorflow/string_int_label_map.pb.cc new file mode 100644 index 0000000..0f8e818 --- /dev/null +++ b/swift/Tensorflow/string_int_label_map.pb.cc @@ -0,0 +1,961 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: string_int_label_map.proto + +#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION +#include "string_int_label_map.pb.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +// @@protoc_insertion_point(includes) + +namespace object_detection { +namespace protos { +class StringIntLabelMapItemDefaultTypeInternal { +public: + ::google::protobuf::internal::ExplicitlyConstructed + _instance; +} _StringIntLabelMapItem_default_instance_; +class StringIntLabelMapDefaultTypeInternal { +public: + ::google::protobuf::internal::ExplicitlyConstructed + _instance; +} _StringIntLabelMap_default_instance_; + +namespace protobuf_string_5fint_5flabel_5fmap_2eproto { + + +namespace { + +::google::protobuf::Metadata file_level_metadata[2]; + +} // namespace + +PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTableField + const TableStruct::entries[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + {0, 0, 0, ::google::protobuf::internal::kInvalidMask, 0, 0}, +}; + +PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::AuxillaryParseTableField + const TableStruct::aux[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + ::google::protobuf::internal::AuxillaryParseTableField(), +}; +PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTable const + TableStruct::schema[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + { NULL, NULL, 0, -1, -1, -1, -1, NULL, false }, + { NULL, NULL, 0, -1, -1, -1, -1, NULL, false }, +}; + +const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, _has_bits_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, name_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, id_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMapItem, display_name_), + 0, + 2, + 1, + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMap, _has_bits_), + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMap, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(StringIntLabelMap, item_), + ~0u, +}; +static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + { 0, 8, sizeof(StringIntLabelMapItem)}, + { 11, 17, sizeof(StringIntLabelMap)}, +}; + +static ::google::protobuf::Message const * const file_default_instances[] = { + reinterpret_cast(&_StringIntLabelMapItem_default_instance_), + reinterpret_cast(&_StringIntLabelMap_default_instance_), +}; + +namespace { + +void protobuf_AssignDescriptors() { + AddDescriptors(); + ::google::protobuf::MessageFactory* factory = NULL; + AssignDescriptors( + "string_int_label_map.proto", schemas, file_default_instances, TableStruct::offsets, factory, + file_level_metadata, NULL, NULL); +} + +void protobuf_AssignDescriptorsOnce() { + static 
GOOGLE_PROTOBUF_DECLARE_ONCE(once); + ::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors); +} + +void protobuf_RegisterTypes(const ::std::string&) GOOGLE_ATTRIBUTE_COLD; +void protobuf_RegisterTypes(const ::std::string&) { + protobuf_AssignDescriptorsOnce(); + ::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 2); +} + +} // namespace +void TableStruct::InitDefaultsImpl() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + ::google::protobuf::internal::InitProtobufDefaults(); + _StringIntLabelMapItem_default_instance_._instance.DefaultConstruct(); + ::google::protobuf::internal::OnShutdownDestroyMessage( + &_StringIntLabelMapItem_default_instance_);_StringIntLabelMap_default_instance_._instance.DefaultConstruct(); + ::google::protobuf::internal::OnShutdownDestroyMessage( + &_StringIntLabelMap_default_instance_);} + +void InitDefaults() { + static GOOGLE_PROTOBUF_DECLARE_ONCE(once); + ::google::protobuf::GoogleOnceInit(&once, &TableStruct::InitDefaultsImpl); +} +namespace { +void AddDescriptorsImpl() { + InitDefaults(); + static const char descriptor[] GOOGLE_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { + "\n\032string_int_label_map.proto\022\027object_det" + "ection.protos\"G\n\025StringIntLabelMapItem\022\014" + "\n\004name\030\001 \001(\t\022\n\n\002id\030\002 \001(\005\022\024\n\014display_name" + "\030\003 \001(\t\"Q\n\021StringIntLabelMap\022<\n\004item\030\001 \003(" + "\0132..object_detection.protos.StringIntLab" + "elMapItem" + }; + ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( + descriptor, 209); + ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( + "string_int_label_map.proto", &protobuf_RegisterTypes); +} +} // anonymous namespace + +void AddDescriptors() { + static GOOGLE_PROTOBUF_DECLARE_ONCE(once); + ::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl); +} +// Force AddDescriptors() to be called at dynamic initialization time. 
+struct StaticDescriptorInitializer { + StaticDescriptorInitializer() { + AddDescriptors(); + } +} static_descriptor_initializer; + +} // namespace protobuf_string_5fint_5flabel_5fmap_2eproto + + +// =================================================================== + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int StringIntLabelMapItem::kNameFieldNumber; +const int StringIntLabelMapItem::kIdFieldNumber; +const int StringIntLabelMapItem::kDisplayNameFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +StringIntLabelMapItem::StringIntLabelMapItem() + : ::google::protobuf::Message(), _internal_metadata_(NULL) { + if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + } + SharedCtor(); + // @@protoc_insertion_point(constructor:object_detection.protos.StringIntLabelMapItem) +} +StringIntLabelMapItem::StringIntLabelMapItem(const StringIntLabelMapItem& from) + : ::google::protobuf::Message(), + _internal_metadata_(NULL), + _has_bits_(from._has_bits_), + _cached_size_(0) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.has_name()) { + name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); + } + display_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.has_display_name()) { + display_name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.display_name_); + } + id_ = from.id_; + // @@protoc_insertion_point(copy_constructor:object_detection.protos.StringIntLabelMapItem) +} + +void StringIntLabelMapItem::SharedCtor() { + _cached_size_ = 0; + name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + display_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + id_ = 0; +} + +StringIntLabelMapItem::~StringIntLabelMapItem() { + // @@protoc_insertion_point(destructor:object_detection.protos.StringIntLabelMapItem) + SharedDtor(); +} + +void StringIntLabelMapItem::SharedDtor() { + name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + display_name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} + +void StringIntLabelMapItem::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* StringIntLabelMapItem::descriptor() { + protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce(); + return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; +} + +const StringIntLabelMapItem& StringIntLabelMapItem::default_instance() { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + return *internal_default_instance(); +} + +StringIntLabelMapItem* StringIntLabelMapItem::New(::google::protobuf::Arena* arena) const { + StringIntLabelMapItem* n = new StringIntLabelMapItem; + if (arena != NULL) { + arena->Own(n); + } + return n; +} + +void StringIntLabelMapItem::Clear() { +// @@protoc_insertion_point(message_clear_start:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + cached_has_bits = _has_bits_[0]; + if (cached_has_bits & 3u) { + if 
(cached_has_bits & 0x00000001u) { + GOOGLE_DCHECK(!name_.IsDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited())); + (*name_.UnsafeRawStringPointer())->clear(); + } + if (cached_has_bits & 0x00000002u) { + GOOGLE_DCHECK(!display_name_.IsDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited())); + (*display_name_.UnsafeRawStringPointer())->clear(); + } + } + id_ = 0; + _has_bits_.Clear(); + _internal_metadata_.Clear(); +} + +bool StringIntLabelMapItem::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:object_detection.protos.StringIntLabelMapItem) + for (;;) { + ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // optional string name = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->name().data(), static_cast(this->name().length()), + ::google::protobuf::internal::WireFormat::PARSE, + "object_detection.protos.StringIntLabelMapItem.name"); + } else { + goto handle_unusual; + } + break; + } + + // optional int32 id = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { + set_has_id(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( + input, &id_))); + } else { + goto handle_unusual; + } + break; + } + + // optional string display_name = 3; + case 3: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(26u /* 26 & 0xFF */)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_display_name())); + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->display_name().data(), static_cast(this->display_name().length()), + ::google::protobuf::internal::WireFormat::PARSE, + "object_detection.protos.StringIntLabelMapItem.display_name"); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:object_detection.protos.StringIntLabelMapItem) + return true; +failure: + // @@protoc_insertion_point(parse_failure:object_detection.protos.StringIntLabelMapItem) + return false; +#undef DO_ +} + +void StringIntLabelMapItem::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + cached_has_bits = _has_bits_[0]; + // optional string name = 1; + if (cached_has_bits & 0x00000001u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->name().data(), static_cast(this->name().length()), + 
::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.name"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 1, this->name(), output); + } + + // optional int32 id = 2; + if (cached_has_bits & 0x00000004u) { + ::google::protobuf::internal::WireFormatLite::WriteInt32(2, this->id(), output); + } + + // optional string display_name = 3; + if (cached_has_bits & 0x00000002u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->display_name().data(), static_cast(this->display_name().length()), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.display_name"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 3, this->display_name(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:object_detection.protos.StringIntLabelMapItem) +} + +::google::protobuf::uint8* StringIntLabelMapItem::InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const { + (void)deterministic; // Unused + // @@protoc_insertion_point(serialize_to_array_start:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + cached_has_bits = _has_bits_[0]; + // optional string name = 1; + if (cached_has_bits & 0x00000001u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->name().data(), static_cast(this->name().length()), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.name"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->name(), target); + } + + // optional int32 id = 2; + if (cached_has_bits & 0x00000004u) { + target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(2, this->id(), target); + } + + // optional string display_name = 3; + if (cached_has_bits & 0x00000002u) { + ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField( + this->display_name().data(), static_cast(this->display_name().length()), + ::google::protobuf::internal::WireFormat::SERIALIZE, + "object_detection.protos.StringIntLabelMapItem.display_name"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 3, this->display_name(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:object_detection.protos.StringIntLabelMapItem) + return target; +} + +size_t StringIntLabelMapItem::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:object_detection.protos.StringIntLabelMapItem) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + if (_has_bits_[0 / 32] & 7u) { + // optional string name = 1; + if (has_name()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->name()); + } + + // optional string display_name = 3; + if (has_display_name()) { + total_size += 1 + + 
::google::protobuf::internal::WireFormatLite::StringSize( + this->display_name()); + } + + // optional int32 id = 2; + if (has_id()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::Int32Size( + this->id()); + } + + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = cached_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void StringIntLabelMapItem::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:object_detection.protos.StringIntLabelMapItem) + GOOGLE_DCHECK_NE(&from, this); + const StringIntLabelMapItem* source = + ::google::protobuf::internal::DynamicCastToGenerated( + &from); + if (source == NULL) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:object_detection.protos.StringIntLabelMapItem) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:object_detection.protos.StringIntLabelMapItem) + MergeFrom(*source); + } +} + +void StringIntLabelMapItem::MergeFrom(const StringIntLabelMapItem& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:object_detection.protos.StringIntLabelMapItem) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + cached_has_bits = from._has_bits_[0]; + if (cached_has_bits & 7u) { + if (cached_has_bits & 0x00000001u) { + set_has_name(); + name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); + } + if (cached_has_bits & 0x00000002u) { + set_has_display_name(); + display_name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.display_name_); + } + if (cached_has_bits & 0x00000004u) { + id_ = from.id_; + } + _has_bits_[0] |= cached_has_bits; + } +} + +void StringIntLabelMapItem::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:object_detection.protos.StringIntLabelMapItem) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void StringIntLabelMapItem::CopyFrom(const StringIntLabelMapItem& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:object_detection.protos.StringIntLabelMapItem) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool StringIntLabelMapItem::IsInitialized() const { + return true; +} + +void StringIntLabelMapItem::Swap(StringIntLabelMapItem* other) { + if (other == this) return; + InternalSwap(other); +} +void StringIntLabelMapItem::InternalSwap(StringIntLabelMapItem* other) { + using std::swap; + name_.Swap(&other->name_); + display_name_.Swap(&other->display_name_); + swap(id_, other->id_); + swap(_has_bits_[0], other->_has_bits_[0]); + _internal_metadata_.Swap(&other->_internal_metadata_); + swap(_cached_size_, other->_cached_size_); +} + +::google::protobuf::Metadata StringIntLabelMapItem::GetMetadata() const { + protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce(); + return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages]; +} + +#if PROTOBUF_INLINE_NOT_IN_HEADERS +// StringIntLabelMapItem + +// optional string name = 1; +bool StringIntLabelMapItem::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +void StringIntLabelMapItem::set_has_name() { + _has_bits_[0] 
|= 0x00000001u; +} +void StringIntLabelMapItem::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +void StringIntLabelMapItem::clear_name() { + name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_name(); +} +const ::std::string& StringIntLabelMapItem::name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.name) + return name_.GetNoArena(); +} +void StringIntLabelMapItem::set_name(const ::std::string& value) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.name) +} +#if LANG_CXX11 +void StringIntLabelMapItem::set_name(::std::string&& value) { + set_has_name(); + name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.name) +} +#endif +void StringIntLabelMapItem::set_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.name) +} +void StringIntLabelMapItem::set_name(const char* value, size_t size) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.name) +} +::std::string* StringIntLabelMapItem::mutable_name() { + set_has_name(); + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.name) + return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +::std::string* StringIntLabelMapItem::release_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.name) + clear_has_name(); + return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +void StringIntLabelMapItem::set_allocated_name(::std::string* name) { + if (name != NULL) { + set_has_name(); + } else { + clear_has_name(); + } + name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.name) +} + +// optional int32 id = 2; +bool StringIntLabelMapItem::has_id() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +void StringIntLabelMapItem::set_has_id() { + _has_bits_[0] |= 0x00000004u; +} +void StringIntLabelMapItem::clear_has_id() { + _has_bits_[0] &= ~0x00000004u; +} +void StringIntLabelMapItem::clear_id() { + id_ = 0; + clear_has_id(); +} +::google::protobuf::int32 StringIntLabelMapItem::id() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.id) + return id_; +} +void StringIntLabelMapItem::set_id(::google::protobuf::int32 value) { + set_has_id(); + id_ = value; + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.id) +} + +// optional string display_name = 3; +bool StringIntLabelMapItem::has_display_name() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +void StringIntLabelMapItem::set_has_display_name() { + _has_bits_[0] |= 0x00000002u; +} +void StringIntLabelMapItem::clear_has_display_name() 
{ + _has_bits_[0] &= ~0x00000002u; +} +void StringIntLabelMapItem::clear_display_name() { + display_name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_display_name(); +} +const ::std::string& StringIntLabelMapItem::display_name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.GetNoArena(); +} +void StringIntLabelMapItem::set_display_name(const ::std::string& value) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.display_name) +} +#if LANG_CXX11 +void StringIntLabelMapItem::set_display_name(::std::string&& value) { + set_has_display_name(); + display_name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.display_name) +} +#endif +void StringIntLabelMapItem::set_display_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.display_name) +} +void StringIntLabelMapItem::set_display_name(const char* value, size_t size) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.display_name) +} +::std::string* StringIntLabelMapItem::mutable_display_name() { + set_has_display_name(); + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +::std::string* StringIntLabelMapItem::release_display_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.display_name) + clear_has_display_name(); + return display_name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +void StringIntLabelMapItem::set_allocated_display_name(::std::string* display_name) { + if (display_name != NULL) { + set_has_display_name(); + } else { + clear_has_display_name(); + } + display_name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), display_name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.display_name) +} + +#endif // PROTOBUF_INLINE_NOT_IN_HEADERS + +// =================================================================== + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int StringIntLabelMap::kItemFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +StringIntLabelMap::StringIntLabelMap() + : ::google::protobuf::Message(), _internal_metadata_(NULL) { + if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + } + SharedCtor(); + // @@protoc_insertion_point(constructor:object_detection.protos.StringIntLabelMap) +} +StringIntLabelMap::StringIntLabelMap(const StringIntLabelMap& from) + : ::google::protobuf::Message(), + _internal_metadata_(NULL), + _has_bits_(from._has_bits_), + 
_cached_size_(0), + item_(from.item_) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + // @@protoc_insertion_point(copy_constructor:object_detection.protos.StringIntLabelMap) +} + +void StringIntLabelMap::SharedCtor() { + _cached_size_ = 0; +} + +StringIntLabelMap::~StringIntLabelMap() { + // @@protoc_insertion_point(destructor:object_detection.protos.StringIntLabelMap) + SharedDtor(); +} + +void StringIntLabelMap::SharedDtor() { +} + +void StringIntLabelMap::SetCachedSize(int size) const { + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); +} +const ::google::protobuf::Descriptor* StringIntLabelMap::descriptor() { + protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce(); + return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; +} + +const StringIntLabelMap& StringIntLabelMap::default_instance() { + protobuf_string_5fint_5flabel_5fmap_2eproto::InitDefaults(); + return *internal_default_instance(); +} + +StringIntLabelMap* StringIntLabelMap::New(::google::protobuf::Arena* arena) const { + StringIntLabelMap* n = new StringIntLabelMap; + if (arena != NULL) { + arena->Own(n); + } + return n; +} + +void StringIntLabelMap::Clear() { +// @@protoc_insertion_point(message_clear_start:object_detection.protos.StringIntLabelMap) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + item_.Clear(); + _has_bits_.Clear(); + _internal_metadata_.Clear(); +} + +bool StringIntLabelMap::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:object_detection.protos.StringIntLabelMap) + for (;;) { + ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == + static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( + input, add_item())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:object_detection.protos.StringIntLabelMap) + return true; +failure: + // @@protoc_insertion_point(parse_failure:object_detection.protos.StringIntLabelMap) + return false; +#undef DO_ +} + +void StringIntLabelMap::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:object_detection.protos.StringIntLabelMap) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + for (unsigned int i = 0, + n = static_cast(this->item_size()); i < n; i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, this->item(static_cast(i)), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + 
::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:object_detection.protos.StringIntLabelMap) +} + +::google::protobuf::uint8* StringIntLabelMap::InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const { + (void)deterministic; // Unused + // @@protoc_insertion_point(serialize_to_array_start:object_detection.protos.StringIntLabelMap) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + for (unsigned int i = 0, + n = static_cast(this->item_size()); i < n; i++) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageNoVirtualToArray( + 1, this->item(static_cast(i)), deterministic, target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:object_detection.protos.StringIntLabelMap) + return target; +} + +size_t StringIntLabelMap::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:object_detection.protos.StringIntLabelMap) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + { + unsigned int count = static_cast(this->item_size()); + total_size += 1UL * count; + for (unsigned int i = 0; i < count; i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual( + this->item(static_cast(i))); + } + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN(); + _cached_size_ = cached_size; + GOOGLE_SAFE_CONCURRENT_WRITES_END(); + return total_size; +} + +void StringIntLabelMap::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:object_detection.protos.StringIntLabelMap) + GOOGLE_DCHECK_NE(&from, this); + const StringIntLabelMap* source = + ::google::protobuf::internal::DynamicCastToGenerated( + &from); + if (source == NULL) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:object_detection.protos.StringIntLabelMap) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:object_detection.protos.StringIntLabelMap) + MergeFrom(*source); + } +} + +void StringIntLabelMap::MergeFrom(const StringIntLabelMap& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:object_detection.protos.StringIntLabelMap) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + item_.MergeFrom(from.item_); +} + +void StringIntLabelMap::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:object_detection.protos.StringIntLabelMap) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void StringIntLabelMap::CopyFrom(const StringIntLabelMap& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:object_detection.protos.StringIntLabelMap) + if (&from == 
this) return;
+  Clear();
+  MergeFrom(from);
+}
+
+bool StringIntLabelMap::IsInitialized() const {
+  return true;
+}
+
+void StringIntLabelMap::Swap(StringIntLabelMap* other) {
+  if (other == this) return;
+  InternalSwap(other);
+}
+void StringIntLabelMap::InternalSwap(StringIntLabelMap* other) {
+  using std::swap;
+  item_.InternalSwap(&other->item_);
+  swap(_has_bits_[0], other->_has_bits_[0]);
+  _internal_metadata_.Swap(&other->_internal_metadata_);
+  swap(_cached_size_, other->_cached_size_);
+}
+
+::google::protobuf::Metadata StringIntLabelMap::GetMetadata() const {
+  protobuf_string_5fint_5flabel_5fmap_2eproto::protobuf_AssignDescriptorsOnce();
+  return protobuf_string_5fint_5flabel_5fmap_2eproto::file_level_metadata[kIndexInFileMessages];
+}
+
+#if PROTOBUF_INLINE_NOT_IN_HEADERS
+// StringIntLabelMap
+
+// repeated .object_detection.protos.StringIntLabelMapItem item = 1;
+int StringIntLabelMap::item_size() const {
+  return item_.size();
+}
+void StringIntLabelMap::clear_item() {
+  item_.Clear();
+}
+const ::object_detection::protos::StringIntLabelMapItem& StringIntLabelMap::item(int index) const {
+  // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMap.item)
+  return item_.Get(index);
+}
+::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::mutable_item(int index) {
+  // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMap.item)
+  return item_.Mutable(index);
+}
+::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::add_item() {
+  // @@protoc_insertion_point(field_add:object_detection.protos.StringIntLabelMap.item)
+  return item_.Add();
+}
+::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >*
+StringIntLabelMap::mutable_item() {
+  // @@protoc_insertion_point(field_mutable_list:object_detection.protos.StringIntLabelMap.item)
+  return &item_;
+}
+const ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >&
+StringIntLabelMap::item() const {
+  // @@protoc_insertion_point(field_list:object_detection.protos.StringIntLabelMap.item)
+  return item_;
+}
+
+#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
+
+// @@protoc_insertion_point(namespace_scope)
+
+} // namespace protos
+} // namespace object_detection
+
+// @@protoc_insertion_point(global_scope)
diff --git a/swift/Tensorflow/string_int_label_map.pb.h b/swift/Tensorflow/string_int_label_map.pb.h
new file mode 100644
index 0000000..a834acd
--- /dev/null
+++ b/swift/Tensorflow/string_int_label_map.pb.h
@@ -0,0 +1,528 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: string_int_label_map.proto
+
+#ifndef PROTOBUF_string_5fint_5flabel_5fmap_2eproto__INCLUDED
+#define PROTOBUF_string_5fint_5flabel_5fmap_2eproto__INCLUDED
+
+#include <string>
+
+#include <google/protobuf/stubs/common.h>
+
+#if GOOGLE_PROTOBUF_VERSION < 3004000
+#error This file was generated by a newer version of protoc which is
+#error incompatible with your Protocol Buffer headers. Please update
+#error your headers.
+#endif
+#if 3004000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
+#error This file was generated by an older version of protoc which is
+#error incompatible with your Protocol Buffer headers. Please
+#error regenerate this file with a newer version of protoc.
+#endif
+
+#include <google/protobuf/io/coded_stream.h>
+#include <google/protobuf/arena.h>
+#include <google/protobuf/arenastring.h>
+#include <google/protobuf/generated_message_table_driven.h>
+#include <google/protobuf/generated_message_util.h>
+#include <google/protobuf/metadata.h>
+#include <google/protobuf/message.h>
+#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
+#include <google/protobuf/extension_set.h>  // IWYU pragma: export
+#include <google/protobuf/unknown_field_set.h>
+// @@protoc_insertion_point(includes)
+namespace object_detection {
+namespace protos {
+class StringIntLabelMap;
+class StringIntLabelMapDefaultTypeInternal;
+extern StringIntLabelMapDefaultTypeInternal _StringIntLabelMap_default_instance_;
+class StringIntLabelMapItem;
+class StringIntLabelMapItemDefaultTypeInternal;
+extern StringIntLabelMapItemDefaultTypeInternal _StringIntLabelMapItem_default_instance_;
+} // namespace protos
+} // namespace object_detection
+
+namespace object_detection {
+namespace protos {
+
+namespace protobuf_string_5fint_5flabel_5fmap_2eproto {
+// Internal implementation detail -- do not call these.
+struct TableStruct {
+  static const ::google::protobuf::internal::ParseTableField entries[];
+  static const ::google::protobuf::internal::AuxillaryParseTableField aux[];
+  static const ::google::protobuf::internal::ParseTable schema[];
+  static const ::google::protobuf::uint32 offsets[];
+  static const ::google::protobuf::internal::FieldMetadata field_metadata[];
+  static const ::google::protobuf::internal::SerializationTable serialization_table[];
+  static void InitDefaultsImpl();
+};
+void AddDescriptors();
+void InitDefaults();
+} // namespace protobuf_string_5fint_5flabel_5fmap_2eproto
+
+// ===================================================================
+
+class StringIntLabelMapItem : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:object_detection.protos.StringIntLabelMapItem) */ {
+ public:
+  StringIntLabelMapItem();
+  virtual ~StringIntLabelMapItem();
+
+  StringIntLabelMapItem(const StringIntLabelMapItem& from);
+
+  inline StringIntLabelMapItem& operator=(const StringIntLabelMapItem& from) {
+    CopyFrom(from);
+    return *this;
+  }
+  #if LANG_CXX11
+  StringIntLabelMapItem(StringIntLabelMapItem&& from) noexcept
+    : StringIntLabelMapItem() {
+    *this = ::std::move(from);
+  }
+
+  inline StringIntLabelMapItem& operator=(StringIntLabelMapItem&& from) noexcept {
+    if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) {
+      if (this != &from) InternalSwap(&from);
+    } else {
+      CopyFrom(from);
+    }
+    return *this;
+  }
+  #endif
+  inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
+    return _internal_metadata_.unknown_fields();
+  }
+  inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
+    return _internal_metadata_.mutable_unknown_fields();
+  }
+
+  static const ::google::protobuf::Descriptor* descriptor();
+  static const StringIntLabelMapItem& default_instance();
+
+  static inline const StringIntLabelMapItem* internal_default_instance() {
+    return reinterpret_cast<const StringIntLabelMapItem*>(
+               &_StringIntLabelMapItem_default_instance_);
+  }
+  static PROTOBUF_CONSTEXPR int const kIndexInFileMessages =
+    0;
+
+  void Swap(StringIntLabelMapItem* other);
+  friend void swap(StringIntLabelMapItem& a, StringIntLabelMapItem& b) {
+    a.Swap(&b);
+  }
+
+  // implements Message ----------------------------------------------
+
+  inline StringIntLabelMapItem* New() const PROTOBUF_FINAL { return New(NULL); }
+
+  StringIntLabelMapItem* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL;
+  void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
+  void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL;
+  void CopyFrom(const StringIntLabelMapItem& from);
+  void MergeFrom(const StringIntLabelMapItem& from);
+  void Clear() PROTOBUF_FINAL;
+  bool
IsInitialized() const PROTOBUF_FINAL; + + size_t ByteSizeLong() const PROTOBUF_FINAL; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL; + int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const PROTOBUF_FINAL; + void InternalSwap(StringIntLabelMapItem* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return NULL; + } + inline void* MaybeArenaPtr() const { + return NULL; + } + public: + + ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // optional string name = 1; + bool has_name() const; + void clear_name(); + static const int kNameFieldNumber = 1; + const ::std::string& name() const; + void set_name(const ::std::string& value); + #if LANG_CXX11 + void set_name(::std::string&& value); + #endif + void set_name(const char* value); + void set_name(const char* value, size_t size); + ::std::string* mutable_name(); + ::std::string* release_name(); + void set_allocated_name(::std::string* name); + + // optional string display_name = 3; + bool has_display_name() const; + void clear_display_name(); + static const int kDisplayNameFieldNumber = 3; + const ::std::string& display_name() const; + void set_display_name(const ::std::string& value); + #if LANG_CXX11 + void set_display_name(::std::string&& value); + #endif + void set_display_name(const char* value); + void set_display_name(const char* value, size_t size); + ::std::string* mutable_display_name(); + ::std::string* release_display_name(); + void set_allocated_display_name(::std::string* display_name); + + // optional int32 id = 2; + bool has_id() const; + void clear_id(); + static const int kIdFieldNumber = 2; + ::google::protobuf::int32 id() const; + void set_id(::google::protobuf::int32 value); + + // @@protoc_insertion_point(class_scope:object_detection.protos.StringIntLabelMapItem) + private: + void set_has_name(); + void clear_has_name(); + void set_has_id(); + void clear_has_id(); + void set_has_display_name(); + void clear_has_display_name(); + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::HasBits<1> _has_bits_; + mutable int _cached_size_; + ::google::protobuf::internal::ArenaStringPtr name_; + ::google::protobuf::internal::ArenaStringPtr display_name_; + ::google::protobuf::int32 id_; + friend struct protobuf_string_5fint_5flabel_5fmap_2eproto::TableStruct; +}; +// ------------------------------------------------------------------- + +class StringIntLabelMap : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:object_detection.protos.StringIntLabelMap) */ { + public: + StringIntLabelMap(); + virtual ~StringIntLabelMap(); + + StringIntLabelMap(const StringIntLabelMap& from); + + inline StringIntLabelMap& operator=(const StringIntLabelMap& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + StringIntLabelMap(StringIntLabelMap&& from) noexcept + : StringIntLabelMap() { + *this = ::std::move(from); + } + + inline StringIntLabelMap& 
operator=(StringIntLabelMap&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const { + return _internal_metadata_.unknown_fields(); + } + inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() { + return _internal_metadata_.mutable_unknown_fields(); + } + + static const ::google::protobuf::Descriptor* descriptor(); + static const StringIntLabelMap& default_instance(); + + static inline const StringIntLabelMap* internal_default_instance() { + return reinterpret_cast( + &_StringIntLabelMap_default_instance_); + } + static PROTOBUF_CONSTEXPR int const kIndexInFileMessages = + 1; + + void Swap(StringIntLabelMap* other); + friend void swap(StringIntLabelMap& a, StringIntLabelMap& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline StringIntLabelMap* New() const PROTOBUF_FINAL { return New(NULL); } + + StringIntLabelMap* New(::google::protobuf::Arena* arena) const PROTOBUF_FINAL; + void CopyFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; + void MergeFrom(const ::google::protobuf::Message& from) PROTOBUF_FINAL; + void CopyFrom(const StringIntLabelMap& from); + void MergeFrom(const StringIntLabelMap& from); + void Clear() PROTOBUF_FINAL; + bool IsInitialized() const PROTOBUF_FINAL; + + size_t ByteSizeLong() const PROTOBUF_FINAL; + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) PROTOBUF_FINAL; + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const PROTOBUF_FINAL; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + bool deterministic, ::google::protobuf::uint8* target) const PROTOBUF_FINAL; + int GetCachedSize() const PROTOBUF_FINAL { return _cached_size_; } + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const PROTOBUF_FINAL; + void InternalSwap(StringIntLabelMap* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return NULL; + } + inline void* MaybeArenaPtr() const { + return NULL; + } + public: + + ::google::protobuf::Metadata GetMetadata() const PROTOBUF_FINAL; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .object_detection.protos.StringIntLabelMapItem item = 1; + int item_size() const; + void clear_item(); + static const int kItemFieldNumber = 1; + const ::object_detection::protos::StringIntLabelMapItem& item(int index) const; + ::object_detection::protos::StringIntLabelMapItem* mutable_item(int index); + ::object_detection::protos::StringIntLabelMapItem* add_item(); + ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >* + mutable_item(); + const ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >& + item() const; + + // @@protoc_insertion_point(class_scope:object_detection.protos.StringIntLabelMap) + private: + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::HasBits<1> _has_bits_; + mutable int _cached_size_; + ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem > item_; + friend struct protobuf_string_5fint_5flabel_5fmap_2eproto::TableStruct; +}; +// 
=================================================================== + + +// =================================================================== + +#if !PROTOBUF_INLINE_NOT_IN_HEADERS +#ifdef __GNUC__ + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstrict-aliasing" +#endif // __GNUC__ +// StringIntLabelMapItem + +// optional string name = 1; +inline bool StringIntLabelMapItem::has_name() const { + return (_has_bits_[0] & 0x00000001u) != 0; +} +inline void StringIntLabelMapItem::set_has_name() { + _has_bits_[0] |= 0x00000001u; +} +inline void StringIntLabelMapItem::clear_has_name() { + _has_bits_[0] &= ~0x00000001u; +} +inline void StringIntLabelMapItem::clear_name() { + name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_name(); +} +inline const ::std::string& StringIntLabelMapItem::name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.name) + return name_.GetNoArena(); +} +inline void StringIntLabelMapItem::set_name(const ::std::string& value) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.name) +} +#if LANG_CXX11 +inline void StringIntLabelMapItem::set_name(::std::string&& value) { + set_has_name(); + name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.name) +} +#endif +inline void StringIntLabelMapItem::set_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.name) +} +inline void StringIntLabelMapItem::set_name(const char* value, size_t size) { + set_has_name(); + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.name) +} +inline ::std::string* StringIntLabelMapItem::mutable_name() { + set_has_name(); + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.name) + return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* StringIntLabelMapItem::release_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.name) + clear_has_name(); + return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void StringIntLabelMapItem::set_allocated_name(::std::string* name) { + if (name != NULL) { + set_has_name(); + } else { + clear_has_name(); + } + name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.name) +} + +// optional int32 id = 2; +inline bool StringIntLabelMapItem::has_id() const { + return (_has_bits_[0] & 0x00000004u) != 0; +} +inline void StringIntLabelMapItem::set_has_id() { + _has_bits_[0] |= 0x00000004u; +} +inline void StringIntLabelMapItem::clear_has_id() { + _has_bits_[0] &= ~0x00000004u; +} +inline void StringIntLabelMapItem::clear_id() { + id_ = 0; + clear_has_id(); +} +inline 
::google::protobuf::int32 StringIntLabelMapItem::id() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.id) + return id_; +} +inline void StringIntLabelMapItem::set_id(::google::protobuf::int32 value) { + set_has_id(); + id_ = value; + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.id) +} + +// optional string display_name = 3; +inline bool StringIntLabelMapItem::has_display_name() const { + return (_has_bits_[0] & 0x00000002u) != 0; +} +inline void StringIntLabelMapItem::set_has_display_name() { + _has_bits_[0] |= 0x00000002u; +} +inline void StringIntLabelMapItem::clear_has_display_name() { + _has_bits_[0] &= ~0x00000002u; +} +inline void StringIntLabelMapItem::clear_display_name() { + display_name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_display_name(); +} +inline const ::std::string& StringIntLabelMapItem::display_name() const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.GetNoArena(); +} +inline void StringIntLabelMapItem::set_display_name(const ::std::string& value) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:object_detection.protos.StringIntLabelMapItem.display_name) +} +#if LANG_CXX11 +inline void StringIntLabelMapItem::set_display_name(::std::string&& value) { + set_has_display_name(); + display_name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:object_detection.protos.StringIntLabelMapItem.display_name) +} +#endif +inline void StringIntLabelMapItem::set_display_name(const char* value) { + GOOGLE_DCHECK(value != NULL); + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:object_detection.protos.StringIntLabelMapItem.display_name) +} +inline void StringIntLabelMapItem::set_display_name(const char* value, size_t size) { + set_has_display_name(); + display_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:object_detection.protos.StringIntLabelMapItem.display_name) +} +inline ::std::string* StringIntLabelMapItem::mutable_display_name() { + set_has_display_name(); + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMapItem.display_name) + return display_name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* StringIntLabelMapItem::release_display_name() { + // @@protoc_insertion_point(field_release:object_detection.protos.StringIntLabelMapItem.display_name) + clear_has_display_name(); + return display_name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void StringIntLabelMapItem::set_allocated_display_name(::std::string* display_name) { + if (display_name != NULL) { + set_has_display_name(); + } else { + clear_has_display_name(); + } + display_name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), display_name); + // @@protoc_insertion_point(field_set_allocated:object_detection.protos.StringIntLabelMapItem.display_name) +} + +// 
------------------------------------------------------------------- + +// StringIntLabelMap + +// repeated .object_detection.protos.StringIntLabelMapItem item = 1; +inline int StringIntLabelMap::item_size() const { + return item_.size(); +} +inline void StringIntLabelMap::clear_item() { + item_.Clear(); +} +inline const ::object_detection::protos::StringIntLabelMapItem& StringIntLabelMap::item(int index) const { + // @@protoc_insertion_point(field_get:object_detection.protos.StringIntLabelMap.item) + return item_.Get(index); +} +inline ::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::mutable_item(int index) { + // @@protoc_insertion_point(field_mutable:object_detection.protos.StringIntLabelMap.item) + return item_.Mutable(index); +} +inline ::object_detection::protos::StringIntLabelMapItem* StringIntLabelMap::add_item() { + // @@protoc_insertion_point(field_add:object_detection.protos.StringIntLabelMap.item) + return item_.Add(); +} +inline ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >* +StringIntLabelMap::mutable_item() { + // @@protoc_insertion_point(field_mutable_list:object_detection.protos.StringIntLabelMap.item) + return &item_; +} +inline const ::google::protobuf::RepeatedPtrField< ::object_detection::protos::StringIntLabelMapItem >& +StringIntLabelMap::item() const { + // @@protoc_insertion_point(field_list:object_detection.protos.StringIntLabelMap.item) + return item_; +} + +#ifdef __GNUC__ + #pragma GCC diagnostic pop +#endif // __GNUC__ +#endif // !PROTOBUF_INLINE_NOT_IN_HEADERS +// ------------------------------------------------------------------- + + +// @@protoc_insertion_point(namespace_scope) + + +} // namespace protos +} // namespace object_detection + +// @@protoc_insertion_point(global_scope) + +#endif // PROTOBUF_string_5fint_5flabel_5fmap_2eproto__INCLUDED diff --git a/swift/Tensorflow/tensorflowiOS-Bridging-Header.h b/swift/Tensorflow/tensorflowiOS-Bridging-Header.h new file mode 100644 index 0000000..b4a8f7f --- /dev/null +++ b/swift/Tensorflow/tensorflowiOS-Bridging-Header.h @@ -0,0 +1,18 @@ +// +// Use this file to import your target's public headers that you would like to expose to Swift. +// + +#ifdef __cplusplus +extern "C" { +#endif + +#ifdef __cplusplus +} +#endif + + + +#import "TensorflowGraph.h" +#import "TensorflowPrediction.h" + + diff --git a/swift/tensorflowiOS.xcodeproj/project.pbxproj b/swift/tensorflowiOS.xcodeproj/project.pbxproj new file mode 100644 index 0000000..52ff7df --- /dev/null +++ b/swift/tensorflowiOS.xcodeproj/project.pbxproj @@ -0,0 +1,517 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 48; + objects = { + +/* Begin PBXBuildFile section */ + 5365E49E1FB762BE0004EFFD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 5365E48B1FB762BE0004EFFD /* Assets.xcassets */; }; + 5365E49F1FB762BE0004EFFD /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 5365E48C1FB762BE0004EFFD /* LaunchScreen.storyboard */; }; + 5365E4A01FB762BE0004EFFD /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 5365E48E1FB762BE0004EFFD /* Main.storyboard */; }; + 5365E4A11FB762BE0004EFFD /* op_inference_graph_rcnn.pb in Resources */ = {isa = PBXBuildFile; fileRef = 5365E4921FB762BE0004EFFD /* op_inference_graph_rcnn.pb */; }; + 5365E4A21FB762BE0004EFFD /* mscoco_label_map.txt in Resources */ = {isa = PBXBuildFile; fileRef = 5365E4931FB762BE0004EFFD /* mscoco_label_map.txt */; }; + 5365E4A31FB762BE0004EFFD /* op_inference_graph_inv2.pb in Resources */ = {isa = PBXBuildFile; fileRef = 5365E4951FB762BE0004EFFD /* op_inference_graph_inv2.pb */; }; + 5365E4A41FB762BE0004EFFD /* op_inference_graph.pb in Resources */ = {isa = PBXBuildFile; fileRef = 5365E4971FB762BE0004EFFD /* op_inference_graph.pb */; }; + 5365E4A51FB762BE0004EFFD /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4991FB762BE0004EFFD /* AppDelegate.swift */; }; + 5365E4A61FB762BE0004EFFD /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E49A1FB762BE0004EFFD /* ViewController.swift */; }; + 5365E4A91FB7660F0004EFFD /* CameraPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4A81FB7660F0004EFFD /* CameraPreviewView.swift */; }; + 5365E4AB1FB7807E0004EFFD /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4AA1FB7807E0004EFFD /* Constants.swift */; }; + 5365E4AD1FB796B80004EFFD /* BoundingBoxView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4AC1FB796B80004EFFD /* BoundingBoxView.swift */; }; + 5365E4B81FB7A8B90004EFFD /* tensorflow.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */; }; + 53D01D8E1FB95AA400AEAFC6 /* TensorflowGraph.mm in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D8D1FB95AA400AEAFC6 /* TensorflowGraph.mm */; }; + 53D01D911FB95BE700AEAFC6 /* string_int_label_map.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D8F1FB95BE700AEAFC6 /* string_int_label_map.pb.cc */; }; + 53D01D941FB95C1800AEAFC6 /* TensorflowPrediction.m in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D931FB95C1800AEAFC6 /* TensorflowPrediction.m */; }; + 53D01D971FB95D1D00AEAFC6 /* TensorflowUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D961FB95D1D00AEAFC6 /* TensorflowUtils.mm */; }; + 53D01D9A1FB95DCF00AEAFC6 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 53D01D991FB95DB600AEAFC6 /* Accelerate.framework */; }; + 53D01DA11FB961C200AEAFC6 /* libstdc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 53D01DA01FB961C200AEAFC6 /* libstdc++.tbd */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 5365E4701FB6D9E00004EFFD /* tensorflowiOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = tensorflowiOS.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 5365E48B1FB762BE0004EFFD /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 5365E48D1FB762BE0004EFFD /* Base */ = {isa = 
PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 5365E48F1FB762BE0004EFFD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 5365E4921FB762BE0004EFFD /* op_inference_graph_rcnn.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = op_inference_graph_rcnn.pb; sourceTree = ""; }; + 5365E4931FB762BE0004EFFD /* mscoco_label_map.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = mscoco_label_map.txt; sourceTree = ""; }; + 5365E4951FB762BE0004EFFD /* op_inference_graph_inv2.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = op_inference_graph_inv2.pb; sourceTree = ""; }; + 5365E4971FB762BE0004EFFD /* op_inference_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = op_inference_graph.pb; sourceTree = ""; }; + 5365E4991FB762BE0004EFFD /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + 5365E49A1FB762BE0004EFFD /* ViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; + 5365E49C1FB762BE0004EFFD /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 5365E4A81FB7660F0004EFFD /* CameraPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPreviewView.swift; sourceTree = ""; }; + 5365E4AA1FB7807E0004EFFD /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; + 5365E4AC1FB796B80004EFFD /* BoundingBoxView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BoundingBoxView.swift; sourceTree = ""; }; + 5365E4B01FB7A4530004EFFD /* tensorflowiOS-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "tensorflowiOS-Bridging-Header.h"; sourceTree = ""; }; + 5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = tensorflow.xcconfig; sourceTree = ""; }; + 53AA6BDA1FC5FA7000074E49 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; + 53D01D8C1FB95AA400AEAFC6 /* TensorflowGraph.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TensorflowGraph.h; sourceTree = ""; }; + 53D01D8D1FB95AA400AEAFC6 /* TensorflowGraph.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = TensorflowGraph.mm; sourceTree = ""; }; + 53D01D8F1FB95BE700AEAFC6 /* string_int_label_map.pb.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = string_int_label_map.pb.cc; sourceTree = ""; }; + 53D01D901FB95BE700AEAFC6 /* string_int_label_map.pb.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = string_int_label_map.pb.h; sourceTree = ""; }; + 53D01D921FB95C1800AEAFC6 /* TensorflowPrediction.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TensorflowPrediction.h; sourceTree = ""; }; + 53D01D931FB95C1800AEAFC6 /* TensorflowPrediction.m */ = 
{isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TensorflowPrediction.m; sourceTree = ""; }; + 53D01D951FB95D1D00AEAFC6 /* TensorflowUtils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TensorflowUtils.h; sourceTree = ""; }; + 53D01D961FB95D1D00AEAFC6 /* TensorflowUtils.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = TensorflowUtils.mm; sourceTree = ""; }; + 53D01D991FB95DB600AEAFC6 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; + 53D01D9E1FB961A800AEAFC6 /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "usr/lib/libc++.tbd"; sourceTree = SDKROOT; }; + 53D01DA01FB961C200AEAFC6 /* libstdc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libstdc++.tbd"; path = "usr/lib/libstdc++.tbd"; sourceTree = SDKROOT; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 5365E46D1FB6D9E00004EFFD /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 53D01DA11FB961C200AEAFC6 /* libstdc++.tbd in Frameworks */, + 53D01D9A1FB95DCF00AEAFC6 /* Accelerate.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 5365E4671FB6D9E00004EFFD = { + isa = PBXGroup; + children = ( + 53AA6BDA1FC5FA7000074E49 /* README.md */, + 5365E4721FB6D9E00004EFFD /* tensorflowiOS */, + 5365E4711FB6D9E00004EFFD /* Products */, + 53D01D981FB95DB600AEAFC6 /* Frameworks */, + ); + sourceTree = ""; + }; + 5365E4711FB6D9E00004EFFD /* Products */ = { + isa = PBXGroup; + children = ( + 5365E4701FB6D9E00004EFFD /* tensorflowiOS.app */, + ); + name = Products; + sourceTree = ""; + }; + 5365E4721FB6D9E00004EFFD /* tensorflowiOS */ = { + isa = PBXGroup; + children = ( + 5365E4981FB762BE0004EFFD /* App */, + 5365E49D1FB762BE0004EFFD /* Tensorflow */, + 5365E4901FB762BE0004EFFD /* Models */, + 5365E48A1FB762BE0004EFFD /* Assets */, + 5365E49B1FB762BE0004EFFD /* SupportingFiles */, + ); + path = tensorflowiOS; + sourceTree = ""; + }; + 5365E48A1FB762BE0004EFFD /* Assets */ = { + isa = PBXGroup; + children = ( + 5365E48B1FB762BE0004EFFD /* Assets.xcassets */, + 5365E48C1FB762BE0004EFFD /* LaunchScreen.storyboard */, + 5365E48E1FB762BE0004EFFD /* Main.storyboard */, + ); + path = Assets; + sourceTree = SOURCE_ROOT; + }; + 5365E4901FB762BE0004EFFD /* Models */ = { + isa = PBXGroup; + children = ( + 5365E4911FB762BE0004EFFD /* faster_rcnn_resnet101_coco_11_06_2017 */, + 5365E4931FB762BE0004EFFD /* mscoco_label_map.txt */, + 5365E4941FB762BE0004EFFD /* ssd_inception_v2_coco_11_06_2017 */, + 5365E4961FB762BE0004EFFD /* ssd_mobilenet_v1_coco_11_06_2017 */, + ); + path = Models; + sourceTree = SOURCE_ROOT; + }; + 5365E4911FB762BE0004EFFD /* faster_rcnn_resnet101_coco_11_06_2017 */ = { + isa = PBXGroup; + children = ( + 5365E4921FB762BE0004EFFD /* op_inference_graph_rcnn.pb */, + ); + path = faster_rcnn_resnet101_coco_11_06_2017; + sourceTree = ""; + }; + 5365E4941FB762BE0004EFFD /* ssd_inception_v2_coco_11_06_2017 */ = { + isa = PBXGroup; + children = ( + 5365E4951FB762BE0004EFFD /* op_inference_graph_inv2.pb */, + ); + path = 
ssd_inception_v2_coco_11_06_2017; + sourceTree = ""; + }; + 5365E4961FB762BE0004EFFD /* ssd_mobilenet_v1_coco_11_06_2017 */ = { + isa = PBXGroup; + children = ( + 5365E4971FB762BE0004EFFD /* op_inference_graph.pb */, + ); + path = ssd_mobilenet_v1_coco_11_06_2017; + sourceTree = ""; + }; + 5365E4981FB762BE0004EFFD /* App */ = { + isa = PBXGroup; + children = ( + 5365E4991FB762BE0004EFFD /* AppDelegate.swift */, + 5365E49A1FB762BE0004EFFD /* ViewController.swift */, + 5365E4A81FB7660F0004EFFD /* CameraPreviewView.swift */, + 5365E4AC1FB796B80004EFFD /* BoundingBoxView.swift */, + ); + path = App; + sourceTree = SOURCE_ROOT; + }; + 5365E49B1FB762BE0004EFFD /* SupportingFiles */ = { + isa = PBXGroup; + children = ( + 5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */, + 5365E49C1FB762BE0004EFFD /* Info.plist */, + 5365E4AA1FB7807E0004EFFD /* Constants.swift */, + ); + path = SupportingFiles; + sourceTree = SOURCE_ROOT; + }; + 5365E49D1FB762BE0004EFFD /* Tensorflow */ = { + isa = PBXGroup; + children = ( + 53D01D951FB95D1D00AEAFC6 /* TensorflowUtils.h */, + 53D01D961FB95D1D00AEAFC6 /* TensorflowUtils.mm */, + 53D01D8C1FB95AA400AEAFC6 /* TensorflowGraph.h */, + 53D01D8D1FB95AA400AEAFC6 /* TensorflowGraph.mm */, + 5365E4B01FB7A4530004EFFD /* tensorflowiOS-Bridging-Header.h */, + 53D01D921FB95C1800AEAFC6 /* TensorflowPrediction.h */, + 53D01D931FB95C1800AEAFC6 /* TensorflowPrediction.m */, + 53D01D8F1FB95BE700AEAFC6 /* string_int_label_map.pb.cc */, + 53D01D901FB95BE700AEAFC6 /* string_int_label_map.pb.h */, + ); + path = Tensorflow; + sourceTree = SOURCE_ROOT; + }; + 53D01D981FB95DB600AEAFC6 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 53D01DA01FB961C200AEAFC6 /* libstdc++.tbd */, + 53D01D9E1FB961A800AEAFC6 /* libc++.tbd */, + 53D01D991FB95DB600AEAFC6 /* Accelerate.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 5365E46F1FB6D9E00004EFFD /* tensorflowiOS */ = { + isa = PBXNativeTarget; + buildConfigurationList = 5365E4821FB6D9E00004EFFD /* Build configuration list for PBXNativeTarget "tensorflowiOS" */; + buildPhases = ( + 5365E46C1FB6D9E00004EFFD /* Sources */, + 5365E46D1FB6D9E00004EFFD /* Frameworks */, + 5365E46E1FB6D9E00004EFFD /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = tensorflowiOS; + productName = tensorflowiOS; + productReference = 5365E4701FB6D9E00004EFFD /* tensorflowiOS.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 5365E4681FB6D9E00004EFFD /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 0910; + LastUpgradeCheck = 0910; + ORGANIZATIONNAME = "Chris Sharp"; + TargetAttributes = { + 5365E46F1FB6D9E00004EFFD = { + CreatedOnToolsVersion = 9.1; + LastSwiftMigration = 0910; + ProvisioningStyle = Automatic; + }; + }; + }; + buildConfigurationList = 5365E46B1FB6D9E00004EFFD /* Build configuration list for PBXProject "tensorflowiOS" */; + compatibilityVersion = "Xcode 8.0"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 5365E4671FB6D9E00004EFFD; + productRefGroup = 5365E4711FB6D9E00004EFFD /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 5365E46F1FB6D9E00004EFFD /* tensorflowiOS */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 5365E46E1FB6D9E00004EFFD /* Resources */ = { + isa = PBXResourcesBuildPhase; + 
buildActionMask = 2147483647; + files = ( + 5365E4A21FB762BE0004EFFD /* mscoco_label_map.txt in Resources */, + 5365E4A11FB762BE0004EFFD /* op_inference_graph_rcnn.pb in Resources */, + 5365E49E1FB762BE0004EFFD /* Assets.xcassets in Resources */, + 5365E4A01FB762BE0004EFFD /* Main.storyboard in Resources */, + 5365E49F1FB762BE0004EFFD /* LaunchScreen.storyboard in Resources */, + 5365E4B81FB7A8B90004EFFD /* tensorflow.xcconfig in Resources */, + 5365E4A31FB762BE0004EFFD /* op_inference_graph_inv2.pb in Resources */, + 5365E4A41FB762BE0004EFFD /* op_inference_graph.pb in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 5365E46C1FB6D9E00004EFFD /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 5365E4A51FB762BE0004EFFD /* AppDelegate.swift in Sources */, + 53D01D941FB95C1800AEAFC6 /* TensorflowPrediction.m in Sources */, + 53D01D971FB95D1D00AEAFC6 /* TensorflowUtils.mm in Sources */, + 5365E4A61FB762BE0004EFFD /* ViewController.swift in Sources */, + 53D01D911FB95BE700AEAFC6 /* string_int_label_map.pb.cc in Sources */, + 5365E4AD1FB796B80004EFFD /* BoundingBoxView.swift in Sources */, + 5365E4A91FB7660F0004EFFD /* CameraPreviewView.swift in Sources */, + 53D01D8E1FB95AA400AEAFC6 /* TensorflowGraph.mm in Sources */, + 5365E4AB1FB7807E0004EFFD /* Constants.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXVariantGroup section */ + 5365E48C1FB762BE0004EFFD /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 5365E48D1FB762BE0004EFFD /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; + 5365E48E1FB762BE0004EFFD /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 5365E48F1FB762BE0004EFFD /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 5365E4801FB6D9E00004EFFD /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + 
GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.1; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + 5365E4811FB6D9E00004EFFD /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + CODE_SIGN_IDENTITY = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.1; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 5365E4831FB6D9E00004EFFD /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = G8WMBH6ZQE; + HEADER_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/public/", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/proto", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/protobuf/src", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/eigen", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads", + "$(TENSORFLOW_ROOT)", + ); + INFOPLIST_FILE = "$(SRCROOT)/SupportingFiles/Info.plist"; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/protobuf_ios/lib/", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib", + ); + OTHER_LDFLAGS = ( + "-force_load", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a", + "-lprotobuf", + "-lprotobuf-lite", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/builds/${CURRENT_ARCH}.ios.c++11/nsync.a", + "-lc++", + ); 
+ PRODUCT_BUNDLE_IDENTIFIER = com.username.tensorflowiOS; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Tensorflow/tensorflowiOS-Bridging-Header.h"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 4.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 5365E4841FB6D9E00004EFFD /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_TEAM = G8WMBH6ZQE; + HEADER_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/public/", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/proto", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/protobuf/src", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/eigen", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads", + "$(TENSORFLOW_ROOT)", + ); + INFOPLIST_FILE = "$(SRCROOT)/SupportingFiles/Info.plist"; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/protobuf_ios/lib/", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib", + ); + OTHER_LDFLAGS = ( + "-force_load", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a", + "-lprotobuf", + "-lprotobuf-lite", + "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/builds/${CURRENT_ARCH}.ios.c++11/nsync.a", + "-lc++", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.username.tensorflowiOS; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Tensorflow/tensorflowiOS-Bridging-Header.h"; + SWIFT_VERSION = 4.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 5365E46B1FB6D9E00004EFFD /* Build configuration list for PBXProject "tensorflowiOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 5365E4801FB6D9E00004EFFD /* Debug */, + 5365E4811FB6D9E00004EFFD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 5365E4821FB6D9E00004EFFD /* Build configuration list for PBXNativeTarget "tensorflowiOS" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 5365E4831FB6D9E00004EFFD /* Debug */, + 5365E4841FB6D9E00004EFFD /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 5365E4681FB6D9E00004EFFD /* Project object */; +} diff --git a/swift/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/swift/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000..5080e47 --- /dev/null +++ b/swift/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ +&lt;?xml version="1.0" encoding="UTF-8"?&gt; +&lt;Workspace +   version = "1.0"&gt; +   &lt;FileRef +      location = "self:tensorflowiOS.xcodeproj"&gt; +   &lt;/FileRef&gt; +&lt;/Workspace&gt;