Polished the code and dropped the iOS 4 support
Benjamin Loulier committed May 10, 2013
1 parent f8eb31e commit d817223
Showing 14 changed files with 261 additions and 379 deletions.
32 changes: 2 additions & 30 deletions Classes/MyAVController.h
@@ -8,36 +8,8 @@
@class AVController
@author Benjamin Loulier
@brief Controller to demonstrate how we can have direct access to the camera using the iPhone SDK 4
@brief Controller to demonstrate how we can have direct access to the camera
*/
@interface MyAVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate> {
AVCaptureSession *_captureSession;
UIImageView *_imageView;
CALayer *_customLayer;
AVCaptureVideoPreviewLayer *_prevLayer;
}

/*!
@brief The capture session takes the input from the camera and captures it
*/
@property (nonatomic, retain) AVCaptureSession *captureSession;

/*!
@brief The UIImageView we use to display the image generated from the imageBuffer
*/
@property (nonatomic, retain) UIImageView *imageView;
/*!
@brief The CALayer we use to display the CGImageRef generated from the imageBuffer
*/
@property (nonatomic, retain) CALayer *customLayer;
/*!
@brief The CALayer customized by Apple to display the video corresponding to a capture session
*/
@property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;

/*!
@brief This method initializes the capture session
*/
- (void)initCapture;
@interface MyAVController : UIViewController

@end
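
The public header now exposes nothing but the UIViewController subclass; the delegate conformance, the properties, and the setup method move into a class extension at the top of MyAVController.m (shown in the next file). In general form the pattern looks like the following sketch, where Foo is a placeholder name rather than a class in this project:

// Foo.h -- only the public surface is declared
@interface Foo : UIViewController
@end

// Foo.m -- private conformances, properties, and methods live in a class extension
@interface Foo () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
- (void)setupCapture;
@end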
143 changes: 70 additions & 73 deletions Classes/MyAVController.m
@@ -1,32 +1,43 @@
#import "MyAVController.h"

@interface MyAVController () <AVCaptureVideoDataOutputSampleBufferDelegate>

@implementation MyAVController
/*!
@brief The capture session takes the input from the camera and captures it
*/
@property (nonatomic, strong) AVCaptureSession *captureSession;

/*!
@brief The UIImageView we use to display the image generated from the imageBuffer
*/
@property (nonatomic, strong) UIImageView *imageView;
/*!
@brief The CALayer we use to display the CGImageRef generated from the imageBuffer
*/
@property (nonatomic, strong) CALayer *customLayer;
/*!
@brief The CALayer customized by Apple to display the video corresponding to a capture session
*/
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *prevLayer;

/*!
@brief This method initializes the capture session
*/
- (void)setupCapture;

@synthesize captureSession = _captureSession;
@synthesize imageView = _imageView;
@synthesize customLayer = _customLayer;
@synthesize prevLayer = _prevLayer;
@end

@implementation MyAVController

#pragma mark -
#pragma mark Initialization
- (id)init {
self = [super init];
if (self) {
/*We initialize some variables (they might not be initialized depending on what is commented out)*/
self.imageView = nil;
self.prevLayer = nil;
self.customLayer = nil;
}
return self;
}

- (void)viewDidLoad {
/*We initialize the capture*/
[self initCapture];
[self setupCapture];
}

- (void)initCapture {
- (void)setupCapture {
/*We setup the input*/
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
@@ -48,9 +59,7 @@ - (void)initCapture {
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Set the video output to store frames in BGRA (it is supposed to be faster)
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
NSDictionary* videoSettings = @{(__bridge NSString*)kCVPixelBufferPixelFormatTypeKey: [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]};
[captureOutput setVideoSettings:videoSettings];
/*And we create a capture session*/
self.captureSession = [[AVCaptureSession alloc] init];
@@ -85,62 +94,50 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
/*We create an autorelease pool because as we are not in the main_queue our code is
not executed in the main thread. So we have to create an autorelease pool for the thread we are in*/

NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
/*Lock the image buffer*/
CVPixelBufferLockBaseAddress(imageBuffer,0);
/*Get information about the image*/
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);

/*Create a CGImageRef from the CVImageBufferRef*/
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);

/*We release some components*/
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);

/*We display the result on the custom layer. All the display stuff must be done on the main thread because
UIKit is not thread safe, and as we are not on the main thread (remember we didn't use the main_queue)
we use performSelectorOnMainThread to call our CALayer and tell it to display the CGImage.*/
[self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];

/*We display the result on the image view (We need to change the orientation of the image so that the video is displayed correctly).
Same thing as for the CALayer we are not in the main thread so ...*/
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
@autoreleasepool {

CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
/*Lock the image buffer*/
CVPixelBufferLockBaseAddress(imageBuffer,0);
/*Get information about the image*/
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);

/*Create a CGImageRef from the CVImageBufferRef*/
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);

/*We release some components*/
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);

/*We display the result on the custom layer. All the display stuff must be done on the main thread because
UIKit is not thread safe, and as we are not on the main thread (remember we didn't use the main_queue)
we dispatch_sync onto the main queue to tell our CALayer to display the CGImage.*/
dispatch_sync(dispatch_get_main_queue(), ^{
[self.customLayer setContents:(__bridge id)newImage];
});

/*We display the result on the image view (we need to change the orientation of the image so that the video is displayed correctly).
As with the CALayer, we are not on the main thread, so the update is dispatched to the main queue as well.*/
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];

/*We release the CGImageRef*/
CGImageRelease(newImage);

dispatch_sync(dispatch_get_main_queue(), ^{
[self.imageView setImage:image];
});

/*We unlock the image buffer*/
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}

/*We release the CGImageRef*/
CGImageRelease(newImage);

[self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];

/*We unlock the image buffer*/
CVPixelBufferUnlockBaseAddress(imageBuffer,0);

[pool drain];
}

#pragma mark -
#pragma mark Memory management

- (void)viewDidUnload {
self.imageView = nil;
self.customLayer = nil;
self.prevLayer = nil;
}

- (void)dealloc {
[self.captureSession release];
[super dealloc];
}


@end
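
For readers following the ARC migration in this file, here is a minimal, self-contained sketch of the frame-handling pattern it ends up with: an @autoreleasepool block instead of a manually drained NSAutoreleasePool, a __bridge cast for the CGImageRef handed to the layer, and dispatch_sync onto the main queue instead of performSelectorOnMainThread. Names mirror the properties declared in the class extension above; treat it as an illustration rather than the exact file contents.

// Sketch only: assumes a class extension declaring customLayer (CALayer *) and imageView (UIImageView *),
// and that the AVCaptureVideoDataOutput delivers 32BGRA frames on a background serial queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        // Wrap the BGRA pixel data in a bitmap context and snapshot it as a CGImage.
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow   = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width         = CVPixelBufferGetWidth(imageBuffer);
        size_t height        = CVPixelBufferGetHeight(imageBuffer);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                     colorSpace,
                                                     kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(context);
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);

        UIImage *image = [UIImage imageWithCGImage:newImage
                                             scale:1.0
                                       orientation:UIImageOrientationRight];

        // UIKit and CALayer updates happen on the main queue; dispatch_sync keeps the
        // CGImageRef alive until the layer has taken ownership of its contents.
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customLayer.contents = (__bridge id)newImage;
            self.imageView.image = image;
        });

        CGImageRelease(newImage);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}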
4 changes: 2 additions & 2 deletions Classes/MyAVControllerAppDelegate.h
@@ -7,8 +7,8 @@
WelcomeViewController *viewController;
}

@property (nonatomic, retain) IBOutlet UIWindow *window;
@property (nonatomic, retain) IBOutlet WelcomeViewController *viewController;
@property (nonatomic, strong) IBOutlet UIWindow *window;
@property (nonatomic, strong) IBOutlet WelcomeViewController *viewController;

@end
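
Under ARC the retain attribute is spelled strong, so the change above is a mechanical rename with unchanged ownership semantics. A short sketch of the usual attribute choices; the statusLabel and userName properties are hypothetical examples, not part of this project:

// Common ARC property attributes (sketch; only window exists in this project)
@property (nonatomic, strong) IBOutlet UIWindow *window;       // top-level nib object: hold it strongly
@property (nonatomic, weak)   IBOutlet UILabel  *statusLabel;  // subview retained by its superview, so weak is enough
@property (nonatomic, copy)   NSString *userName;              // value-like objects are usually copied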

5 changes: 0 additions & 5 deletions Classes/MyAVControllerAppDelegate.m
@@ -64,11 +64,6 @@ Free up as much memory as possible by purging cached data objects that can be re
}


- (void)dealloc {
[viewController release];
[window release];
[super dealloc];
}


@end
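
Release-only dealloc methods such as the one deleted above are unnecessary under ARC, and explicit calls to release or [super dealloc] no longer compile. A dealloc override is only worth keeping for non-object cleanup; the observer below is a hypothetical example, not code from this project:

// Under ARC the compiler releases ivars and chains to the superclass dealloc automatically.
- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self]; // hypothetical remaining cleanup
}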
2 changes: 1 addition & 1 deletion Classes/WelcomeViewController.h
@@ -3,7 +3,7 @@
@interface WelcomeViewController : UIViewController {
}

- (IBAction)startFlashcodeDetection;
- (IBAction)start;

@end

5 changes: 1 addition & 4 deletions Classes/WelcomeViewController.m
@@ -3,12 +3,9 @@

@implementation WelcomeViewController

- (IBAction)startFlashcodeDetection {
- (IBAction)start {
[self presentModalViewController:[[MyAVController alloc] init] animated:YES];
}

- (void)dealloc {
[super dealloc];
}

@end
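
One caveat about the presentation call kept above: presentModalViewController:animated: was deprecated in iOS 6 in favor of the block-based API, which is available from iOS 5.0, the new deployment target. A hedged alternative with the same flow:

- (IBAction)start {
    MyAVController *cameraController = [[MyAVController alloc] init];
    // presentViewController:animated:completion: replaces the deprecated modal presentation call.
    [self presentViewController:cameraController animated:YES completion:nil];
}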
Binary file added Default-568h@2x.png
16 changes: 14 additions & 2 deletions MyAVController.xcodeproj/project.pbxproj
@@ -21,6 +21,7 @@
438CE43111D2766D00CF55BC /* MyAVController.m in Sources */ = {isa = PBXBuildFile; fileRef = 438CE43011D2766D00CF55BC /* MyAVController.m */; };
43D32BCE11D341F400AFA790 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 43D32BCD11D341F400AFA790 /* QuartzCore.framework */; };
43D32E3011D3A1DB00AFA790 /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 43D32E2F11D3A1DB00AFA790 /* AudioToolbox.framework */; };
B5A05E2C17395DBC00943B9A /* Default-568h@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = B5A05E2B17395DBC00943B9A /* Default-568h@2x.png */; };
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
@@ -44,6 +45,7 @@
43D32BCD11D341F400AFA790 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
43D32E2F11D3A1DB00AFA790 /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; };
8D1107310486CEB800E47090 /* MyAVController-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "MyAVController-Info.plist"; plistStructureDefinitionIdentifier = "com.apple.xcode.plist.structure-definition.iphone.info-plist"; sourceTree = "<group>"; };
B5A05E2B17395DBC00943B9A /* Default-568h@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Default-568h@2x.png"; sourceTree = "<group>"; };
/* End PBXFileReference section */

/* Begin PBXFrameworksBuildPhase section */
@@ -89,6 +91,7 @@
29B97314FDCFA39411CA2CEA /* CustomTemplate */ = {
isa = PBXGroup;
children = (
B5A05E2B17395DBC00943B9A /* Default-568h@2x.png */,
080E96DDFE201D6D7F000001 /* Classes */,
29B97315FDCFA39411CA2CEA /* Other Sources */,
29B97317FDCFA39411CA2CEA /* Resources */,
@@ -159,7 +162,11 @@
isa = PBXProject;
buildConfigurationList = C01FCF4E08A954540054247B /* Build configuration list for PBXProject "MyAVController" */;
compatibilityVersion = "Xcode 3.1";
developmentRegion = English;
hasScannedForEncodings = 1;
knownRegions = (
en,
);
mainGroup = 29B97314FDCFA39411CA2CEA /* CustomTemplate */;
projectDirPath = "";
projectRoot = "";
@@ -176,6 +183,7 @@
files = (
28AD733F0D9D9553002E5188 /* MainWindow.xib in Resources */,
2899E5220DE3E06400AC0155 /* WelcomeViewController.xib in Resources */,
B5A05E2C17395DBC00943B9A /* Default-568h@2x.png in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -200,6 +208,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ENABLE_OBJC_ARC = YES;
COPY_PHASE_STRIP = NO;
GCC_DYNAMIC_NO_PIC = NO;
GCC_OPTIMIZATION_LEVEL = 0;
@@ -214,6 +223,7 @@
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ENABLE_OBJC_ARC = YES;
COPY_PHASE_STRIP = YES;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
GCC_PREFIX_HEADER = MyAVController_Prefix.pch;
@@ -231,8 +241,9 @@
GCC_C_LANGUAGE_STANDARD = c99;
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 5.0;
PREBINDING = NO;
SDKROOT = iphoneos4.0;
SDKROOT = iphoneos;
};
name = Debug;
};
@@ -244,9 +255,10 @@
GCC_C_LANGUAGE_STANDARD = c99;
GCC_WARN_ABOUT_RETURN_TYPE = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 5.0;
OTHER_CFLAGS = "-DNS_BLOCK_ASSERTIONS=1";
PREBINDING = NO;
SDKROOT = iphoneos4.0;
SDKROOT = iphoneos;
};
name = Release;
};
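
The project-level changes amount to three settings: ARC switched on, the deployment target raised to iOS 5.0, and the SDK changed from the pinned iphoneos4.0 to the latest iphoneos. Expressed as a hypothetical xcconfig fragment (this repository edits project.pbxproj directly and does not ship xcconfig files):

// Equivalent build settings as an xcconfig-style sketch, not a file in this repository
CLANG_ENABLE_OBJC_ARC = YES
IPHONEOS_DEPLOYMENT_TARGET = 5.0
SDKROOT = iphoneos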
Binary file not shown.
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>HasAskedToTakeAutomaticSnapshotBeforeSignificantChanges</key>
<true/>
<key>SnapshotAutomaticallyBeforeSignificantChanges</key>
<true/>
</dict>
</plist>
