Skip to content
This repository has been archived by the owner on Jun 16, 2023. It is now read-only.

Commit

Permalink
feat(preview): add preview methods and more fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
jgfidelis committed May 29, 2018
1 parent 60b9ef1 commit b9fb708
Show file tree
Hide file tree
Showing 3 changed files with 144 additions and 28 deletions.
8 changes: 7 additions & 1 deletion ios/RN/RNCamera.h
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,9 @@
@property (assign, nonatomic) NSInteger autoFocus;
@property (assign, nonatomic) float focusDepth;
@property (assign, nonatomic) NSInteger whiteBalance;
@property (nonatomic, assign, getter=isReadingBarCodes) BOOL barCodeReading;
@property (assign, nonatomic) AVCaptureSessionPreset pictureSize;
@property (nonatomic, assign) BOOL isReadingBarCodes;
@property (nonatomic, assign) BOOL isDetectingFaces;
@property(assign, nonatomic) AVVideoCodecType videoCodecType;

- (id)initWithBridge:(RCTBridge *)bridge;
Expand All @@ -39,18 +41,22 @@
- (void)updateFocusDepth;
- (void)updateZoom;
- (void)updateWhiteBalance;
- (void)updatePictureSize;
- (void)updateFaceDetecting:(id)isDetectingFaces;
- (void)updateFaceDetectionMode:(id)requestedMode;
- (void)updateFaceDetectionLandmarks:(id)requestedLandmarks;
- (void)updateFaceDetectionClassifications:(id)requestedClassifications;
- (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
- (void)record:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject;
- (void)stopRecording;
- (void)resumePreview;
- (void)pausePreview;
- (void)setupOrDisableBarcodeScanner;
- (void)onReady:(NSDictionary *)event;
- (void)onMountingError:(NSDictionary *)event;
- (void)onCodeRead:(NSDictionary *)event;
- (void)onFacesDetected:(NSDictionary *)event;
- (void)onPictureSaved:(NSDictionary *)event;

@end

55 changes: 47 additions & 8 deletions ios/RN/RNCamera.m
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ @interface RNCamera ()
@property (nonatomic, copy) RCTDirectEventBlock onMountError;
@property (nonatomic, copy) RCTDirectEventBlock onBarCodeRead;
@property (nonatomic, copy) RCTDirectEventBlock onFacesDetected;
@property (nonatomic, copy) RCTDirectEventBlock onPictureSaved;

@end

Expand Down Expand Up @@ -85,6 +86,13 @@ - (void)onCodeRead:(NSDictionary *)event
}
}

// Forwards a picture-saved payload to JS when the onPictureSaved prop has
// been attached from the React side; no-op otherwise.
- (void)onPictureSaved:(NSDictionary *)event
{
    RCTDirectEventBlock handler = _onPictureSaved;
    if (handler) {
        handler(event);
    }
}

- (void)layoutSubviews
{
[super layoutSubviews];
Expand Down Expand Up @@ -210,7 +218,7 @@ - (void)updateFocusDepth
AVCaptureDevice *device = [self.videoCaptureDeviceInput device];
NSError *error = nil;

if (self.autoFocus < 0 || device.focusMode != RNCameraAutoFocusOff || device.position == RNCameraTypeFront) {
if (device == nil || self.autoFocus < 0 || device.focusMode != RNCameraAutoFocusOff || device.position == RNCameraTypeFront) {
return;
}

Expand Down Expand Up @@ -284,6 +292,11 @@ - (void)updateWhiteBalance
[device unlockForConfiguration];
}

// Applies the current pictureSize property to the capture session by
// switching the session preset (see updateSessionPreset:).
- (void)updatePictureSize
{
[self updateSessionPreset:self.pictureSize];
}

#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
- (void)updateFaceDetecting:(id)faceDetecting
{
Expand Down Expand Up @@ -318,16 +331,24 @@ - (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)reso
[connection setVideoOrientation:orientation];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
if (imageSampleBuffer && !error) {
BOOL useFastMode = options[@"fastMode"] && [options[@"fastMode"] boolValue];
if (useFastMode) {
resolve(nil);
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];

UIImage *takenImage = [UIImage imageWithData:imageData];

CGRect frame = [_previewLayer metadataOutputRectOfInterestForRect:self.frame];
CGImageRef takenCGImage = takenImage.CGImage;
size_t width = CGImageGetWidth(takenCGImage);
size_t height = CGImageGetHeight(takenCGImage);
CGRect cropRect = CGRectMake(frame.origin.x * width, frame.origin.y * height, frame.size.width * width, frame.size.height * height);
takenImage = [RNImageUtils cropImage:takenImage toRect:cropRect];
CGSize previewSize;
if (UIInterfaceOrientationIsPortrait([[UIApplication sharedApplication] statusBarOrientation])) {
previewSize = CGSizeMake(self.previewLayer.frame.size.height, self.previewLayer.frame.size.width);
} else {
previewSize = CGSizeMake(self.previewLayer.frame.size.width, self.previewLayer.frame.size.height);
}
CGRect cropRect = CGRectMake(0, 0, CGImageGetWidth(takenCGImage), CGImageGetHeight(takenCGImage));
CGRect croppedSize = AVMakeRectWithAspectRatioInsideRect(previewSize, cropRect);
takenImage = [RNImageUtils cropImage:takenImage toRect:croppedSize];

if ([options[@"mirrorImage"] boolValue]) {
takenImage = [RNImageUtils mirrorImage:takenImage];
Expand Down Expand Up @@ -377,7 +398,11 @@ - (void)takePicture:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)reso
[RNImageUtils updatePhotoMetadata:imageSampleBuffer withAdditionalData:@{ @"Orientation": @(imageRotation) } inResponse:response]; // TODO
}

resolve(response);
if (useFastMode) {
[self onPictureSaved:@{@"data": response, @"id": options[@"id"]}];
} else {
resolve(response);
}
} else {
reject(@"E_IMAGE_CAPTURE_FAILED", @"Image could not be captured", error);
}
Expand Down Expand Up @@ -447,6 +472,16 @@ - (void)stopRecording
[self.movieFileOutput stopRecording];
}

// Re-enables the preview layer's capture connection so frames flow again
// after a pausePreview call.
- (void)resumePreview
{
    self.previewLayer.connection.enabled = YES;
}

// Freezes the on-screen preview by disabling the preview layer's capture
// connection; the session itself keeps running.
- (void)pausePreview
{
    self.previewLayer.connection.enabled = NO;
}

- (void)startSession
{
#if TARGET_IPHONE_SIMULATOR
Expand Down Expand Up @@ -571,10 +606,14 @@ - (void)initializeCaptureSessionInput

#pragma mark - internal

- (void)updateSessionPreset:(NSString *)preset
- (void)updateSessionPreset:(AVCaptureSessionPreset)preset
{
#if !(TARGET_IPHONE_SIMULATOR)
if (preset) {
if (self.isDetectingFaces && [preset isEqual:AVCaptureSessionPresetPhoto]) {
RCTLog(@"AVCaptureSessionPresetPhoto not supported during face detection. Falling back to AVCaptureSessionPresetHigh");
preset = AVCaptureSessionPresetHigh;
}
dispatch_async(self.sessionQueue, ^{
[self.session beginConfiguration];
if ([self.session canSetSessionPreset:preset]) {
Expand Down
109 changes: 90 additions & 19 deletions ios/RN/RNCameraManager.m
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ @implementation RNCameraManager
RCT_EXPORT_VIEW_PROPERTY(onMountError, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onBarCodeRead, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onFacesDetected, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onPictureSaved, RCTDirectEventBlock);

+ (BOOL)requiresMainQueueSetup
{
Expand Down Expand Up @@ -65,13 +66,13 @@ - (NSDictionary *)constantsToExport
},
@"VideoCodec": [[self class] validCodecTypes],
@"BarCodeType" : [[self class] validBarCodeTypes],
@"FaceDetection" : [[self class] faceDetectorConstants]
@"FaceDetection" : [[self class] faceDetectorConstants]
};
}

// Names of every event this module may emit to JS. The scraped diff left both
// the pre-change and post-change return statements in place; only the
// post-change list (including onPictureSaved) is kept.
- (NSArray<NSString *> *)supportedEvents
{
    return @[@"onCameraReady", @"onMountError", @"onBarCodeRead", @"onFacesDetected", @"onPictureSaved"];
}

+ (NSDictionary *)validCodecTypes
Expand Down Expand Up @@ -111,6 +112,21 @@ + (NSDictionary *)validBarCodeTypes
};
}

// Maps the JS-facing pictureSize labels to their AVCaptureSessionPreset
// values. The keys are what getAvailablePictureSizes resolves with, and what
// the pictureSize view property accepts.
+ (NSDictionary *)pictureSizes
{
return @{
@"3840x2160" : AVCaptureSessionPreset3840x2160,
@"1920x1080" : AVCaptureSessionPreset1920x1080,
@"1280x720" : AVCaptureSessionPreset1280x720,
@"640x480" : AVCaptureSessionPreset640x480,
@"352x288" : AVCaptureSessionPreset352x288,
@"Photo" : AVCaptureSessionPresetPhoto,
@"High" : AVCaptureSessionPresetHigh,
@"Medium" : AVCaptureSessionPresetMedium,
@"Low" : AVCaptureSessionPresetLow
};
}

+ (NSDictionary *)faceDetectorConstants
{
#if __has_include(<GoogleMobileVision/GoogleMobileVision.h>)
Expand Down Expand Up @@ -158,12 +174,20 @@ + (NSDictionary *)faceDetectorConstants

// Bridge setter for the whiteBalance prop: stores the requested mode on the
// view and pushes it to the capture device. The scraped diff left both the
// old (extra space) and new setter lines; only one call is kept.
RCT_CUSTOM_VIEW_PROPERTY(whiteBalance, NSInteger, RNCamera)
{
    [view setWhiteBalance:[RCTConvert NSInteger:json]];
    [view updateWhiteBalance];
}

// Bridge setter for the pictureSize prop: looks the JS label up in the
// pictureSizes map and applies the matching session preset.
// NOTE(review): an unknown label yields a nil preset; updateSessionPreset:
// skips nil presets, so the session is left unchanged — confirm that is the
// intended behavior rather than falling back to a default.
RCT_CUSTOM_VIEW_PROPERTY(pictureSize, NSString *, RNCamera)
{
[view setPictureSize:[[self class] pictureSizes][[RCTConvert NSString:json]]];
[view updatePictureSize];
}


// Bridge setter for faceDetectorEnabled: records the flag on the view (read
// by updateSessionPreset: to avoid the Photo preset during detection) and
// forwards the raw JSON to updateFaceDetecting:.
RCT_CUSTOM_VIEW_PROPERTY(faceDetectorEnabled, BOOL, RNCamera)
{
view.isDetectingFaces = [RCTConvert BOOL:json];
[view updateFaceDetecting:json];
}

Expand All @@ -185,7 +209,7 @@ + (NSDictionary *)faceDetectorConstants
// Bridge setter for barCodeScannerEnabled: records the flag and attaches or
// detaches the barcode metadata output. The scraped diff left both the old
// (view.barCodeReading) and new (view.isReadingBarCodes) assignments; only
// the post-change one is kept, matching the renamed property in RNCamera.h.
RCT_CUSTOM_VIEW_PROPERTY(barCodeScannerEnabled, BOOL, RNCamera)
{
    view.isReadingBarCodes = [RCTConvert BOOL:json];
    [view setupOrDisableBarcodeScanner];
}

Expand All @@ -200,29 +224,37 @@ + (NSDictionary *)faceDetectorConstants
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
#if TARGET_IPHONE_SIMULATOR
NSMutableDictionary *response = [[NSMutableDictionary alloc] init];
float quality = [options[@"quality"] floatValue];
NSString *path = [RNFileSystem generatePathInDirectory:[[RNFileSystem cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"];
UIImage *generatedPhoto = [RNImageUtils generatePhotoOfSize:CGSizeMake(200, 200)];
NSData *photoData = UIImageJPEGRepresentation(generatedPhoto, quality);
response[@"uri"] = [RNImageUtils writeImage:photoData toPath:path];
response[@"width"] = @(generatedPhoto.size.width);
response[@"height"] = @(generatedPhoto.size.height);
if ([options[@"base64"] boolValue]) {
response[@"base64"] = [photoData base64EncodedStringWithOptions:0];
}
resolve(response);
#else
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
RNCamera *view = viewRegistry[reactTag];
if (![view isKindOfClass:[RNCamera class]]) {
RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
} else {
#if TARGET_IPHONE_SIMULATOR
NSMutableDictionary *response = [[NSMutableDictionary alloc] init];
float quality = [options[@"quality"] floatValue];
NSString *path = [RNFileSystem generatePathInDirectory:[[RNFileSystem cacheDirectoryPath] stringByAppendingPathComponent:@"Camera"] withExtension:@".jpg"];
UIImage *generatedPhoto = [RNImageUtils generatePhotoOfSize:CGSizeMake(200, 200)];
BOOL useFastMode = options[@"fastMode"] && [options[@"fastMode"] boolValue];
if (useFastMode) {
resolve(nil);
}
NSData *photoData = UIImageJPEGRepresentation(generatedPhoto, quality);
response[@"uri"] = [RNImageUtils writeImage:photoData toPath:path];
response[@"width"] = @(generatedPhoto.size.width);
response[@"height"] = @(generatedPhoto.size.height);
if ([options[@"base64"] boolValue]) {
response[@"base64"] = [photoData base64EncodedStringWithOptions:0];
}
if (useFastMode) {
[view onPictureSaved:@{@"data": response, @"id": options[@"id"]}];
} else {
resolve(response);
}
#else
[view takePicture:options resolve:resolve reject:reject];
#endif
}
}];
#endif
}

RCT_REMAP_METHOD(record,
Expand All @@ -245,6 +277,36 @@ + (NSDictionary *)faceDetectorConstants
}];
}

// JS-callable: re-enables the camera preview for the RNCamera identified by
// reactTag. Compiled to a no-op on the simulator, which has no capture
// hardware.
RCT_EXPORT_METHOD(resumePreview:(nonnull NSNumber *)reactTag)
{
#if TARGET_IPHONE_SIMULATOR
    return;
#endif
    [self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
        RNCamera *view = viewRegistry[reactTag];
        if ([view isKindOfClass:[RNCamera class]]) {
            [view resumePreview];
        } else {
            RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
        }
    }];
}

// JS-callable: freezes the camera preview for the RNCamera identified by
// reactTag. Compiled to a no-op on the simulator, which has no capture
// hardware.
RCT_EXPORT_METHOD(pausePreview:(nonnull NSNumber *)reactTag)
{
#if TARGET_IPHONE_SIMULATOR
    return;
#endif
    [self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
        RNCamera *view = viewRegistry[reactTag];
        if ([view isKindOfClass:[RNCamera class]]) {
            [view pausePreview];
        } else {
            RCTLogError(@"Invalid view returned from registry, expecting RNCamera, got: %@", view);
        }
    }];
}

RCT_REMAP_METHOD(stopRecording, reactTag:(nonnull NSNumber *)reactTag)
{
[self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary<NSNumber *, RNCamera *> *viewRegistry) {
Expand Down Expand Up @@ -283,5 +345,14 @@ + (NSDictionary *)faceDetectorConstants
}];
}

// JS-callable: resolves with the list of labels accepted by the pictureSize
// prop (the keys of +pictureSizes).
// NOTE(review): the ratio and reactTag arguments are ignored on iOS — every
// preset label is returned regardless; presumably kept for Android API
// parity. Confirm before relying on ratio filtering here.
RCT_REMAP_METHOD(getAvailablePictureSizes,
ratio:(NSString *)ratio
reactTag:(nonnull NSNumber *)reactTag
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
{
resolve([[[self class] pictureSizes] allKeys]);
}

@end

0 comments on commit b9fb708

Please sign in to comment.