Skip to content
Permalink
Browse files

Quick hack to support capturing the depth buffer on the iPhone X

  • Loading branch information...
ara4n committed Jan 27, 2018
1 parent 09819ec commit c3044670d87c305d8f8ee72751939e281bf5223f
@@ -22,7 +22,7 @@ NS_ASSUME_NONNULL_BEGIN
// The reason for this is because other webrtc objects own cricket::VideoCapturer, which is not
// ref counted. To prevent bad behavior we do not expose this class directly.
@interface RTCAVFoundationVideoCapturerInternal
: NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
: NSObject <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureDepthDataOutputDelegate>

@property(nonatomic, readonly) AVCaptureSession *captureSession;
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@@ -27,6 +27,7 @@ @implementation RTCAVFoundationVideoCapturerInternal {
AVCaptureDeviceInput *_frontCameraInput;
AVCaptureDeviceInput *_backCameraInput;
AVCaptureVideoDataOutput *_videoDataOutput;
AVCaptureDepthDataOutput *_depthDataOutput;
// The cricket::VideoCapturer that owns this class. Should never be NULL.
webrtc::AVFoundationVideoCapturer *_capturer;
BOOL _hasRetriedOnFatalError;
@@ -186,7 +187,12 @@ - (void)stop {
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
if (_videoDataOutput) {
[_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
}
// if (_depthDataOutput) {
// [_depthDataOutput setDelegate:nil callbackQueue:nullptr];
// }
[_captureSession stopRunning];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
@@ -263,6 +269,58 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput
RTCLogError(@"Dropped sample buffer.");
}

#pragma mark AVCaptureDepthDataOutputDelegate

/// Delegate callback for each captured depth frame. Wraps the depth pixel
/// buffer in a CMSampleBuffer and forwards it to the owning C++ capturer.
/// Runs on self.frameQueue (the delegate callback queue).
- (void)depthDataOutput:(AVCaptureDepthDataOutput *)depthDataOutput
     didOutputDepthData:(AVDepthData *)depthData
              timestamp:(CMTime)timestamp
             connection:(AVCaptureConnection *)connection {
  NSParameterAssert(depthDataOutput == _depthDataOutput);
  if (!self.hasStarted) {
    return;
  }

  // Assume that rotation metadata, if any, will be tracked by non-depth capture
  // where we can use AVCaptureSession devicePositionForSampleBuffer correctly.
  webrtc::VideoRotation rotation = webrtc::kVideoRotation_0;

  // Build a video format description for the depth pixel buffer.
  CMFormatDescriptionRef desc = NULL;
  OSStatus status =
      CMVideoFormatDescriptionCreateForImageBuffer(NULL, depthData.depthDataMap, &desc);
  if (status != noErr) {
    RTCLogError(@"CMVideoFormatDescriptionCreateForImageBuffer failed to set: %d", status);
    // desc is invalid; nothing further can be done with this frame.
    return;
  }

  // Only the presentation timestamp is known; duration and decode time are not.
  CMSampleTimingInfo timing;
  timing.duration = kCMTimeInvalid;
  timing.presentationTimeStamp = timestamp;
  timing.decodeTimeStamp = kCMTimeInvalid;

  // Convert our depthData into a SampleBuffer.
  CMSampleBufferRef sampleBuffer = NULL;
  status = CMSampleBufferCreateReadyWithImageBuffer(
      kCFAllocatorDefault,
      depthData.depthDataMap,
      desc,
      &timing,
      &sampleBuffer
  );
  // The sample buffer holds its own reference to the format description;
  // release ours so we don't leak one CMFormatDescriptionRef per frame.
  CFRelease(desc);
  if (status != noErr) {
    RTCLogError(@"CMSampleBufferCreateReadyWithImageBuffer failed to set: %d", status);
    return;
  }

  _capturer->CaptureSampleBuffer(sampleBuffer, rotation);
  // We created sampleBuffer (Create rule), so we own it; release it here to
  // avoid leaking one CMSampleBufferRef per frame. NOTE(review): assumes
  // CaptureSampleBuffer copies/retains what it needs synchronously — confirm.
  CFRelease(sampleBuffer);
}

/// Delegate callback invoked when a depth frame is dropped (e.g. arrived
/// late or the pipeline was busy). We only log; no recovery is attempted.
- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output
didDropDepthData:(AVDepthData *)depthData
timestamp:(CMTime)timestamp
connection:(AVCaptureConnection *)connection
reason:(AVCaptureOutputDataDroppedReason)reason {
RTCLogError(@"Dropped depthData buffer.");
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
@@ -380,13 +438,26 @@ - (BOOL)setupCaptureSession {
#if defined(WEBRTC_IOS)
captureSession.usesApplicationAudioSession = NO;
#endif

/*
// XXX: for now, just capture depth.
// Add the output.
AVCaptureVideoDataOutput *videoDataOutput = [self videoDataOutput];
AVCaptureVideoDataOutput *videoDataOutput = [self getVideoDataOutput];
if (![captureSession canAddOutput:videoDataOutput]) {
RTCLogError(@"Video data output unsupported.");
return NO;
}
[captureSession addOutput:videoDataOutput];
*/

// Add the depth output.
AVCaptureDepthDataOutput *depthDataOutput = [self getDepthDataOutput];
if (![captureSession canAddOutput:depthDataOutput]) {
RTCLogError(@"Depth data output unsupported.");
return NO;
}
[captureSession addOutput:depthDataOutput];

// Get the front and back cameras. If there isn't a front camera
// give up.
@@ -410,7 +481,7 @@ - (BOOL)setupCaptureSession {
return YES;
}

- (AVCaptureVideoDataOutput *)videoDataOutput {
- (AVCaptureVideoDataOutput *)getVideoDataOutput {
if (!_videoDataOutput) {
// Make the capturer output NV12. Ideally we want I420 but that's not
// currently supported on iPhone / iPad.
@@ -427,12 +498,44 @@ - (AVCaptureVideoDataOutput *)videoDataOutput {
return _videoDataOutput;
}

/// Lazily creates and configures the depth data output, caching it in
/// _depthDataOutput. The delegate callbacks are delivered on self.frameQueue.
- (AVCaptureDepthDataOutput *)getDepthDataOutput {
  if (!_depthDataOutput) {
    AVCaptureDepthDataOutput *depthDataOutput = [[AVCaptureDepthDataOutput alloc] init];
    // NO: deliver late depth frames instead of discarding them.
    depthDataOutput.alwaysDiscardsLateDepthData = NO;
    // YES: let AVFoundation filter the depth map (fills invalid/missing
    // depth values) before delivery.
    depthDataOutput.filteringEnabled = YES;
    [depthDataOutput setDelegate:self callbackQueue:self.frameQueue];
    _depthDataOutput = depthDataOutput;
  }
  return _depthDataOutput;
}

/// Returns a depth-capable capture device at |position|, preferring the
/// TrueDepth camera over the dual rear camera. Returns nil when the hardware
/// has neither, i.e. depth capture is unsupported on this device/position.
- (AVCaptureDevice *)videoCaptureDeviceForPosition:(AVCaptureDevicePosition)position {
  // BuiltInDualCamera is the iOS 10.2+ replacement for the deprecated
  // BuiltInDuoCamera (this project targets iOS 11.1).
  NSArray<AVCaptureDeviceType> *depthDeviceTypes = @[
    AVCaptureDeviceTypeBuiltInTrueDepthCamera,
    AVCaptureDeviceTypeBuiltInDualCamera,
  ];
  for (AVCaptureDeviceType deviceType in depthDeviceTypes) {
    AVCaptureDevice *device =
        [AVCaptureDevice defaultDeviceWithDeviceType:deviceType
                                           mediaType:AVMediaTypeDepthData
                                            position:position];
    if (device != nil) {
      return device;
    }
  }
  return nil;
}

@@ -1,17 +1,18 @@
Pod::Spec.new do |s|
s.name = "WebRTC"
s.version = "${FRAMEWORK_VERSION_NUMBER}"
s.version = "99"
s.summary = "WebRTC SDK for iOS"
s.description = <<-DESC
WebRTC is a free, open project that provides browsers and mobile
applications with Real-Time Communications (RTC) capabilities via simple
APIs.
DESC
s.homepage = "http://webrtc.org/"
s.source = { :path => '/Users/matthew/workspace/webrtc/src/out_ios_libs/' }
s.license = { :type => "BSD", :file => "LICENSE" }
s.author = "Google Inc."

s.platform = :ios, "8.0"
s.platform = :ios, "11.1"

s.public_header_files = "WebRTC.framework/Headers/**/*.h"
s.vendored_frameworks = "WebRTC.framework"
@@ -33,8 +33,8 @@
SDK_LIB_NAME = 'librtc_sdk_objc.a'
SDK_FRAMEWORK_NAME = 'WebRTC.framework'

DEFAULT_ARCHS = ENABLED_ARCHS = ['arm64', 'arm', 'x64', 'x86']
IOS_DEPLOYMENT_TARGET = '9.0'
DEFAULT_ARCHS = ENABLED_ARCHS = ['arm64', 'x64', 'x86']
IOS_DEPLOYMENT_TARGET = '11.1'
LIBVPX_BUILD_VP9 = False

sys.path.append(os.path.join(SCRIPT_DIR, '..', 'libs'))

0 comments on commit c304467

Please sign in to comment.
You can’t perform that action at this time.