From e71c396aef5a1d0354cbb963ced05494bbc072aa Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Wed, 18 May 2022 13:04:36 +0800 Subject: [PATCH] Darwin improvements [Mac/iOS] feat: Add RTCYUVHelper for darwin. (#28) Cross-platform `RTCMTLVideoView` for both iOS / macOS (#40) rotationOverride should not be assign (#44) [ObjC] Expose properties / methods required for AV1 codec support (#60) Workaround: Render PixelBuffer in RTCMTLVideoView (#58) Improve iOS/macOS H264 encoder (#70) fix: fix video encoder not resuming correctly upon foregrounding (#75). add PrivacyInfo.xcprivacy to darwin frameworks. (#112) Add NSPrivacyCollectedDataTypes key to xcprivacy file (#114) Thread-safe `RTCInitFieldTrialDictionary` (#116) Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> --- sdk/BUILD.gn | 21 +- sdk/objc/PrivacyInfo.xcprivacy | 28 +++ sdk/objc/api/peerconnection/RTCFieldTrials.mm | 31 ++- .../peerconnection/RTCPeerConnectionFactory.h | 4 + .../RTCPeerConnectionFactory.mm | 17 ++ .../peerconnection/RTCRtpCodecParameters.h | 1 + .../peerconnection/RTCRtpCodecParameters.mm | 1 + .../peerconnection/RTCRtpEncodingParameters.h | 4 + .../RTCRtpEncodingParameters.mm | 7 + .../api/peerconnection/RTCRtpTransceiver.h | 4 + .../api/peerconnection/RTCRtpTransceiver.mm | 16 ++ .../renderer/metal/RTCMTLNSVideoView.m | 122 ---------- .../renderer/metal/RTCMTLVideoView.h | 20 +- .../renderer/metal/RTCMTLVideoView.m | 68 ++++-- .../video_codec/RTCVideoEncoderH264.mm | 230 +++++++++++++----- sdk/objc/helpers/RTCYUVHelper.h | 118 +++++++++ sdk/objc/helpers/RTCYUVHelper.mm | 179 ++++++++++++++ video/video_stream_encoder.cc | 12 +- 18 files changed, 661 insertions(+), 222 deletions(-) create mode 100644 sdk/objc/PrivacyInfo.xcprivacy delete mode 100644 sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m create mode 100644 sdk/objc/helpers/RTCYUVHelper.h create mode 100644 sdk/objc/helpers/RTCYUVHelper.mm diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index d708c54643..04063216cc 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -144,12 +144,15 @@ if (is_ios || is_mac) { "objc/helpers/RTCDispatcher+Private.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCDispatcher.m", + "objc/helpers/RTCYUVHelper.h", + "objc/helpers/RTCYUVHelper.mm", "objc/helpers/scoped_cftyperef.h", ] deps = [ ":base_objc", "../rtc_base:checks", + "//third_party/libyuv", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] @@ -617,17 +620,13 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if (is_ios) { + if (is_ios || is_mac) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", ] } if (is_mac) { - sources += [ - "objc/components/renderer/metal/RTCMTLNSVideoView.h", - "objc/components/renderer/metal/RTCMTLNSVideoView.m", - ] frameworks += [ "AppKit.framework" ] } deps = [ @@ -1287,6 +1286,13 @@ if (is_ios || is_mac) { } } + bundle_data("darwin_privacy_info") { + sources = [ + "objc/PrivacyInfo.xcprivacy", + ] + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + if (is_ios) { apple_framework_bundle_with_umbrella_header("framework_objc") { info_plist = "objc/Info.plist" @@ -1334,6 +1340,7 @@ if (is_ios || is_mac) { "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", "objc/helpers/UIDevice+RTCDevice.h", "objc/api/peerconnection/RTCAudioDeviceModule.h", "objc/api/peerconnection/RTCIODevice.h", @@ -1417,6 +1424,7 @@ 
if (is_ios || is_mac) { ":videocapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] if (!build_with_chromium) { deps += [ @@ -1526,6 +1534,7 @@ if (is_ios || is_mac) { "objc/base/RTCYUVPlanarBuffer.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", @@ -1538,6 +1547,7 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", # Added for Simulcast support "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", "objc/api/video_codec/RTCVideoEncoderSimulcast.h", @@ -1558,6 +1568,7 @@ if (is_ios || is_mac) { ":videocapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] if (!build_with_chromium) { deps += [ diff --git a/sdk/objc/PrivacyInfo.xcprivacy b/sdk/objc/PrivacyInfo.xcprivacy new file mode 100644 index 0000000000..7204a67c33 --- /dev/null +++ b/sdk/objc/PrivacyInfo.xcprivacy @@ -0,0 +1,28 @@ + + + + + NSPrivacyCollectedDataTypes + + NSPrivacyAccessedAPITypes + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategoryFileTimestamp + NSPrivacyAccessedAPITypeReasons + + C617.1 + + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategorySystemBootTime + NSPrivacyAccessedAPITypeReasons + + 35F9.1 + 8FFB.1 + + + + + \ No newline at end of file diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 193da9e4f7..b5a2eca8f0 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -10,47 +10,58 @@ #import "RTCFieldTrials.h" +#import #include - #import "base/RTCLogging.h" #include "system_wrappers/include/field_trial.h" NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; -NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; -NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; -NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; -NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey = +NSString *const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; +NSString *const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; +NSString *const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; +NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey = @"WebRTC-Audio-MinimizeResamplingOnMobile"; NSString *const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor"; -NSString * const kRTCFieldTrialEnabledValue = @"Enabled"; +NSString *const kRTCFieldTrialEnabledValue = @"Enabled"; // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. static char *gFieldTrialInitString = nullptr; +static os_unfair_lock fieldTrialLock = OS_UNFAIR_LOCK_INIT; void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials) { if (!fieldTrials) { RTCLogWarning(@"No fieldTrials provided."); return; } + // Assemble the keys and values into the field trial string. - // We don't perform any extra format checking. That should be done by the underlying WebRTC calls. 
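With the os_unfair_lock added in this change, concurrent callers no longer race on the global gFieldTrialInitString buffer. A minimal usage sketch (illustrative only; the constants are the ones declared above, and the call is typically made once at startup, before the peer connection factory is created):

#import "RTCFieldTrials.h"

// Illustrative startup code — enable two of the trials declared in this file.
NSDictionary<NSString *, NSString *> *trials = @{
  kRTCFieldTrialH264HighProfileKey : kRTCFieldTrialEnabledValue,
  kRTCFieldTrialUseNWPathMonitor : kRTCFieldTrialEnabledValue,
};
RTCInitFieldTrialDictionary(trials);
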
NSMutableString *fieldTrialInitString = [NSMutableString string]; for (NSString *key in fieldTrials) { NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]]; [fieldTrialInitString appendString:fieldTrialEntry]; } + size_t len = fieldTrialInitString.length + 1; + + // Locking before modifying global variable + os_unfair_lock_lock(&fieldTrialLock); if (gFieldTrialInitString != nullptr) { delete[] gFieldTrialInitString; + gFieldTrialInitString = nullptr; } + gFieldTrialInitString = new char[len]; - if (![fieldTrialInitString getCString:gFieldTrialInitString - maxLength:len - encoding:NSUTF8StringEncoding]) { + bool success = [fieldTrialInitString getCString:gFieldTrialInitString + maxLength:len + encoding:NSUTF8StringEncoding]; + if (!success) { RTCLogError(@"Failed to convert field trial string."); + os_unfair_lock_unlock(&fieldTrialLock); return; } + webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString); + os_unfair_lock_unlock(&fieldTrialLock); } diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 81a43bcf16..80eaaf5297 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -25,6 +25,10 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCVideoTrack); @class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); @class RTC_OBJC_TYPE(RTCAudioDeviceModule); +@class RTC_OBJC_TYPE(RTCRtpCapabilities); + +typedef NS_ENUM(NSInteger, RTCRtpMediaType); + @protocol RTC_OBJC_TYPE (RTCPeerConnectionDelegate); @protocol RTC_OBJC_TYPE diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 36011d045f..eefead6fdd 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -25,6 +25,9 @@ #import "RTCPeerConnection+Private.h" #import "RTCVideoSource+Private.h" #import "RTCVideoTrack+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpCapabilities+Private.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "base/RTCVideoDecoderFactory.h" #import "base/RTCVideoEncoderFactory.h" @@ -130,6 +133,20 @@ - (instancetype)init { #endif } +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + + return [[RTCRtpCapabilities alloc] initWithNativeRtpCapabilities: capabilities]; +} + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTCRtpReceiver nativeMediaTypeForMediaType: mediaType]); + + return [[RTCRtpCapabilities alloc] initWithNativeRtpCapabilities: capabilities]; +} + - (instancetype) initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing encoderFactory:(nullable id)encoderFactory diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h index 6135223720..4d24d3ccd6 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h @@ -30,6 +30,7 @@ RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName; RTC_EXTERN const NSString *const kRTCVp8CodecName; RTC_EXTERN const NSString *const 
kRTCVp9CodecName; RTC_EXTERN const NSString *const kRTCH264CodecName; +RTC_EXTERN const NSString *const kRTCAv1CodecName; /** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */ RTC_OBJC_EXPORT diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index 6201e57b93..42a310cb79 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -32,6 +32,7 @@ const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName); const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName); const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName); +const NSString * const kRTCAv1CodecName = @(cricket::kAv1CodecName); @implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index 07f6b7a39c..af0c6993bc 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -69,6 +69,10 @@ RTC_OBJC_EXPORT https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */ @property(nonatomic, assign) BOOL adaptiveAudioPacketTime; +/** A case-sensitive identifier of the scalability mode to be used for this stream. + https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters */ +@property(nonatomic, copy, nullable) NSString *scalabilityMode; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index d6087dafb0..aecb88b6f6 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -25,6 +25,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters) @synthesize bitratePriority = _bitratePriority; @synthesize networkPriority = _networkPriority; @synthesize adaptiveAudioPacketTime = _adaptiveAudioPacketTime; +@synthesize scalabilityMode = _scalabilityMode; - (instancetype)init { webrtc::RtpEncodingParameters nativeParameters; @@ -59,6 +60,9 @@ - (instancetype)initWithNativeParameters: if (nativeParameters.ssrc) { _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc]; } + if (nativeParameters.scalability_mode) { + _scalabilityMode = [NSString stringWithUTF8String:nativeParameters.scalability_mode->c_str()]; + } _bitratePriority = nativeParameters.bitrate_priority; _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) priorityFromNativePriority:nativeParameters.network_priority]; @@ -92,6 +96,9 @@ - (instancetype)initWithNativeParameters: if (_ssrc != nil) { parameters.ssrc = absl::optional(_ssrc.unsignedLongValue); } + if (_scalabilityMode != nil) { + parameters.scalability_mode = absl::optional(std::string([_scalabilityMode UTF8String])); + } parameters.bitrate_priority = _bitratePriority; parameters.network_priority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority]; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index 1701f680a4..fca088be7e 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -14,6 +14,8 @@ #import "RTCRtpReceiver.h" #import "RTCRtpSender.h" +@class RTC_OBJC_TYPE(RTCRtpCodecCapability); + NS_ASSUME_NONNULL_BEGIN extern NSString *const kRTCRtpTransceiverErrorDomain; 
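A hedged sketch of how the new scalabilityMode property (together with the AV1 codec name exported above) might be used from application code. RTCRtpTransceiverInit, sendEncodings, addTransceiverOfType:init: and the peerConnection/factory objects come from the existing Objective-C API rather than this patch, and the requested "L3T3" mode only takes effect if the negotiated codec supports SVC:

RTC_OBJC_TYPE(RTCRtpEncodingParameters) *encoding =
    [[RTC_OBJC_TYPE(RTCRtpEncodingParameters) alloc] init];
encoding.isActive = YES;
encoding.scalabilityMode = @"L3T3";  // case-sensitive, per W3C webrtc-svc

RTC_OBJC_TYPE(RTCRtpTransceiverInit) *transceiverInit =
    [[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init];
transceiverInit.direction = RTCRtpTransceiverDirectionSendOnly;
transceiverInit.sendEncodings = @[ encoding ];
// The new factory helpers can be used to check codec support first, e.g.
// [factory rtpSenderCapabilitiesFor:RTCRtpMediaTypeVideo].
RTC_OBJC_TYPE(RTCRtpTransceiver) *transceiver =
    [peerConnection addTransceiverOfType:RTCRtpMediaTypeVideo init:transceiverInit];
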
@@ -105,6 +107,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, readonly) RTCRtpTransceiverDirection direction; +@property(nonatomic, copy) NSArray *codecPreferences; + /** The currentDirection attribute indicates the current direction negotiated * for this transceiver. If this transceiver has never been represented in an * offer/answer exchange, or if the transceiver is stopped, the value is not diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index eb0ad96738..fe9fd623f8 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -15,6 +15,8 @@ #import "RTCRtpParameters+Private.h" #import "RTCRtpReceiver+Private.h" #import "RTCRtpSender+Private.h" +#import "RTCRtpCodecCapability.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" @@ -68,6 +70,20 @@ - (NSString *)mid { } } +- (NSArray *)codecPreferences { + + NSMutableArray *result = [NSMutableArray array]; + + std::vector capabilities = _nativeRtpTransceiver->codec_preferences(); + + for (auto & element : capabilities) { + RTCRtpCodecCapability *object = [[RTCRtpCodecCapability alloc] initWithNativeRtpCodecCapability: element]; + [result addObject: object]; + } + + return result; +} + @synthesize sender = _sender; @synthesize receiver = _receiver; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m deleted file mode 100644 index 625fb1caa7..0000000000 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "RTCMTLNSVideoView.h" - -#import -#import - -#import "base/RTCVideoFrame.h" - -#import "RTCMTLI420Renderer.h" - -@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) -() @property(nonatomic) id renderer; -@property(nonatomic, strong) MTKView *metalView; -@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@end - -@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { - id _renderer; -} - -@synthesize delegate = _delegate; -@synthesize renderer = _renderer; -@synthesize metalView = _metalView; -@synthesize videoFrame = _videoFrame; - -- (instancetype)initWithFrame:(CGRect)frameRect { - self = [super initWithFrame:frameRect]; - if (self) { - [self configure]; - } - return self; -} - -- (instancetype)initWithCoder:(NSCoder *)aCoder { - self = [super initWithCoder:aCoder]; - if (self) { - [self configure]; - } - return self; -} - -#pragma mark - Private - -+ (BOOL)isMetalAvailable { - return [MTLCopyAllDevices() count] > 0; -} - -- (void)configure { - if ([[self class] isMetalAvailable]) { - _metalView = [[MTKView alloc] initWithFrame:self.bounds]; - [self addSubview:_metalView]; - _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; - _metalView.translatesAutoresizingMaskIntoConstraints = NO; - _metalView.framebufferOnly = YES; - _metalView.delegate = self; - - _renderer = [[RTCMTLI420Renderer alloc] init]; - if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) { - _renderer = nil; - }; - } -} - -- (void)updateConstraints { - NSDictionary *views = NSDictionaryOfVariableBindings(_metalView); - - NSArray *constraintsHorizontal = - [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsHorizontal]; - - NSArray *constraintsVertical = - [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsVertical]; - [super updateConstraints]; -} - -#pragma mark - MTKViewDelegate methods -- (void)drawInMTKView:(nonnull MTKView *)view { - if (self.videoFrame == nil) { - return; - } - if (view == self.metalView) { - [_renderer drawFrame:self.videoFrame]; - } -} - -- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { -} - -#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - -- (void)setSize:(CGSize)size { - _metalView.drawableSize = size; - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate videoView:self didChangeVideoSize:size]; - }); - [_metalView draw]; -} - -- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - if (frame == nil) { - return; - } - self.videoFrame = [frame newI420VideoFrame]; -} - -@end diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h index 3320d12076..bed02ffa92 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -10,6 +10,10 @@ #import +#if TARGET_OS_OSX +#import +#endif + #import "RTCMacros.h" #import "RTCVideoFrame.h" #import "RTCVideoRenderer.h" @@ -22,14 +26,26 @@ NS_ASSUME_NONNULL_BEGIN * It has id property that renders video frames in the view's * bounds using Metal. 
*/ +#if TARGET_OS_IPHONE NS_CLASS_AVAILABLE_IOS(9) +#elif TARGET_OS_OSX +NS_AVAILABLE_MAC(10.11) +#endif RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : + +#if TARGET_OS_IPHONE + UIView +#elif TARGET_OS_OSX + NSView +#endif @property(nonatomic, weak) id delegate; +#if TARGET_OS_IPHONE @property(nonatomic) UIViewContentMode videoContentMode; +#endif /** @abstract Enables/disables rendering. */ @@ -39,6 +55,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, nullable) NSValue* rotationOverride; ++ (BOOL)isMetalAvailable; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index c5d9e4385f..8b2ec1aaa3 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -51,6 +51,14 @@ @implementation RTC_OBJC_TYPE (RTCMTLVideoView) @synthesize lastFrameTimeNs = _lastFrameTimeNs; @synthesize rotationOverride = _rotationOverride; ++ (BOOL)isMetalAvailable { +#if TARGET_OS_IPHONE + return MTLCreateSystemDefaultDevice() != nil; +#elif TARGET_OS_OSX + return [MTLCopyAllDevices() count] > 0; +#endif +} + - (instancetype)initWithFrame:(CGRect)frameRect { self = [super initWithFrame:frameRect]; if (self) { @@ -75,6 +83,7 @@ - (void)setEnabled:(BOOL)enabled { self.metalView.paused = !enabled; } +#if TARGET_OS_IPHONE - (UIViewContentMode)videoContentMode { return self.metalView.contentMode; } @@ -82,13 +91,10 @@ - (UIViewContentMode)videoContentMode { - (void)setVideoContentMode:(UIViewContentMode)mode { self.metalView.contentMode = mode; } +#endif #pragma mark - Private -+ (BOOL)isMetalAvailable { - return MTLCreateSystemDefaultDevice() != nil; -} - + (MTKView *)createMetalView:(CGRect)frame { return [[MTKViewClass alloc] initWithFrame:frame]; } @@ -102,7 +108,7 @@ + (RTCMTLI420Renderer *)createI420Renderer { } + (RTCMTLRGBRenderer *)createRGBRenderer { - return [[RTCMTLRGBRenderer alloc] init]; + return [[RTCMTLRGBRendererClass alloc] init]; } - (void)configure { @@ -111,19 +117,24 @@ - (void)configure { self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; self.metalView.delegate = self; +#if TARGET_OS_IPHONE self.metalView.contentMode = UIViewContentModeScaleAspectFill; +#elif TARGET_OS_OSX + self.metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; +#endif + [self addSubview:self.metalView]; self.videoFrameSize = CGSizeZero; } +#if TARGET_OS_IPHONE - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - self.metalView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + self.metalView.multipleTouchEnabled = multipleTouchEnabled; } +#endif -- (void)layoutSubviews { - [super layoutSubviews]; - +- (void)performLayout { CGRect bounds = self.bounds; self.metalView.frame = bounds; if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) { @@ -203,10 +214,10 @@ - (void)setRotationOverride:(NSValue *)rotationOverride { [self setNeedsLayout]; } -- (RTCVideoRotation)frameRotation { +- (RTCVideoRotation)videoRotation { if (self.rotationOverride) { RTCVideoRotation rotation; - if (@available(iOS 11, *)) { + if (@available(iOS 11, macos 10.13, *)) { [self.rotationOverride getValue:&rotation size:sizeof(rotation)]; } else { [self.rotationOverride getValue:&rotation]; @@ -220,10 +231,10 @@ - (RTCVideoRotation)frameRotation { - 
(CGSize)drawableSize { // Flip width/height if the rotations are not the same. CGSize videoFrameSize = self.videoFrameSize; - RTCVideoRotation frameRotation = [self frameRotation]; + RTCVideoRotation videoRotation = [self videoRotation]; BOOL useLandscape = - (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180); + (videoRotation == RTCVideoRotation_0) || (videoRotation == RTCVideoRotation_180); BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || (self.videoFrame.rotation == RTCVideoRotation_180); @@ -259,7 +270,34 @@ - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { RTCLogInfo(@"Incoming frame is nil. Exiting render callback."); return; } - self.videoFrame = frame; + + // Workaround to support RTCCVPixelBuffer rendering. + // RTCMTLRGBRenderer seems to be broken at the moment. + BOOL useI420 = NO; + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); + useI420 = pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB; + } + self.videoFrame = useI420 ? [frame newI420VideoFrame] : frame; +} + +#pragma mark - Cross platform + +#if TARGET_OS_IPHONE +- (void)layoutSubviews { + [super layoutSubviews]; + [self performLayout]; +} +#elif TARGET_OS_OSX +- (void)layout { + [super layout]; + [self performLayout]; +} + +- (void)setNeedsLayout { + self.needsLayout = YES; } +#endif @end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 2160d79ae5..d3dd33aef6 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -54,14 +54,42 @@ - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags // The ratio between kVTCompressionPropertyKey_DataRateLimits and // kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher // than the average bit rate to avoid undershooting the target. -const float kLimitToAverageBitRateFactor = 1.5f; +const float kLimitToAverageBitRateFactor = 10.0f; // These thresholds deviate from the default h264 QP thresholds, as they // have been found to work better on devices that support VideoToolbox const int kLowH264QpThreshold = 28; const int kHighH264QpThreshold = 39; +const int kBitsPerByte = 8; const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; +typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { + Variable = 0, + Constant = 1, +}; + +NSArray *CreateRateLimitArray(uint32_t computedBitrateBps, RTCVideoEncodeMode mode) { + switch (mode) { + case Variable: { + // 5 seconds should be an okay interval for VBR to enforce the long-term + // limit. + float avgInterval = 5.0; + uint32_t avgBytesPerSecond = computedBitrateBps / kBitsPerByte * avgInterval; + // And the peak bitrate is measured per-second in a way similar to CBR. + float peakInterval = 1.0; + uint32_t peakBytesPerSecond = + computedBitrateBps * kLimitToAverageBitRateFactor / kBitsPerByte; + return @[ @(peakBytesPerSecond), @(peakInterval), @(avgBytesPerSecond), @(avgInterval) ]; + } + case Constant: { + // CBR should be enforces with granularity of a second. 
+ float targetInterval = 1.0; + int32_t targetBitrate = computedBitrateBps / kBitsPerByte; + return @[ @(targetBitrate), @(targetInterval) ]; + } + } +} + // Struct that we pass to the encoder per frame to encode. We receive it again // in the encoder callback. struct RTCFrameEncodeParams { @@ -180,10 +208,13 @@ void compressionOutputCallback(void *encoder, // no specific VideoToolbox profile for the specified level, AutoLevel will be // returned. The user must initialize the encoder with a resolution and // framerate conforming to the selected H264 level regardless. -CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) { +CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id, bool screenSharing) { switch (profile_level_id.profile) { case webrtc::H264Profile::kProfileConstrainedBaseline: case webrtc::H264Profile::kProfileBaseline: + if (screenSharing) { + return kVTProfileLevel_H264_Baseline_AutoLevel; + } switch (profile_level_id.level) { case webrtc::H264Level::kLevel3: return kVTProfileLevel_H264_Baseline_3_0; @@ -319,8 +350,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id) @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo; - std::unique_ptr _bitrateAdjuster; uint32_t _targetBitrateBps; + uint32_t _targetFrameRate; uint32_t _encoderBitrateBps; uint32_t _encoderFrameRate; uint32_t _maxAllowedFrameRate; @@ -330,10 +361,17 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { int32_t _width; int32_t _height; VTCompressionSessionRef _compressionSession; - RTCVideoCodecMode _mode; + CVPixelBufferPoolRef _pixelBufferPool; + RTCVideoCodecMode _codecMode; + unsigned int _maxQP; + unsigned int _minBitrate; + unsigned int _maxBitrate; + RTCVideoEncodeMode _encodeMode; webrtc::H264BitstreamParser _h264BitstreamParser; std::vector _frameScaleBuffer; + + CMTime _previousPresentationTimeStamp; } // .5 is set as a mininum to prevent overcompensating for large temporary @@ -346,12 +384,14 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { - (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { if (self = [super init]) { _codecInfo = codecInfo; - _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95)); _packetizationMode = RTCH264PacketizationModeNonInterleaved; _profile_level_id = webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters); + _previousPresentationTimeStamp = kCMTimeZero; RTC_DCHECK(_profile_level_id); - RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile(*_profile_level_id)); + RTC_LOG(LS_INFO) << "Using profile " + << CFStringToString(ExtractProfile( + *_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]); } return self; @@ -368,7 +408,12 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s _width = settings.width; _height = settings.height; - _mode = settings.mode; + _codecMode = settings.mode; + _maxQP = settings.qpMax; + + _encodeMode = Variable; // Always variable mode for now + _minBitrate = settings.minBitrate * 1000; // minBitrate is in kbps. + _maxBitrate = settings.maxBitrate * 1000; // maxBitrate is in kbps. 
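For context, a hedged sketch of constructing this encoder directly with an explicit H264 profile; in practice a video encoder factory builds the RTCVideoCodecInfo, and initWithName:parameters: comes from the existing SDK rather than this patch. The "42e01f" value selects Constrained Baseline level 3.1, which ExtractProfile() above maps to a VideoToolbox profile-level constant:

RTC_OBJC_TYPE(RTCVideoCodecInfo) *codecInfo = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc]
    initWithName:kRTCVideoCodecH264Name
      parameters:@{
        @"profile-level-id" : @"42e01f",  // Constrained Baseline, level 3.1
        @"level-asymmetry-allowed" : @"1",
        @"packetization-mode" : @"1",
      }];
RTC_OBJC_TYPE(RTCVideoEncoderH264) *encoder =
    [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:codecInfo];
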
uint32_t aligned_width = (((_width + 15) >> 4) << 4); uint32_t aligned_height = (((_height + 15) >> 4) << 4); @@ -376,9 +421,15 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s (aligned_width * aligned_height)); // We can only set average bitrate on the HW encoder. - _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); - _encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + if (_encodeMode == Constant) { + _targetBitrateBps = _maxBitrate; + } else { + _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. + } + + _targetFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + _encoderBitrateBps = 0; + _encoderFrameRate = 0; if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate << " is larger than the " @@ -397,8 +448,15 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame if (!_callback || !_compressionSession) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - BOOL isKeyframeRequired = NO; + CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); + if (CMTimeCompare(presentationTimeStamp, _previousPresentationTimeStamp) == 0) { + // Same PTS + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + _previousPresentationTimeStamp = presentationTimeStamp; + + BOOL isKeyframeRequired = NO; // Get a pixel buffer from the pool and copy frame data over. if ([self resetCompressionSessionIfNeededWithFrame:frame]) { isKeyframeRequired = YES; @@ -425,8 +483,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame int dstWidth = CVPixelBufferGetWidth(pixelBuffer); int dstHeight = CVPixelBufferGetHeight(pixelBuffer); if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) { - int size = - [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight]; + int size = [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth + height:dstHeight]; _frameScaleBuffer.resize(size); } else { _frameScaleBuffer.clear(); @@ -466,7 +524,6 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame } } - CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); CFDictionaryRef frameProperties = nullptr; if (isKeyframeRequired) { CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; @@ -484,8 +541,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame frame.rotation)); encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode; - // Update the bitrate if needed. - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate]; + // Update encoder bitrate or frameRate if needed. 
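// Note: -updateEncoderBitrateAndFrameRate (defined below) is cheap to call on
// every frame; it compares the computed values against _encoderBitrateBps and
// _encoderFrameRate and only touches the VTCompressionSession properties when
// one of them has actually changed.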
+ [self updateEncoderBitrateAndFrameRate]; OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession, pixelBuffer, @@ -526,14 +583,19 @@ - (void)setCallback:(RTCVideoEncoderCallback)callback { } - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate { - _targetBitrateBps = 1000 * bitrateKbit; - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); + // set target bitrate bps + _targetBitrateBps = bitrateKbit * 1000; + + RTC_LOG(LS_INFO) << "setBitrateKBit: " << bitrateKbit << " targetBps: " << _targetBitrateBps + << " frameRate: " << framerate; + if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the " << "maximal allowed frame rate " << _maxAllowedFrameRate << "."; } - framerate = MIN(framerate, _maxAllowedFrameRate); - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:framerate]; + + _targetFrameRate = MIN(framerate, _maxAllowedFrameRate); + return WEBRTC_VIDEO_CODEC_OK; } @@ -585,7 +647,8 @@ - (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * CVPixelBufferPoolRef pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession); if (!pixelBufferPool) { - return NO; + [self resetCompressionSessionWithPixelFormat:framePixelFormat]; + return YES; } NSDictionary *poolAttributes = @@ -631,14 +694,19 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { (NSString *)kCVPixelBufferPixelFormatTypeKey : @(framePixelFormat), }; - NSDictionary *encoder_specs; + NSMutableDictionary *encoder_specs; #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) // Currently hw accl is supported above 360p on mac, below 360p // the compression session will be created with hw accl disabled. - encoder_specs = @{ + encoder_specs = [@{ (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES), - }; - + } mutableCopy]; + // Enable low-latency video encoding + if (@available(iOS 14.5, macOS 11.3, *)) { + [encoder_specs addEntriesFromDictionary:@{ + (NSString *)kVTVideoEncoderSpecification_EnableLowLatencyRateControl : @(YES), + }]; + } #endif OSStatus status = VTCompressionSessionCreate( nullptr, // use default allocator @@ -675,11 +743,30 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { - (void)configureCompressionSession { RTC_DCHECK(_compressionSession); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true); - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_ProfileLevel, - ExtractProfile(*_profile_level_id)); + // Sacrifice encoding speed over quality when necessary + if (@available(iOS 14.0, macOS 11.0, *)) { + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_PrioritizeEncodingSpeedOverQuality, true); + } + // Set maximum QP for screen sharing mode, range must be within 1 to 51 + // https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_maxallowedframeqp + if (@available(iOS 15.0, macOS 12.0, *)) { + // Only enable for screen sharing and let VideoToolbox do the optimizing as much as possible. 
+ if (_codecMode == RTCVideoCodecModeScreensharing) { + RTC_LOG(LS_INFO) << "Configuring VideoToolbox to use maxQP: " << kHighH264QpThreshold + << " mode: " << _codecMode; + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_MaxAllowedFrameQP, kHighH264QpThreshold); + } + } + SetVTSessionProperty( + _compressionSession, + kVTCompressionPropertyKey_ProfileLevel, + ExtractProfile(*_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false); - [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate]; + + // [self updateEncoderBitrateAndFrameRate]; + // TODO(tkchin): Look at entropy mode and colorspace matrices. // TODO(tkchin): Investigate to see if there's any way to make this work. // May need it to interop with Android. Currently this call just fails. @@ -706,49 +793,59 @@ - (NSString *)implementationName { return @"VideoToolbox"; } -- (void)setBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_encoderBitrateBps != bitrateBps || _encoderFrameRate != frameRate) { - [self setEncoderBitrateBps:bitrateBps frameRate:frameRate]; +- (void)updateEncoderBitrateAndFrameRate { + // If no compression session simply return + if (!_compressionSession) { + return; } -} + // Initial status + OSStatus status = noErr; -- (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_compressionSession) { - SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps); + uint32_t computedBitrateBps = _targetBitrateBps; - // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. - if (_maxAllowedFrameRate > 0) { - SetVTSessionProperty( - _compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, frameRate); - } + // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. + uint32_t computedFrameRate = _maxAllowedFrameRate > 0 ? _targetFrameRate : 0; - // TODO(tkchin): Add a helper method to set array value. 
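For reference, a worked example of what the new CreateRateLimitArray() helper near the top of this file produces for kVTCompressionPropertyKey_DataRateLimits (which takes alternating byte-count / window-length-in-seconds pairs), assuming a 2 Mbps target in Variable mode:

// kLimitToAverageBitRateFactor == 10, kBitsPerByte == 8
uint32_t targetBps = 2000000;
uint32_t avgBytes = targetBps / 8 * 5;    // 1250000 bytes over the 5 s window
uint32_t peakBytes = targetBps * 10 / 8;  // 2500000 bytes over the 1 s window
// => @[ @(2500000), @(1.0), @(1250000), @(5.0) ] is handed to VTSessionSetProperty.
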
- int64_t dataLimitBytesPerSecondValue = - static_cast(bitrateBps * kLimitToAverageBitRateFactor / 8); - CFNumberRef bytesPerSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue); - int64_t oneSecondValue = 1; - CFNumberRef oneSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue); - const void *nums[2] = {bytesPerSecond, oneSecond}; - CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks); - OSStatus status = VTSessionSetProperty( - _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits); - if (bytesPerSecond) { - CFRelease(bytesPerSecond); - } - if (oneSecond) { - CFRelease(oneSecond); + // Set frame rate + if (computedFrameRate != _encoderFrameRate) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_ExpectedFrameRate, + (__bridge CFTypeRef) @(computedFrameRate)); + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to set frame rate: " << computedFrameRate + << " error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder frame rate: " << computedFrameRate; } - if (dataRateLimits) { - CFRelease(dataRateLimits); + _encoderFrameRate = computedFrameRate; + } + + // Set bitrate + if (computedBitrateBps != _encoderBitrateBps) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_AverageBitRate, + (__bridge CFTypeRef) @(computedBitrateBps)); + + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to update encoder bitrate: " << computedBitrateBps + << "error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder bitrate: " << computedBitrateBps; } + + status = VTSessionSetProperty( + _compressionSession, + kVTCompressionPropertyKey_DataRateLimits, + (__bridge CFArrayRef)CreateRateLimitArray(computedBitrateBps, _encodeMode)); if (status != noErr) { - RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status; + RTC_LOG(LS_ERROR) << "Failed to update encoder data rate limits"; + } else { + RTC_LOG(LS_INFO) << "Did update encoder data rate limits"; } - _encoderBitrateBps = bitrateBps; - _encoderFrameRate = frameRate; + _encoderBitrateBps = computedBitrateBps; } } @@ -804,8 +901,9 @@ - (void)frameWasEncoded:(OSStatus)status frame.captureTimeMs = renderTimeMs; frame.timeStamp = timestamp; frame.rotation = rotation; - frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + frame.contentType = (_codecMode == RTCVideoCodecModeScreensharing) ? + RTCVideoContentTypeScreenshare : + RTCVideoContentTypeUnspecified; frame.flags = webrtc::VideoSendTiming::kInvalid; _h264BitstreamParser.ParseBitstream(*buffer); @@ -816,7 +914,6 @@ - (void)frameWasEncoded:(OSStatus)status RTC_LOG(LS_ERROR) << "Encode callback failed"; return; } - _bitrateAdjuster->Update(frame.buffer.length); } - (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { @@ -826,3 +923,4 @@ - (void)frameWasEncoded:(OSStatus)status } @end + diff --git a/sdk/objc/helpers/RTCYUVHelper.h b/sdk/objc/helpers/RTCYUVHelper.h new file mode 100644 index 0000000000..2e6309c034 --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.h @@ -0,0 +1,118 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#import "RTCMacros.h" +#import "RTCVideoFrame.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCYUVHelper) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + width:(int)height + mode:(RTCVideoRotation)mode; + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height; + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height; + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + width:(int)width + height:(int)height; + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height; + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height; + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height; + ++ (int)I420ToRGB24:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height; + +@end diff --git a/sdk/objc/helpers/RTCYUVHelper.mm b/sdk/objc/helpers/RTCYUVHelper.mm new file mode 100644 index 0000000000..3f610ff990 --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.mm @@ -0,0 +1,179 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import "RTCYUVHelper.h" + +#include "third_party/libyuv/include/libyuv.h" + +@implementation RTC_OBJC_TYPE (RTCYUVHelper) + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + width:(int)height + mode:(RTCVideoRotation)mode { + libyuv::I420Rotate(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstU, + dstStrideU, + dstV, + dstStrideV, + width, + height, + (libyuv::RotationMode)mode); +} + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height { + return libyuv::I420ToNV12(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + width:(int)height { + return libyuv::I420ToNV21(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + width:(int)width + height:(int)height { + return libyuv::I420ToARGB( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstARGB, dstStrideARGB, width, height); +} + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height { + return libyuv::I420ToBGRA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstBGRA, dstStrideBGRA, width, height); +} + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height { + return libyuv::I420ToABGR( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstABGR, dstStrideABGR, width, height); +} + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height { + return libyuv::I420ToRGBA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstRGBA, dstStrideRGBA, width, height); +} + ++ (int)I420ToRGB24:(const uint8_t*)srcY + 
srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height { + return libyuv::I420ToRGB24(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstRGB24, + dstStrideRGB24, + width, + height); +} + +@end diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index 5ed5110c96..dc0d34e720 100644 --- a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -2030,9 +2030,15 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, was_encode_called_since_last_initialization_ = true; if (encode_status < 0) { - RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " - << encoder_config_.video_format.ToString(); - RequestEncoderSwitch(); + if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) { + RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " + << encoder_config_.video_format.ToString(); + RequestEncoderSwitch(); + } else { + RTC_LOG(LS_ERROR) << "Failed to encode frame. Error code: " + << encode_status; + } + return; }
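
A hedged usage sketch for the new RTCYUVHelper category above, copying an I420 buffer into an NV12 CVPixelBuffer. The id<RTCI420Buffer> accessors (dataY, strideY, …) come from the existing RTCYUVPlanarBuffer protocol rather than this patch, `i420` and `pixelBuffer` are assumed inputs, and note that, as declared above, the last parameter of I420ToNV12 is labeled width: rather than height:, so the call site repeats that label:

#import <CoreVideo/CoreVideo.h>
#import "RTCYUVHelper.h"

// `i420` is an id<RTC_OBJC_TYPE(RTCI420Buffer)>; `pixelBuffer` is a locked,
// NV12-formatted CVPixelBufferRef with matching dimensions (both assumed).
uint8_t *dstY = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
uint8_t *dstUV = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
int result = [RTC_OBJC_TYPE(RTCYUVHelper)
       I420ToNV12:i420.dataY
       srcStrideY:i420.strideY
             srcU:i420.dataU
       srcStrideU:i420.strideU
             srcV:i420.dataV
       srcStrideV:i420.strideV
             dstY:dstY
       dstStrideY:(int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0)
            dstUV:dstUV
      dstStrideUV:(int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1)
            width:i420.width
            width:i420.height];
// result is 0 on success, mirroring the underlying libyuv return value.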