Add support for AVC H264 WebCodecsVideoEncoder
https://bugs.webkit.org/show_bug.cgi?id=246150
rdar://problem/100851081

Reviewed by Eric Carlson.

Add AVC encoder support in libwebrtc.
Add support at the WebKit layer by passing a boolean that selects whether output uses Annex B framing, plus a callback to retrieve the codec description in the AVC case.
Add support at the WebCore level to expose that description as metadata in the output callback when needed.
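For context, a minimal sketch of the web-facing usage this enables (illustrative codec string and dimensions, not taken from this patch): the avc.format value is what drives the new useAnnexB boolean, and the codec description surfaces through the chunk metadata.

    function handleChunk(chunk, metadata) {
        // With format "avc", the first chunk after configure() carries
        // metadata.decoderConfig.description (the avcC parameter sets).
    }
    const encoder = new VideoEncoder({ output: handleChunk, error: (e) => console.error(e) });
    encoder.configure({
        codec: "avc1.42001E",   // illustrative H.264 Baseline codec string
        width: 640,
        height: 480,
        avc: { format: "avc" }  // "annexb" would keep start-code framing and omit the description
    });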

* LayoutTests/imported/w3c/web-platform-tests/webcodecs/full-cycle-test.https.any_h264_avc-expected.txt:
* LayoutTests/imported/w3c/web-platform-tests/webcodecs/reconfiguring-encoder.https.any.worker_h264_avc-expected.txt:
* LayoutTests/imported/w3c/web-platform-tests/webcodecs/reconfiguring-encoder.https.any_h264_avc-expected.txt:
* Source/ThirdParty/libwebrtc/Configurations/libwebrtc.iOS.exp:
* Source/ThirdParty/libwebrtc/Configurations/libwebrtc.iOSsim.exp:
* Source/ThirdParty/libwebrtc/Configurations/libwebrtc.mac.exp:
* Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitEncoder.h:
* Source/ThirdParty/libwebrtc/Source/webrtc/sdk/WebKit/WebKitEncoder.mm:
(-[WK_RTCLocalVideoH264H265Encoder setUseAnnexB:]):
(-[WK_RTCLocalVideoH264H265Encoder setDescriptionCallback:]):
(webrtc::createLocalEncoder):
* Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/base/RTCVideoEncoder.h:
* Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.h:
* Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm:
(-[RTCVideoEncoderH264 initWithCodecInfo:]):
(-[RTCVideoEncoderH264 setUseAnnexB:]):
(-[RTCVideoEncoderH264 setDescriptionCallback:]):
(-[RTCVideoEncoderH264 frameWasEncoded:sampleBuffer:codecSpecificInfo:width:height:renderTimeMs:timestamp:rotation:isKeyFrameRequired:]):
* Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/RTCVideoEncoderH265.h:
* Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/RTCVideoEncoderH265.mm:
(-[RTCVideoEncoderH265 initWithCodecInfo:]):
(-[RTCVideoEncoderH265 setUseAnnexB:]):
(-[RTCVideoEncoderH265 setDescriptionCallback:]):
(-[RTCVideoEncoderH265 frameWasEncoded:sampleBuffer:width:height:renderTimeMs:timestamp:rotation:]):
* Source/WebCore/Modules/webcodecs/WebCodecsVideoEncoder.cpp:
(WebCore::WebCodecsVideoEncoder::configure):
(WebCore::WebCodecsVideoEncoder::createEncodedChunkMetadata):
* Source/WebCore/Modules/webcodecs/WebCodecsVideoEncoder.h:
* Source/WebCore/platform/LibWebRTCVPXVideoEncoder.cpp:
(WebCore::LibWebRTCVPXVideoEncoder::LibWebRTCVPXVideoEncoder):
* Source/WebCore/platform/LibWebRTCVPXVideoEncoder.h:
* Source/WebCore/platform/VideoEncoder.cpp:
(WebCore::VideoEncoder::create):
(WebCore::VideoEncoder::createLocalEncoder):
* Source/WebCore/platform/VideoEncoder.h:
* Source/WebKit/Configurations/WebKit.xcconfig:
* Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.h:
* Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.messages.in:
* Source/WebKit/GPUProcess/webrtc/LibWebRTCCodecsProxy.mm:
(WebKit::LibWebRTCCodecsProxy::createEncoder):
* Source/WebKit/WebProcess/GPU/media/RemoteVideoCodecFactory.cpp:
(WebKit::RemoteVideoEncoderCallbacks::create):
(WebKit::RemoteVideoCodecFactory::createEncoder):
(WebKit::RemoteVideoDecoder::RemoteVideoDecoder): Deleted.
(WebKit::RemoteVideoDecoder::~RemoteVideoDecoder): Deleted.
(WebKit::RemoteVideoDecoder::decode): Deleted.
(WebKit::RemoteVideoDecoder::flush): Deleted.
(WebKit::RemoteVideoDecoder::reset): Deleted.
(WebKit::RemoteVideoDecoder::close): Deleted.
(WebKit::RemoteVideoDecoderCallbacks::RemoteVideoDecoderCallbacks): Deleted.
(WebKit::RemoteVideoDecoderCallbacks::notifyVideoFrame): Deleted.
(WebKit::RemoteVideoEncoder::RemoteVideoEncoder): Deleted.
(WebKit::RemoteVideoEncoder::~RemoteVideoEncoder): Deleted.
(WebKit::RemoteVideoEncoder::encode): Deleted.
(WebKit::RemoteVideoEncoder::flush): Deleted.
(WebKit::RemoteVideoEncoder::reset): Deleted.
(WebKit::RemoteVideoEncoder::close): Deleted.
(WebKit::RemoteVideoEncoderCallbacks::RemoteVideoEncoderCallbacks): Deleted.
(WebKit::RemoteVideoEncoderCallbacks::notifyEncodedChunk): Deleted.
* Source/WebKit/WebProcess/GPU/media/RemoteVideoCodecFactory.h:
* Source/WebKit/WebProcess/GPU/webrtc/LibWebRTCCodecs.cpp:
(WebKit::LibWebRTCCodecs::createEncoder):
(WebKit::LibWebRTCCodecs::createEncoderAndWaitUntilReady):
(WebKit::LibWebRTCCodecs::createEncoderInternal):
(WebKit::LibWebRTCCodecs::registerEncoderDescriptionCallback):
(WebKit::LibWebRTCCodecs::setEncodingDescription):
(WebKit::LibWebRTCCodecs::gpuProcessConnectionDidClose):
* Source/WebKit/WebProcess/GPU/webrtc/LibWebRTCCodecs.h:
* Source/WebKit/WebProcess/GPU/webrtc/LibWebRTCCodecs.messages.in:

Canonical link: https://commits.webkit.org/255430@main
youennf committed Oct 12, 2022
1 parent 13105f5 commit a591b2669d7568d05194a2a2f3d419c855a02a68
Showing 27 changed files with 253 additions and 61 deletions.
@@ -1,4 +1,3 @@
CONSOLE MESSAGE: Error: assert_unreached: H264 AVC format is not yet supported Reached unreachable code

FAIL Encoding and decoding cycle promise_test: Unhandled rejection with value: object "ReferenceError: Can't find variable: OffscreenCanvas"

@@ -1,3 +1,3 @@

FAIL Reconfiguring encoder promise_test: Unhandled rejection with value: object "InvalidStateError: VideoEncoder is not configured"
FAIL Reconfiguring encoder promise_test: Unhandled rejection with value: object "ReferenceError: Can't find variable: OffscreenCanvas"

@@ -1,4 +1,3 @@
CONSOLE MESSAGE: Error: assert_unreached: H264 AVC format is not yet supported Reached unreachable code

FAIL Reconfiguring encoder promise_test: Unhandled rejection with value: object "InvalidStateError: VideoEncoder is not configured"
FAIL Reconfiguring encoder promise_test: Unhandled rejection with value: object "ReferenceError: Can't find variable: OffscreenCanvas"

@@ -215,7 +215,7 @@ __ZN6webrtc20setLocalEncoderRatesEPvjj
__ZN6webrtc25setLocalEncoderLowLatencyEPvb
__ZN6webrtc22initializeLocalEncoderEPvttjjjj
__ZN6webrtc14SdpVideoFormatC1ERKNSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEERKNS1_3mapIS7_S7_NS1_4lessIS7_EENS5_INS1_4pairIS8_S7_EEEEEE
__ZN6webrtc18createLocalEncoderERKNS_14SdpVideoFormatEU13block_pointerFvPKhmRKNS_22WebKitEncodedFrameInfoEE
__ZN6webrtc18createLocalEncoderERKNS_14SdpVideoFormatEbU13block_pointerFvPKhmRKNS_22WebKitEncodedFrameInfoEEU13block_pointerFvS4_mE
__ZN6webrtc14SdpVideoFormatD1Ev
__ZN6webrtc19releaseLocalEncoderEPv
__ZN6webrtc23encodeLocalEncoderFrameEPvP10__CVBufferxjNS_13VideoRotationEb
@@ -215,7 +215,7 @@ __ZN6webrtc20setLocalEncoderRatesEPvjj
__ZN6webrtc25setLocalEncoderLowLatencyEPvb
__ZN6webrtc22initializeLocalEncoderEPvttjjjj
__ZN6webrtc14SdpVideoFormatC1ERKNSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEERKNS1_3mapIS7_S7_NS1_4lessIS7_EENS5_INS1_4pairIS8_S7_EEEEEE
__ZN6webrtc18createLocalEncoderERKNS_14SdpVideoFormatEU13block_pointerFvPKhmRKNS_22WebKitEncodedFrameInfoEE
__ZN6webrtc18createLocalEncoderERKNS_14SdpVideoFormatEbU13block_pointerFvPKhmRKNS_22WebKitEncodedFrameInfoEEU13block_pointerFvS4_mE
__ZN6webrtc14SdpVideoFormatD1Ev
__ZN6webrtc19releaseLocalEncoderEPv
__ZN6webrtc23encodeLocalEncoderFrameEPvP10__CVBufferxjNS_13VideoRotationEb
@@ -215,7 +215,7 @@ __ZN6webrtc20setLocalEncoderRatesEPvjj
__ZN6webrtc25setLocalEncoderLowLatencyEPvb
__ZN6webrtc22initializeLocalEncoderEPvttjjjj
__ZN6webrtc14SdpVideoFormatC1ERKNSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEERKNS1_3mapIS7_S7_NS1_4lessIS7_EENS5_INS1_4pairIS8_S7_EEEEEE
__ZN6webrtc18createLocalEncoderERKNS_14SdpVideoFormatEU13block_pointerFvPKhmRKNS_22WebKitEncodedFrameInfoEE
__ZN6webrtc18createLocalEncoderERKNS_14SdpVideoFormatEbU13block_pointerFvPKhmRKNS_22WebKitEncodedFrameInfoEEU13block_pointerFvS4_mE
__ZN6webrtc14SdpVideoFormatD1Ev
__ZN6webrtc19releaseLocalEncoderEPv
__ZN6webrtc23encodeLocalEncoderFrameEPvP10__CVBufferxjNS_13VideoRotationEb
@@ -74,7 +74,8 @@ struct WebKitEncodedFrameInfo {

using LocalEncoder = void*;
using LocalEncoderCallback = void (^)(const uint8_t* buffer, size_t size, const webrtc::WebKitEncodedFrameInfo&);
void* createLocalEncoder(const webrtc::SdpVideoFormat&, LocalEncoderCallback);
using LocalEncoderDescriptionCallback = void (^)(const uint8_t* buffer, size_t size);
void* createLocalEncoder(const webrtc::SdpVideoFormat&, bool useAnnexB, LocalEncoderCallback, LocalEncoderDescriptionCallback);
void releaseLocalEncoder(LocalEncoder);
void initializeLocalEncoder(LocalEncoder, uint16_t width, uint16_t height, unsigned int startBitrate, unsigned int maxBitrate, unsigned int minBitrate, uint32_t maxFramerate);
void encodeLocalEncoderFrame(LocalEncoder, CVPixelBufferRef, int64_t timeStampNs, uint32_t timeStamp, webrtc::VideoRotation, bool isKeyframeRequired);
@@ -48,6 +48,8 @@ - (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings numberO
- (NSInteger)encode:(RTCVideoFrame *)frame codecSpecificInfo:(nullable id<RTCCodecSpecificInfo>)info frameTypes:(NSArray<NSNumber *> *)frameTypes;
- (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate;
- (void)setLowLatency:(bool)lowLatencyEnabled;
- (void)setUseAnnexB:(bool)useAnnexB;
- (void)setDescriptionCallback:(RTCVideoEncoderDescriptionCallback)callback;
@end

@implementation WK_RTCLocalVideoH264H265Encoder {
@@ -105,6 +107,22 @@ - (void)setLowLatency:(bool)lowLatencyEnabled {
if (m_h264Encoder)
[m_h264Encoder setH264LowLatencyEncoderEnabled:lowLatencyEnabled];
}

- (void)setUseAnnexB:(bool)useAnnexB {
if (m_h264Encoder) {
[m_h264Encoder setUseAnnexB:useAnnexB];
return;
}
[m_h265Encoder setUseAnnexB:useAnnexB];
}

- (void)setDescriptionCallback:(RTCVideoEncoderDescriptionCallback)callback {
if (m_h264Encoder) {
[m_h264Encoder setDescriptionCallback:callback];
return;
}
[m_h265Encoder setDescriptionCallback:callback];
}
@end

namespace webrtc {
@@ -311,7 +329,7 @@ void encoderVideoTaskComplete(void* callback, webrtc::VideoCodecType codecType,
static_cast<EncodedImageCallback*>(callback)->OnEncodedImage(encodedImage, &codecSpecificInfo);
}

void* createLocalEncoder(const webrtc::SdpVideoFormat& format, LocalEncoderCallback callback)
void* createLocalEncoder(const webrtc::SdpVideoFormat& format, bool useAnnexB, LocalEncoderCallback frameCallback, LocalEncoderDescriptionCallback descriptionCallback)
{
auto *codecInfo = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat: format];
auto *encoder = [[WK_RTCLocalVideoH264H265Encoder alloc] initWithCodecInfo:codecInfo];
@@ -331,12 +349,14 @@ void encoderVideoTaskComplete(void* callback, webrtc::VideoCodecType codecType,
info.qp = encodedImage.qp_;
info.timing = encodedImage.timing_;

callback(encodedImage.data(), encodedImage.size(), info);
frameCallback(encodedImage.data(), encodedImage.size(), info);
return YES;
}];

return (__bridge_retained void*)encoder;
[encoder setUseAnnexB:useAnnexB];
[encoder setDescriptionCallback:descriptionCallback];

return (__bridge_retained void*)encoder;
}

void releaseLocalEncoder(LocalEncoder localEncoder)
@@ -25,6 +25,8 @@ typedef BOOL (^RTCVideoEncoderCallback)(RTCEncodedImage *frame,
id<RTCCodecSpecificInfo> info,
RTCRtpFragmentationHeader* __nullable header);

typedef void (^RTCVideoEncoderDescriptionCallback)(const uint8_t *frame, size_t size);

/** Protocol for encoder implementations. */
RTC_OBJC_EXPORT
@protocol RTCVideoEncoder <NSObject>
@@ -20,4 +20,6 @@ __attribute__((objc_runtime_name("WK_RTCVideoEncoderH264")))

- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo;
- (void)setH264LowLatencyEncoderEnabled:(bool)enabled;
- (void)setUseAnnexB:(bool)useAnnexB;
- (void)setDescriptionCallback:(RTCVideoEncoderDescriptionCallback)callback;
@end
@@ -363,6 +363,8 @@ @implementation RTCVideoEncoderH264 {
bool _isBelowExpectedFrameRate;
uint32_t _frameCount;
int64_t _lastFrameRateEstimationTime;
bool _useAnnexB;
RTCVideoEncoderDescriptionCallback _descriptionCallback;
}

// .5 is set as a minimum to prevent overcompensating for large temporary
@@ -395,6 +397,7 @@ - (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo {
_isBelowExpectedFrameRate = false;
_frameCount = 0;
_lastFrameRateEstimationTime = 0;
_useAnnexB = true;
return self;
}

@@ -437,6 +440,16 @@ - (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings *)settings
return [self resetCompressionSessionWithPixelFormat:kNV12PixelFormat];
}

- (void)setUseAnnexB:(bool)useAnnexB
{
_useAnnexB = useAnnexB;
}

- (void)setDescriptionCallback:(RTCVideoEncoderDescriptionCallback)callback
{
_descriptionCallback = callback;
}

- (bool)hasCompressionSession
{
return _vtCompressionSession || _vcpCompressionSession;
@@ -987,8 +1000,29 @@ - (void)frameWasEncoded:(OSStatus)status
}

__block std::unique_ptr<rtc::Buffer> buffer = std::make_unique<rtc::Buffer>();
if (!webrtc::H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) {
return;
if (_useAnnexB) {
if (!webrtc::H264CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) {
RTC_LOG(LS_WARNING) << "Unable to parse H264 encoded buffer";
return;
}
} else {
if (_descriptionCallback) {
buffer->SetSize(0);
CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
size_t currentStart = 0;
size_t size = CMBlockBufferGetDataLength(blockBuffer);
while (currentStart < size) {
char* data = nullptr;
size_t length;
if (auto error = CMBlockBufferGetDataPointer(blockBuffer, currentStart, &length, nullptr, &data)) {
RTC_LOG(LS_ERROR) << "H264 encoder: CMBlockBufferGetDataPointer failed with error " << error;
return;
}
buffer->AppendData(data, length);
currentStart += length;
}
_descriptionCallback(buffer->data(), buffer->size());
}
}

RTCEncodedImage *frame = [[RTCEncodedImage alloc] init];
@@ -19,5 +19,6 @@ __attribute__((objc_runtime_name("WK_RTCVideoEncoderH265")))
@interface RTCVideoEncoderH265 : NSObject <RTCVideoEncoder>

- (instancetype)initWithCodecInfo:(RTCVideoCodecInfo *)codecInfo;

- (void)setUseAnnexB:(bool)useAnnexB;
- (void)setDescriptionCallback:(RTCVideoEncoderDescriptionCallback)callback;
@end
@@ -174,8 +174,9 @@ @implementation RTCVideoEncoderH265 {
VTCompressionSessionRef _compressionSession;
RTCVideoCodecMode _mode;
int framesLeft;

std::vector<uint8_t> _nv12ScaleBuffer;
bool _useAnnexB;
RTCVideoEncoderDescriptionCallback _descriptionCallback;
}

// .5 is set as a minimum to prevent overcompensating for large temporary
@@ -189,6 +190,7 @@ - (instancetype)initWithCodecInfo:(RTCVideoCodecInfo*)codecInfo {
if (self = [super init]) {
_codecInfo = codecInfo;
_bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95));
_useAnnexB = true;
RTC_CHECK([codecInfo.name isEqualToString:@"H265"]);
}
return self;
@@ -214,6 +216,16 @@ - (NSInteger)startEncodeWithSettings:(RTCVideoEncoderSettings*)settings
return [self resetCompressionSession];
}

- (void)setUseAnnexB:(bool)useAnnexB
{
_useAnnexB = useAnnexB;
}

- (void)setDescriptionCallback:(RTCVideoEncoderDescriptionCallback)callback
{
_descriptionCallback = callback;
}

- (NSInteger)encode:(RTCVideoFrame*)frame
codecSpecificInfo:(id<RTCCodecSpecificInfo>)codecSpecificInfo
frameTypes:(NSArray<NSNumber*>*)frameTypes {
@@ -541,8 +553,29 @@ - (void)frameWasEncoded:(OSStatus)status
}

std::unique_ptr<rtc::Buffer> buffer(new rtc::Buffer());
if (!webrtc::H265CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) {
RTC_LOG(LS_INFO) << "Unable to parse H265 encoded buffer";
if (_useAnnexB) {
if (!webrtc::H265CMSampleBufferToAnnexBBuffer(sampleBuffer, isKeyframe, buffer.get())) {
RTC_LOG(LS_WARNING) << "Unable to parse H265 encoded buffer";
return;
}
} else {
if (_descriptionCallback) {
buffer->SetSize(0);
CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
size_t currentStart = 0;
size_t size = CMBlockBufferGetDataLength(blockBuffer);
while (currentStart < size) {
char* data = nullptr;
size_t length;
if (auto error = CMBlockBufferGetDataPointer(blockBuffer, currentStart, &length, nullptr, &data)) {
RTC_LOG(LS_ERROR) << "H265 encoder: CMBlockBufferGetDataPointer failed with error " << error;
return;
}
buffer->AppendData(data, length);
currentStart += length;
}
_descriptionCallback(buffer->data(), buffer->size());
}
}

RTCEncodedImage* frame = [[RTCEncodedImage alloc] init];
@@ -30,6 +30,7 @@

#include "DOMException.h"
#include "JSWebCodecsVideoEncoderSupport.h"
#include "Logging.h"
#include "WebCodecsEncodedVideoChunkMetadata.h"
#include "WebCodecsEncodedVideoChunkOutputCallback.h"
#include "WebCodecsErrorCallback.h"
@@ -101,8 +102,8 @@ ExceptionOr<void> WebCodecsVideoEncoder::configure(WebCodecsVideoEncoderConfig&&
};
}
bool useAnnexB = config.avc && config.avc->format == AvcBitstreamFormat::Annexb;
VideoEncoder::create(config.codec, { config.width, config.height, useAnnexB }, [this, weakedThis = WeakPtr { *this }](auto&& result) {
if (!weakedThis)
VideoEncoder::create(config.codec, { config.width, config.height, useAnnexB }, [this, weakThis = WeakPtr { *this }](auto&& result) {
if (!weakThis)
return;

if (!result.has_value()) {
@@ -112,8 +113,14 @@ ExceptionOr<void> WebCodecsVideoEncoder::configure(WebCodecsVideoEncoderConfig&&
setInternalEncoder(WTFMove(result.value()));
m_isMessageQueueBlocked = false;
processControlMessageQueue();
}, [this, weakedThis = WeakPtr { *this }](auto&& result) {
if (!weakedThis || m_state != WebCodecsCodecState::Configured)
}, [this, weakThis = WeakPtr { *this }](auto&& configuration) {
if (!weakThis)
return;

m_activeConfiguration = WTFMove(configuration);
m_hasNewActiveConfiguration = true;
}, [this, weakThis = WeakPtr { *this }](auto&& result) {
if (!weakThis || m_state != WebCodecsCodecState::Configured)
return;

RefPtr<JSC::ArrayBuffer> buffer = JSC::ArrayBuffer::create(result.data.data(), result.data.size());
@@ -123,13 +130,43 @@ ExceptionOr<void> WebCodecsVideoEncoder::configure(WebCodecsVideoEncoderConfig&&
result.duration,
BufferSource { WTFMove(buffer) }
});
// FIXME: Implement metadata.
m_output->handleEvent(WTFMove(chunk), { });
m_output->handleEvent(WTFMove(chunk), createEncodedChunkMetadata());
}, WTFMove(postTaskCallback));
});
return { };
}

WebCodecsEncodedVideoChunkMetadata WebCodecsVideoEncoder::createEncodedChunkMetadata()
{
WebCodecsVideoDecoderConfig decoderConfig;
if (!m_hasNewActiveConfiguration)
return { };

m_hasNewActiveConfiguration = false;
// FIXME: Provide more accurate decoder configuration
WebCodecsVideoDecoderConfig config {
WTFMove(m_activeConfiguration.codec),
{ },
m_activeConfiguration.visibleWidth,
m_activeConfiguration.visibleHeight,
m_activeConfiguration.displayWidth,
m_activeConfiguration.displayHeight,
{ },
HardwareAcceleration::NoPreference,
{ }
};
if (m_activeConfiguration.description) {
auto arrayBuffer = ArrayBuffer::tryCreateUninitialized(m_activeConfiguration.description->size(), 0);
RELEASE_LOG_ERROR_IF(!arrayBuffer, Media, "Cannot create array buffer for WebCodecs encoder description");
if (arrayBuffer) {
memcpy(static_cast<uint8_t*>(arrayBuffer->data()), m_activeConfiguration.description->data(), m_activeConfiguration.description->size());
config.description = WTFMove(arrayBuffer);
}
}

return WebCodecsEncodedVideoChunkMetadata { WTFMove(config) };
}

ExceptionOr<void> WebCodecsVideoEncoder::encode(Ref<WebCodecsVideoFrame>&& frame, WebCodecsVideoEncoderEncodeOptions&& options)
{
auto internalFrame = frame->internalFrame();
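The createEncodedChunkMetadata() addition above is what lets a page feed the encoder output straight into a decoder. A hedged sketch of that consumer side (standard WebCodecs usage, not code from this patch):

    let decoder = null;
    function onEncoderOutput(chunk, metadata) {
        // decoderConfig is only attached while m_hasNewActiveConfiguration is set,
        // i.e. on the first chunk after (re)configuration; for AVC it includes
        // the description buffer built above.
        if (!decoder && metadata && metadata.decoderConfig) {
            decoder = new VideoDecoder({ output: (frame) => frame.close(), error: (e) => console.error(e) });
            decoder.configure(metadata.decoderConfig);
        }
        if (decoder)
            decoder.decode(chunk);
    }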
@@ -40,6 +40,7 @@ class WebCodecsEncodedVideoChunk;
class WebCodecsErrorCallback;
class WebCodecsEncodedVideoChunkOutputCallback;
class WebCodecsVideoFrame;
struct WebCodecsEncodedVideoChunkMetadata;
struct WebCodecsVideoEncoderConfig;
struct WebCodecsVideoEncoderEncodeOptions;

@@ -94,6 +95,7 @@ class WebCodecsVideoEncoder

void queueControlMessageAndProcess(Function<void()>&&);
void processControlMessageQueue();
WebCodecsEncodedVideoChunkMetadata createEncodedChunkMetadata();

WebCodecsCodecState m_state { WebCodecsCodecState::Unconfigured };
size_t m_encodeQueueSize { 0 };
@@ -106,6 +108,8 @@ class WebCodecsVideoEncoder
bool m_isKeyChunkRequired { false };
Deque<Function<void()>> m_controlMessageQueue;
bool m_isMessageQueueBlocked { false };
VideoEncoder::ActiveConfiguration m_activeConfiguration;
bool m_hasNewActiveConfiguration { false };
};

}
@@ -74,9 +74,12 @@ class LibWebRTCVPXInternalVideoEncoder : public ThreadSafeRefCounted<LibWebRTCVP
bool m_isClosed { false };
};

LibWebRTCVPXVideoEncoder::LibWebRTCVPXVideoEncoder(Type type, const VideoEncoder::Config& config, OutputCallback&& outputCallback, PostTaskCallback&& postTaskCallback)
LibWebRTCVPXVideoEncoder::LibWebRTCVPXVideoEncoder(Type type, const VideoEncoder::Config& config, DescriptionCallback&& descriptionCallback, OutputCallback&& outputCallback, PostTaskCallback&& postTaskCallback)
: m_internalEncoder(LibWebRTCVPXInternalVideoEncoder::create(type, config, WTFMove(outputCallback), WTFMove(postTaskCallback)))
{
vpxQueue().dispatch([type, descriptionCallback = WTFMove(descriptionCallback)]() mutable {
descriptionCallback(VideoEncoder::ActiveConfiguration { type == Type::VP8 ? "vp8"_s : "vp9.00"_s, { }, { }, { }, { }, { } });
});
}

LibWebRTCVPXVideoEncoder::~LibWebRTCVPXVideoEncoder()
@@ -39,7 +39,7 @@ class LibWebRTCVPXVideoEncoder : public VideoEncoder {
WTF_MAKE_FAST_ALLOCATED;
public:
enum class Type { VP8, VP9 };
LibWebRTCVPXVideoEncoder(Type, const Config&, OutputCallback&&, PostTaskCallback&&);
LibWebRTCVPXVideoEncoder(Type, const Config&, DescriptionCallback&&, OutputCallback&&, PostTaskCallback&&);
~LibWebRTCVPXVideoEncoder();

private:
