diff --git a/Source/ThirdParty/libwebrtc/Configurations/libwebrtc.xcconfig b/Source/ThirdParty/libwebrtc/Configurations/libwebrtc.xcconfig index 39ddbe721ba5..4d9ece9ff414 100644 --- a/Source/ThirdParty/libwebrtc/Configurations/libwebrtc.xcconfig +++ b/Source/ThirdParty/libwebrtc/Configurations/libwebrtc.xcconfig @@ -61,7 +61,7 @@ INSTALLHDRS_SCRIPT_PHASE = YES; WARNING_CFLAGS = -Wno-deprecated-declarations $(inherited); // FIXME: Set WEBRTC_USE_BUILTIN_ISAC_FIX and WEBRTC_USE_BUILTIN_ISAC_FLOAT for iOS and Mac -GCC_PREPROCESSOR_DEFINITIONS = GTEST_RELATIVE_PATH WEBRTC_OPUS_SUPPORT_120MS_PTIME=0 WEBRTC_POSIX WEBRTC_MAC SSL_USE_OPENSSL FEATURE_ENABLE_SSL HAVE_SRTP HAVE_NETINET_IN_H HAVE_OPENSSL_SSL_H SCTP_PROCESS_LEVEL_LOCKS SCTP_SIMPLE_ALLOCATOR SCTP_USE_OPENSSL_SHA1 __Userspace__ HAVE_SA_LEN HAVE_SCONN_LEN __APPLE_USE_RFC_2292 __Userspace_os_Darwin NON_WINDOWS_DEFINE HAVE_WEBRTC_VIDEO HAVE_WEBRTC_VOICE WEBRTC_INTELLIGIBILITY_ENHANCER=0 WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_NS_FLOAT WEBRTC_USE_BUILTIN_OPUS WEBRTC_CODEC_OPUS WEBRTC_CODEC_ISAC WEBRTC_CODEC_RED HAVE_STDINT_H HAVE_STDLIB_H HAVE_UINT64_T OPENSSL HAVE_CONFIG_H WEBRTC_WEBKIT_BUILD HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE HAVE_SCTP WEBRTC_CODEC_G711 WEBRTC_CODEC_G722 WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 WEBRTC_USE_BUILTIN_ISAC_FIX=1 WEBRTC_USE_BUILTIN_ISAC_FLOAT=0 USE_BUILTIN_SW_CODECS WEBRTC_WEBKIT_BUILD RTC_ENABLE_VP9 NO_MAIN_THREAD_WRAPPING RTC_DISABLE_TRACE_EVENTS WEBRTC_HAVE_SCTP WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR WEBRTC_HAVE_DCSCTP $(inherited); +GCC_PREPROCESSOR_DEFINITIONS = GTEST_RELATIVE_PATH WEBRTC_OPUS_SUPPORT_120MS_PTIME=0 WEBRTC_POSIX WEBRTC_MAC SSL_USE_OPENSSL FEATURE_ENABLE_SSL HAVE_SRTP HAVE_NETINET_IN_H HAVE_OPENSSL_SSL_H SCTP_PROCESS_LEVEL_LOCKS SCTP_SIMPLE_ALLOCATOR SCTP_USE_OPENSSL_SHA1 __Userspace__ HAVE_SA_LEN HAVE_SCONN_LEN __APPLE_USE_RFC_2292 __Userspace_os_Darwin NON_WINDOWS_DEFINE HAVE_WEBRTC_VIDEO HAVE_WEBRTC_VOICE WEBRTC_INTELLIGIBILITY_ENHANCER=0 WEBRTC_APM_DEBUG_DUMP=0 WEBRTC_NS_FLOAT WEBRTC_USE_H265 WEBRTC_USE_BUILTIN_OPUS WEBRTC_CODEC_OPUS WEBRTC_CODEC_ISAC WEBRTC_CODEC_RED HAVE_STDINT_H HAVE_STDLIB_H HAVE_UINT64_T OPENSSL HAVE_CONFIG_H WEBRTC_WEBKIT_BUILD HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE HAVE_SCTP WEBRTC_CODEC_G711 WEBRTC_CODEC_G722 WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 WEBRTC_USE_BUILTIN_ISAC_FIX=1 WEBRTC_USE_BUILTIN_ISAC_FLOAT=0 USE_BUILTIN_SW_CODECS WEBRTC_WEBKIT_BUILD RTC_ENABLE_VP9 NO_MAIN_THREAD_WRAPPING RTC_DISABLE_TRACE_EVENTS WEBRTC_HAVE_SCTP WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR WEBRTC_HAVE_DCSCTP $(inherited); GCC_PREPROCESSOR_DEFINITIONS[sdk=macosx*] = $(inherited) WEBRTC_USE_VTB_HARDWARE_ENCODER $(GCC_PREPROCESSOR_DEFINITIONS_$(WK_IS_CATALYST)); GCC_PREPROCESSOR_DEFINITIONS_YES = WEBRTC_MAC_CATALYST WEBRTC_IOS; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video/video_codec_type.h b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video/video_codec_type.h index 392ebc4bec3d..aad835281dff 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video/video_codec_type.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video/video_codec_type.h @@ -21,9 +21,7 @@ enum VideoCodecType { kVideoCodecVP9, kVideoCodecAV1, kVideoCodecH264, -#ifndef DISABLE_H265 kVideoCodecH265, -#endif kVideoCodecMultiplex, }; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.cc index e1475cc1fdc9..2289037fa89e 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.cc +++ 
b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.cc @@ -26,7 +26,7 @@ constexpr char kPayloadNameAv1[] = "AV1"; // needed. constexpr char kPayloadNameAv1x[] = "AV1X"; constexpr char kPayloadNameH264[] = "H264"; -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 constexpr char kPayloadNameH265[] = "H265"; #endif constexpr char kPayloadNameGeneric[] = "Generic"; @@ -55,6 +55,15 @@ bool VideoCodecH264::operator==(const VideoCodecH264& other) const { numberOfTemporalLayers == other.numberOfTemporalLayers); } +bool VideoCodecH265::operator==(const VideoCodecH265& other) const { + return (frameDroppingOn == other.frameDroppingOn && + keyFrameInterval == other.keyFrameInterval && + vpsLen == other.vpsLen && spsLen == other.spsLen && + ppsLen == other.ppsLen && + (spsLen == 0 || memcmp(spsData, other.spsData, spsLen) == 0) && + (ppsLen == 0 || memcmp(ppsData, other.ppsData, ppsLen) == 0)); +} + VideoCodec::VideoCodec() : codecType(kVideoCodecGeneric), width(0), @@ -105,6 +114,18 @@ const VideoCodecH264& VideoCodec::H264() const { return codec_specific_.H264; } +#ifdef WEBRTC_USE_H265 +VideoCodecH265* VideoCodec::H265() { + RTC_DCHECK_EQ(codecType, kVideoCodecH265); + return &codec_specific_.H265; +} + +const VideoCodecH265& VideoCodec::H265() const { + RTC_DCHECK_EQ(codecType, kVideoCodecH265); + return codec_specific_.H265; +} +#endif + const char* CodecTypeToPayloadString(VideoCodecType type) { switch (type) { case kVideoCodecVP8: @@ -115,7 +136,7 @@ const char* CodecTypeToPayloadString(VideoCodecType type) { return kPayloadNameAv1; case kVideoCodecH264: return kPayloadNameH264; -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case kVideoCodecH265: return kPayloadNameH265; #endif @@ -137,7 +158,7 @@ VideoCodecType PayloadStringToCodecType(const std::string& name) { return kVideoCodecAV1; if (absl::EqualsIgnoreCase(name, kPayloadNameH264)) return kVideoCodecH264; -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 if (absl::EqualsIgnoreCase(name, kPayloadNameH265)) return kVideoCodecH265; #endif diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.h b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.h index 496cfb5e22a0..4506d4562dc7 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_codec.h @@ -97,6 +97,21 @@ struct VideoCodecH264 { uint8_t numberOfTemporalLayers; }; +struct VideoCodecH265 { + bool operator==(const VideoCodecH265& other) const; + bool operator!=(const VideoCodecH265& other) const { + return !(*this == other); + } + bool frameDroppingOn; + int keyFrameInterval; + const uint8_t* vpsData; + size_t vpsLen; + const uint8_t* spsData; + size_t spsLen; + const uint8_t* ppsData; + size_t ppsLen; +}; + // Translates from name of codec to codec type and vice versa. RTC_EXPORT const char* CodecTypeToPayloadString(VideoCodecType type); RTC_EXPORT VideoCodecType PayloadStringToCodecType(const std::string& name); @@ -105,6 +120,7 @@ union VideoCodecUnion { VideoCodecVP8 VP8; VideoCodecVP9 VP9; VideoCodecH264 H264; + VideoCodecH265 H265; }; enum class VideoCodecMode { kRealtimeVideo, kScreensharing }; @@ -193,6 +209,8 @@ class RTC_EXPORT VideoCodec { const VideoCodecVP9& VP9() const; VideoCodecH264* H264(); const VideoCodecH264& H264() const; + VideoCodecH265* H265(); + const VideoCodecH265& H265() const; private: // TODO(hta): Consider replacing the union with a pointer type. 
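
The video_codec.{h,cc} hunks above follow the existing H.264 pattern: a VideoCodecH265 member joins the codec-settings union and is reached through H265() accessors that RTC_DCHECK the codec type, all gated on the new WEBRTC_USE_H265 define from the xcconfig change. A minimal usage sketch — not taken from this patch, assuming a build with WEBRTC_USE_H265 defined and the GetDefaultH265Settings() helper added later in video_encoder.cc — of how calling code might fill and compare these settings:

#ifdef WEBRTC_USE_H265
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"

namespace {

// Builds a VideoCodec configured for H.265: set codecType first (the H265()
// accessors RTC_DCHECK it), then fill the codec-specific union member.
webrtc::VideoCodec MakeH265Codec() {
  webrtc::VideoCodec codec;
  codec.codecType = webrtc::kVideoCodecH265;
  codec.width = 1280;   // Illustrative resolution only.
  codec.height = 720;
  *codec.H265() = webrtc::VideoEncoder::GetDefaultH265Settings();
  return codec;
}

// Uses the VideoCodecH265::operator== introduced above, which also compares
// the SPS/PPS byte ranges when their lengths are non-zero.
bool SameH265Settings(const webrtc::VideoCodec& a,
                      const webrtc::VideoCodec& b) {
  return a.H265() == b.H265();
}

}  // namespace
#endif  // WEBRTC_USE_H265
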
diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc index 4bb051da4ce9..c191e02a989f 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc @@ -167,7 +167,7 @@ void VideoDecoderSoftwareFallbackWrapper::UpdateFallbackDecoderHistograms() { RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "H264", hw_decoded_frames_since_last_fallback_); break; -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case kVideoCodecH265: RTC_HISTOGRAM_COUNTS_100000(kFallbackHistogramsUmaPrefix + "H265", hw_decoded_frames_since_last_fallback_); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.cc index b0fe078b3781..81ed0b3b2792 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.cc @@ -58,6 +58,23 @@ VideoCodecH264 VideoEncoder::GetDefaultH264Settings() { return h264_settings; } +#ifdef WEBRTC_USE_H265 +VideoCodecH265 VideoEncoder::GetDefaultH265Settings() { + VideoCodecH265 h265_settings; + memset(&h265_settings, 0, sizeof(h265_settings)); + + // h265_settings.profile = kProfileBase; + h265_settings.frameDroppingOn = true; + h265_settings.keyFrameInterval = 3000; + h265_settings.spsData = nullptr; + h265_settings.spsLen = 0; + h265_settings.ppsData = nullptr; + h265_settings.ppsLen = 0; + + return h265_settings; +} +#endif + VideoEncoder::ScalingSettings::ScalingSettings() = default; VideoEncoder::ScalingSettings::ScalingSettings(KOff) : ScalingSettings() {} diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.h b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.h index 49ea6e1c0e53..cd9af70d6f94 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/api/video_codecs/video_encoder.h @@ -338,6 +338,9 @@ class RTC_EXPORT VideoEncoder { static VideoCodecVP8 GetDefaultVp8Settings(); static VideoCodecVP9 GetDefaultVp9Settings(); static VideoCodecH264 GetDefaultH264Settings(); +#ifdef WEBRTC_USE_H265 + static VideoCodecH265 GetDefaultH265Settings(); +#endif virtual ~VideoEncoder() {} diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/call/rtp_payload_params.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/call/rtp_payload_params.cc index 98be21b2b7f1..16a6961d9f54 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/call/rtp_payload_params.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/call/rtp_payload_params.cc @@ -96,12 +96,11 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, info.codecSpecific.H264.packetization_mode; return; } -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case kVideoCodecH265: { auto& h265_header = rtp->video_type_header.emplace(); h265_header.packetization_mode = info.codecSpecific.H265.packetization_mode; - rtp->simulcastIdx = spatial_index.value_or(0); return; } #endif @@ -348,7 +347,7 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, is_keyframe, rtp_video_header); } return; -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case 
VideoCodecType::kVideoCodecH265: // FIXME: Implement H265 to generic descriptor. return; @@ -416,7 +415,7 @@ absl::optional RtpPayloadParams::GenericStructure( } case VideoCodecType::kVideoCodecAV1: case VideoCodecType::kVideoCodecH264: -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case VideoCodecType::kVideoCodecH265: #endif case VideoCodecType::kVideoCodecMultiplex: diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.cc index c639e135e510..6dc8a79ae35d 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.cc @@ -10,50 +10,23 @@ #include "common_video/h265/h265_common.h" +#include "common_video/h264/h264_common.h" + namespace webrtc { namespace H265 { const uint8_t kNaluTypeMask = 0x7E; -std::vector FindNaluIndices(const uint8_t* buffer, +std::vector FindNaluIndices(const uint8_t* buffer, size_t buffer_size) { - // This is sorta like Boyer-Moore, but with only the first optimization step: - // given a 3-byte sequence we're looking at, if the 3rd byte isn't 1 or 0, - // skip ahead to the next 3-byte sequence. 0s and 1s are relatively rare, so - // this will skip the majority of reads/checks. - std::vector sequences; - if (buffer_size < kNaluShortStartSequenceSize) - return sequences; - - const size_t end = buffer_size - kNaluShortStartSequenceSize; - for (size_t i = 0; i < end;) { - if (buffer[i + 2] > 1) { - i += 3; - } else if (buffer[i + 2] == 1 && buffer[i + 1] == 0 && buffer[i] == 0) { - // We found a start sequence, now check if it was a 3 of 4 byte one. - H264::NaluIndex index = {i, i + 3, 0}; - if (index.start_offset > 0 && buffer[index.start_offset - 1] == 0) - --index.start_offset; - - // Update length of previous entry. - auto it = sequences.rbegin(); - if (it != sequences.rend()) - it->payload_size = index.start_offset - it->payload_start_offset; - - sequences.push_back(index); - - i += 3; - } else { - ++i; - } + std::vector indices = + H264::FindNaluIndices(buffer, buffer_size); + std::vector results; + for (auto& index : indices) { + results.push_back( + {index.start_offset, index.payload_start_offset, index.payload_size}); } - - // Update length of last entry, if any. - auto it = sequences.rbegin(); - if (it != sequences.rend()) - it->payload_size = buffer_size - it->payload_start_offset; - - return sequences; + return results; } NaluType ParseNaluType(uint8_t data) { @@ -61,49 +34,26 @@ NaluType ParseNaluType(uint8_t data) { } std::vector ParseRbsp(const uint8_t* data, size_t length) { - std::vector out; - out.reserve(length); - - for (size_t i = 0; i < length;) { - // Be careful about over/underflow here. byte_length_ - 3 can underflow, and - // i + 3 can overflow, but byte_length_ - i can't, because i < byte_length_ - // above, and that expression will produce the number of bytes left in - // the stream including the byte at i. - if (length - i >= 3 && !data[i] && !data[i + 1] && data[i + 2] == 3) { - // Two rbsp bytes. - out.push_back(data[i++]); - out.push_back(data[i++]); - // Skip the emulation byte. - i++; - } else { - // Single rbsp byte. 
- out.push_back(data[i++]); - } - } - return out; + return H264::ParseRbsp(data, length); } void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination) { - static const uint8_t kZerosInStartSequence = 2; - static const uint8_t kEmulationByte = 0x03u; - size_t num_consecutive_zeros = 0; - destination->EnsureCapacity(destination->size() + length); + H264::WriteRbsp(bytes, length, destination); +} - for (size_t i = 0; i < length; ++i) { - uint8_t byte = bytes[i]; - if (byte <= kEmulationByte && - num_consecutive_zeros >= kZerosInStartSequence) { - // Need to escape. - destination->AppendData(kEmulationByte); - num_consecutive_zeros = 0; - } - destination->AppendData(byte); - if (byte == 0) { - ++num_consecutive_zeros; - } else { - num_consecutive_zeros = 0; - } +uint32_t Log2(uint32_t value) { + uint32_t result = 0; + // If value is not a power of two an additional bit is required + // to account for the ceil() of log2() below. + if ((value & (value - 1)) != 0) { + ++result; + } + while (value > 0) { + value >>= 1; + ++result; } + + return result; } } // namespace H265 diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.h b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.h index d3c5326b72c3..a829195a1007 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_common.h @@ -14,7 +14,6 @@ #include #include -#include "common_video/h264/h264_common.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -28,8 +27,8 @@ const size_t kNaluLongStartSequenceSize = 4; // not the first NALU of an access unit or an SPS or PPS block. const size_t kNaluShortStartSequenceSize = 3; -// The size of the NALU type byte (1). -const size_t kNaluTypeSize = 1; +// The size of the NALU type byte (2). +const size_t kNaluTypeSize = 2; enum NaluType : uint8_t { kTrailN = 0, @@ -57,10 +56,19 @@ enum NaluType : uint8_t { kFU = 49 }; -enum SliceType : uint8_t { kP = 0, kB = 1, kI = 2, kSp = 3, kSi = 4 }; +enum SliceType : uint8_t { kB = 0, kP = 1, kI = 2 }; + +struct NaluIndex { + // Start index of NALU, including start sequence. + size_t start_offset; + // Start index of NALU payload, typically type header. + size_t payload_start_offset; + // Length of NALU payload, in bytes, counting from payload_start_offset. + size_t payload_size; +}; // Returns a vector of the NALU indices in the given buffer. -std::vector FindNaluIndices(const uint8_t* buffer, +std::vector FindNaluIndices(const uint8_t* buffer, size_t buffer_size); // Get the NAL type from the header byte immediately following start sequence. @@ -86,6 +94,8 @@ std::vector ParseRbsp(const uint8_t* data, size_t length); // bytes in order to escape any data the could be interpreted as a start // sequence. 
void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination); + +uint32_t Log2(uint32_t value); } // namespace H265 } // namespace webrtc diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.cc index 1df2667d2eae..6857f8142925 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.cc @@ -13,14 +13,16 @@ #include #include -#include "common_video/h264/h264_common.h" +#include "absl/types/optional.h" #include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_sps_parser.h" +#include "rtc_base/bit_buffer.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/logging.h" #define RETURN_EMPTY_ON_FAIL(x) \ if (!(x)) { \ - return absl::nullopt; \ + return absl::nullopt; \ } namespace { @@ -30,7 +32,7 @@ const int kMinPicInitQpDeltaValue = -26; namespace webrtc { -// General note: this is based off the 02/2018 version of the H.265 standard. +// General note: this is based off the 06/2019 version of the H.265 standard. // You can find it on this page: // http://www.itu.int/rec/T-REC-H.265 @@ -39,10 +41,8 @@ absl::optional H265PpsParser::ParsePps( size_t length) { // First, parse out rbsp, which is basically the source buffer minus emulation // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in - // section 7.3.1 of the H.264 standard. - std::vector unpacked_buffer = H264::ParseRbsp(data, length); - BitstreamReader bit_buffer(unpacked_buffer); - return ParseInternal(&bit_buffer); + // section 7.3.1.1 of the H.265 standard. + return ParseInternal(H265::ParseRbsp(data, length)); } bool H265PpsParser::ParsePpsIds(const uint8_t* data, @@ -53,20 +53,26 @@ bool H265PpsParser::ParsePpsIds(const uint8_t* data, RTC_DCHECK(sps_id); // First, parse out rbsp, which is basically the source buffer minus emulation // bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in - // section 7.3.1 of the H.265 standard. - std::vector unpacked_buffer = H264::ParseRbsp(data, length); - BitstreamReader bit_buffer(unpacked_buffer); - return ParsePpsIdsInternal(&bit_buffer, pps_id, sps_id); + // section 7.3.1.1 of the H.265 standard. 
+ std::vector unpacked_buffer = H265::ParseRbsp(data, length); + BitstreamReader reader(unpacked_buffer); + *pps_id = reader.ReadExponentialGolomb(); + *sps_id = reader.ReadExponentialGolomb(); + return reader.Ok(); } absl::optional H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( const uint8_t* data, size_t length, uint8_t nalu_type) { - BitstreamReader slice_reader(rtc::MakeArrayView(data, length)); + std::vector unpacked_buffer = H265::ParseRbsp(data, length); + BitstreamReader slice_reader(unpacked_buffer); // first_slice_segment_in_pic_flag: u(1) slice_reader.ConsumeBits(1); + if (!slice_reader.Ok()) { + return absl::nullopt; + } if (nalu_type >= H265::NaluType::kBlaWLp && nalu_type <= H265::NaluType::kRsvIrapVcl23) { @@ -76,116 +82,139 @@ absl::optional H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( // slice_pic_parameter_set_id: ue(v) uint32_t slice_pic_parameter_set_id = slice_reader.ReadExponentialGolomb(); + if (!slice_reader.Ok()) { + return absl::nullopt; + } return slice_pic_parameter_set_id; } absl::optional H265PpsParser::ParseInternal( - BitstreamReader* bit_buffer) { + rtc::ArrayView buffer) { + BitstreamReader reader(buffer); PpsState pps; - RETURN_EMPTY_ON_FAIL(ParsePpsIdsInternal(bit_buffer, &pps.id, &pps.sps_id)); - - uint32_t bits_tmp; - uint32_t golomb_ignored; - // entropy_coding_mode_flag: u(1) - uint32_t entropy_coding_mode_flag = bit_buffer->ReadBits(1); - pps.entropy_coding_mode_flag = entropy_coding_mode_flag != 0; - // bottom_field_pic_order_in_frame_present_flag: u(1) - uint32_t bottom_field_pic_order_in_frame_present_flag = bit_buffer->ReadBits(1); - pps.bottom_field_pic_order_in_frame_present_flag = - bottom_field_pic_order_in_frame_present_flag != 0; - - // num_slice_groups_minus1: ue(v) - uint32_t num_slice_groups_minus1 = bit_buffer->ReadExponentialGolomb(); - if (num_slice_groups_minus1 > 0) { - // slice_group_map_type: ue(v) - uint32_t slice_group_map_type = bit_buffer->ReadExponentialGolomb(); - if (slice_group_map_type == 0) { - for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1; - ++i_group) { - // run_length_minus1[iGroup]: ue(v) - golomb_ignored = bit_buffer->ReadExponentialGolomb(); - } - } else if (slice_group_map_type == 1) { - // TODO(sprang): Implement support for dispersed slice group map type. - // See 8.2.2.2 Specification for dispersed slice group map type. - } else if (slice_group_map_type == 2) { - for (uint32_t i_group = 0; i_group <= num_slice_groups_minus1; - ++i_group) { - // top_left[iGroup]: ue(v) - golomb_ignored = bit_buffer->ReadExponentialGolomb(); - // bottom_right[iGroup]: ue(v) - golomb_ignored = bit_buffer->ReadExponentialGolomb(); - } - } else if (slice_group_map_type == 3 || slice_group_map_type == 4 || - slice_group_map_type == 5) { - // slice_group_change_direction_flag: u(1) - bits_tmp = bit_buffer->ReadBits(1); - // slice_group_change_rate_minus1: ue(v) - golomb_ignored = bit_buffer->ReadExponentialGolomb(); - } else if (slice_group_map_type == 6) { - // pic_size_in_map_units_minus1: ue(v) - uint32_t pic_size_in_map_units_minus1 = bit_buffer->ReadExponentialGolomb(); - uint32_t slice_group_id_bits = 0; - uint32_t num_slice_groups = num_slice_groups_minus1 + 1; - // If num_slice_groups is not a power of two an additional bit is required - // to account for the ceil() of log2() below. 
- if ((num_slice_groups & (num_slice_groups - 1)) != 0) - ++slice_group_id_bits; - while (num_slice_groups > 0) { - num_slice_groups >>= 1; - ++slice_group_id_bits; - } - for (uint32_t i = 0; i <= pic_size_in_map_units_minus1; i++) { - // slice_group_id[i]: u(v) - // Represented by ceil(log2(num_slice_groups_minus1 + 1)) bits. - bits_tmp = bit_buffer->ReadBits(slice_group_id_bits); - } - } + if(!ParsePpsIdsInternal(reader, pps.id, pps.sps_id)){ + return absl::nullopt; } + + // dependent_slice_segments_enabled_flag: u(1) + pps.dependent_slice_segments_enabled_flag = reader.Read(); + // output_flag_present_flag: u(1) + pps.output_flag_present_flag = reader.Read(); + // num_extra_slice_header_bits: u(3) + pps.num_extra_slice_header_bits = reader.ReadBits(3); + // sign_data_hiding_enabled_flag: u(1) + reader.ConsumeBits(1); + // cabac_init_present_flag: u(1) + pps.cabac_init_present_flag = reader.Read(); // num_ref_idx_l0_default_active_minus1: ue(v) - bit_buffer->ReadExponentialGolomb(); + pps.num_ref_idx_l0_default_active_minus1 = reader.ReadExponentialGolomb(); // num_ref_idx_l1_default_active_minus1: ue(v) - bit_buffer->ReadExponentialGolomb(); - // weighted_pred_flag: u(1) - uint32_t weighted_pred_flag; - weighted_pred_flag = bit_buffer->ReadBits(1); - pps.weighted_pred_flag = weighted_pred_flag != 0; - // weighted_bipred_idc: u(2) - pps.weighted_bipred_idc = bit_buffer->ReadBits(2); - - // pic_init_qp_minus26: se(v) - pps.pic_init_qp_minus26 = bit_buffer->ReadSignedExponentialGolomb(); + pps.num_ref_idx_l1_default_active_minus1 = reader.ReadExponentialGolomb(); + // init_qp_minus26: se(v) + pps.pic_init_qp_minus26 = reader.ReadSignedExponentialGolomb(); // Sanity-check parsed value if (pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue || pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) { - RETURN_EMPTY_ON_FAIL(false); + return absl::nullopt; } - // pic_init_qs_minus26: se(v) - bit_buffer->ReadExponentialGolomb(); - // chroma_qp_index_offset: se(v) - bit_buffer->ReadExponentialGolomb(); - // deblocking_filter_control_present_flag: u(1) // constrained_intra_pred_flag: u(1) - bits_tmp = bit_buffer->ReadBits(2); - // redundant_pic_cnt_present_flag: u(1) - pps.redundant_pic_cnt_present_flag = bit_buffer->ReadBits(1); - - // Ignore -Wunused-but-set-variable warnings. 
- (void)bits_tmp; - (void)golomb_ignored; + reader.ConsumeBits(1); + // transform_skip_enabled_flag: u(1) + reader.ConsumeBits(1); + // cu_qp_delta_enabled_flag: u(1) + bool cu_qp_delta_enabled_flag = reader.Read(); + if (cu_qp_delta_enabled_flag) { + // diff_cu_qp_delta_depth: ue(v) + reader.ReadExponentialGolomb(); + } + // pps_cb_qp_offset: se(v) + reader.ReadSignedExponentialGolomb(); + // pps_cr_qp_offset: se(v) + reader.ReadSignedExponentialGolomb(); + // pps_slice_chroma_qp_offsets_present_flag: u(1) + reader.ConsumeBits(1); + // weighted_pred_flag: u(1) + pps.weighted_pred_flag = reader.Read(); + // weighted_bipred_flag: u(1) + pps.weighted_bipred_flag = reader.Read(); + // transquant_bypass_enabled_flag: u(1) + reader.ConsumeBits(1); + // tiles_enabled_flag: u(1) + bool tiles_enabled_flag = reader.Read(); + // entropy_coding_sync_enabled_flag: u(1) + reader.ConsumeBits(1); + if (tiles_enabled_flag) { + // num_tile_columns_minus1: ue(v) + uint32_t num_tile_columns_minus1 = reader.ReadExponentialGolomb(); + // num_tile_rows_minus1: ue(v) + uint32_t num_tile_rows_minus1 = reader.ReadExponentialGolomb(); + // uniform_spacing_flag: u(1) + bool uniform_spacing_flag = reader.Read(); + if (!uniform_spacing_flag) { + for (uint32_t i = 0; i < num_tile_columns_minus1; i++) { + // column_width_minus1: ue(v) + reader.ReadExponentialGolomb(); + } + for (uint32_t i = 0; i < num_tile_rows_minus1; i++) { + // row_height_minus1: ue(v) + reader.ReadExponentialGolomb(); + } + // loop_filter_across_tiles_enabled_flag: u(1) + reader.ConsumeBits(1); + } + } + // pps_loop_filter_across_slices_enabled_flag: u(1) + reader.ConsumeBits(1); + // deblocking_filter_control_present_flag: u(1) + bool deblocking_filter_control_present_flag = reader.Read(); + if (deblocking_filter_control_present_flag) { + // deblocking_filter_override_enabled_flag: u(1) + reader.ConsumeBits(1); + // pps_deblocking_filter_disabled_flag: u(1) + bool pps_deblocking_filter_disabled_flag = reader.Read(); + if (!pps_deblocking_filter_disabled_flag) { + // pps_beta_offset_div2: se(v) + reader.ReadSignedExponentialGolomb(); + // pps_tc_offset_div2: se(v) + reader.ReadSignedExponentialGolomb(); + } + } + // pps_scaling_list_data_present_flag: u(1) + bool pps_scaling_list_data_present_flag = 0; + pps_scaling_list_data_present_flag = reader.Read(); + if (pps_scaling_list_data_present_flag) { + // scaling_list_data() + if (!H265SpsParser::ParseScalingListData(reader)) { + return absl::nullopt; + } + } + // lists_modification_present_flag: u(1) + pps.lists_modification_present_flag = reader.Read(); + // log2_parallel_merge_level_minus2: ue(v) + reader.ReadExponentialGolomb(); + // slice_segment_header_extension_present_flag: u(1) + reader.ConsumeBits(1); + + if (!reader.Ok()) { + return absl::nullopt; + } return pps; } -bool H265PpsParser::ParsePpsIdsInternal(BitstreamReader* bit_buffer, - uint32_t* pps_id, - uint32_t* sps_id) { +bool H265PpsParser::ParsePpsIdsInternal(BitstreamReader& reader, + uint32_t& pps_id, + uint32_t& sps_id) { // pic_parameter_set_id: ue(v) - *pps_id = bit_buffer->ReadExponentialGolomb(); + pps_id = reader.ReadExponentialGolomb(); + if (!reader.Ok()) + return false; // seq_parameter_set_id: ue(v) - *sps_id = bit_buffer->ReadExponentialGolomb(); + sps_id = reader.ReadExponentialGolomb(); + if (!reader.Ok()) + return false; return true; } diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.h b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.h index 
62f86b21fefd..aebc77d6e72c 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_pps_parser.h @@ -12,15 +12,15 @@ #define COMMON_VIDEO_H265_PPS_PARSER_H_ #include "absl/types/optional.h" +#include "api/array_view.h" +#include "rtc_base/bitstream_reader.h" -#ifdef HAVE_STDINT_H -#include -#endif +namespace rtc { +class BitBuffer; +} namespace webrtc { -class BitstreamReader; - // A class for parsing out picture parameter set (PPS) data from a H265 NALU. class H265PpsParser { public: @@ -29,12 +29,16 @@ class H265PpsParser { struct PpsState { PpsState() = default; - bool bottom_field_pic_order_in_frame_present_flag = false; + bool dependent_slice_segments_enabled_flag = false; + bool cabac_init_present_flag = false; + bool output_flag_present_flag = false; + uint32_t num_extra_slice_header_bits = 0; + uint32_t num_ref_idx_l0_default_active_minus1 = 0; + uint32_t num_ref_idx_l1_default_active_minus1 = 0; + int32_t pic_init_qp_minus26 = 0; bool weighted_pred_flag = false; - bool entropy_coding_mode_flag = false; - uint32_t weighted_bipred_idc = false; - uint32_t redundant_pic_cnt_present_flag = 0; - int pic_init_qp_minus26 = 0; + bool weighted_bipred_flag = false; + bool lists_modification_present_flag = false; uint32_t id = 0; uint32_t sps_id = 0; }; @@ -55,10 +59,11 @@ class H265PpsParser { protected: // Parse the PPS state, for a bit buffer where RBSP decoding has already been // performed. - static absl::optional ParseInternal(BitstreamReader* bit_buffer); - static bool ParsePpsIdsInternal(BitstreamReader* bit_buffer, - uint32_t* pps_id, - uint32_t* sps_id); + static absl::optional ParseInternal( + rtc::ArrayView buffer); + static bool ParsePpsIdsInternal(BitstreamReader& reader, + uint32_t& pps_id, + uint32_t& sps_id); }; } // namespace webrtc diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.cc index 0d157624f533..7a3c8c6cb387 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.cc @@ -8,28 +8,28 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "common_video/h265/h265_sps_parser.h" + #include #include #include "common_video/h265/h265_common.h" -#include "common_video/h265/h265_sps_parser.h" -#include "rtc_base/bitstream_reader.h" +#include "rtc_base/bit_buffer.h" #include "rtc_base/logging.h" namespace { typedef absl::optional OptionalSps; - -#define RETURN_EMPTY_ON_FAIL(x) \ - if (!(x)) { \ - return OptionalSps(); \ - } +typedef absl::optional + OptionalShortTermRefPicSet; } // namespace namespace webrtc { H265SpsParser::SpsState::SpsState() = default; -// General note: this is based off the 02/2018 version of the H.265 standard. +H265SpsParser::ShortTermRefPicSet::ShortTermRefPicSet() = default; + +// General note: this is based off the 06/2019 version of the H.265 standard. 
// You can find it on this page: // http://www.itu.int/rec/T-REC-H.265 @@ -37,12 +37,131 @@ H265SpsParser::SpsState::SpsState() = default; absl::optional H265SpsParser::ParseSps( const uint8_t* data, size_t length) { - std::vector unpacked_buffer = H265::ParseRbsp(data, length); - BitstreamReader bit_buffer(unpacked_buffer); - return ParseSpsUpToVui(&bit_buffer); + return ParseSpsInternal(H265::ParseRbsp(data, length)); +} + +bool H265SpsParser::ParseScalingListData(BitstreamReader& reader) { + uint32_t scaling_list_pred_mode_flag[4][6]; + uint32_t scaling_list_pred_matrix_id_delta[4][6]; + int32_t scaling_list_dc_coef_minus8[4][6]; + int32_t scaling_list[4][6][64]; + for (int size_id = 0; size_id < 4; size_id++) { + for (int matrix_id = 0; matrix_id < 6; + matrix_id += (size_id == 3) ? 3 : 1) { + // scaling_list_pred_mode_flag: u(1) + scaling_list_pred_mode_flag[size_id][matrix_id] = reader.Read(); + if (!scaling_list_pred_mode_flag[size_id][matrix_id]) { + // scaling_list_pred_matrix_id_delta: ue(v) + scaling_list_pred_matrix_id_delta[size_id][matrix_id] = + reader.ReadExponentialGolomb(); + } else { + int32_t next_coef = 8; + uint32_t coef_num = std::min(64, 1 << (4 + (size_id << 1))); + if (size_id > 1) { + // scaling_list_dc_coef_minus8: se(v) + scaling_list_dc_coef_minus8[size_id - 2][matrix_id] = + reader.ReadSignedExponentialGolomb(); + next_coef = scaling_list_dc_coef_minus8[size_id - 2][matrix_id]; + } + for (uint32_t i = 0; i < coef_num; i++) { + // scaling_list_delta_coef: se(v) + int32_t scaling_list_delta_coef = + reader.ReadSignedExponentialGolomb(); + next_coef = (next_coef + scaling_list_delta_coef + 256) % 256; + scaling_list[size_id][matrix_id][i] = next_coef; + } + } + } + } + return true; +} + +absl::optional +H265SpsParser::ParseShortTermRefPicSet( + uint32_t st_rps_idx, + uint32_t num_short_term_ref_pic_sets, + const std::vector& + short_term_ref_pic_set, + H265SpsParser::SpsState& sps, + BitstreamReader& reader) { + H265SpsParser::ShortTermRefPicSet ref_pic_set; + + bool inter_ref_pic_set_prediction_flag = false; + if (st_rps_idx != 0) { + // inter_ref_pic_set_prediction_flag: u(1) + inter_ref_pic_set_prediction_flag = reader.Read(); + } + if (inter_ref_pic_set_prediction_flag) { + uint32_t delta_idx_minus1 = 0; + if (st_rps_idx == num_short_term_ref_pic_sets) { + // delta_idx_minus1: ue(v) + delta_idx_minus1 = reader.ReadExponentialGolomb(); + } + // delta_rps_sign: u(1) + reader.ConsumeBits(1); + // abs_delta_rps_minus1: ue(v) + reader.ReadExponentialGolomb(); + uint32_t ref_rps_idx = st_rps_idx - (delta_idx_minus1 + 1); + uint32_t num_delta_pocs = 0; + if (short_term_ref_pic_set[ref_rps_idx].inter_ref_pic_set_prediction_flag) { + auto& used_by_curr_pic_flag = + short_term_ref_pic_set[ref_rps_idx].used_by_curr_pic_flag; + auto& use_delta_flag = short_term_ref_pic_set[ref_rps_idx].use_delta_flag; + if (used_by_curr_pic_flag.size() != use_delta_flag.size()) { + return OptionalShortTermRefPicSet(); + } + for (uint32_t i = 0; i < used_by_curr_pic_flag.size(); i++) { + if (used_by_curr_pic_flag[i] || use_delta_flag[i]) { + num_delta_pocs++; + } + } + } else { + num_delta_pocs = short_term_ref_pic_set[ref_rps_idx].num_negative_pics + + short_term_ref_pic_set[ref_rps_idx].num_positive_pics; + } + ref_pic_set.used_by_curr_pic_flag.resize(num_delta_pocs + 1, 0); + ref_pic_set.use_delta_flag.resize(num_delta_pocs + 1, 1); + for (uint32_t j = 0; j <= num_delta_pocs; j++) { + // used_by_curr_pic_flag: u(1) + ref_pic_set.used_by_curr_pic_flag[j] = reader.Read(); + if 
(!ref_pic_set.used_by_curr_pic_flag[j]) { + // use_delta_flag: u(1) + ref_pic_set.use_delta_flag[j] = reader.Read(); + } + } + } else { + // num_negative_pics: ue(v) + ref_pic_set.num_negative_pics = reader.ReadExponentialGolomb(); + // num_positive_pics: ue(v) + ref_pic_set.num_positive_pics = reader.ReadExponentialGolomb(); + + ref_pic_set.delta_poc_s0_minus1.resize(ref_pic_set.num_negative_pics, 0); + ref_pic_set.used_by_curr_pic_s0_flag.resize(ref_pic_set.num_negative_pics, + 0); + for (uint32_t i = 0; i < ref_pic_set.num_negative_pics; i++) { + // delta_poc_s0_minus1: ue(v) + ref_pic_set.delta_poc_s0_minus1[i] = reader.ReadExponentialGolomb(); + // used_by_curr_pic_s0_flag: u(1) + ref_pic_set.used_by_curr_pic_s0_flag[i] = reader.Read(); + } + ref_pic_set.delta_poc_s1_minus1.resize(ref_pic_set.num_positive_pics, 0); + ref_pic_set.used_by_curr_pic_s1_flag.resize(ref_pic_set.num_positive_pics, + 0); + for (uint32_t i = 0; i < ref_pic_set.num_positive_pics; i++) { + // delta_poc_s1_minus1: ue(v) + ref_pic_set.delta_poc_s1_minus1[i] = reader.ReadExponentialGolomb(); + // used_by_curr_pic_s1_flag: u(1) + ref_pic_set.used_by_curr_pic_s1_flag[i] = reader.Read(); + } + } + + return OptionalShortTermRefPicSet(ref_pic_set); } -absl::optional H265SpsParser::ParseSpsUpToVui(BitstreamReader* buffer) { +absl::optional H265SpsParser::ParseSpsInternal( + rtc::ArrayView buffer) { + BitstreamReader reader(buffer); + // Now, we need to use a bit buffer to parse through the actual HEVC SPS // format. See Section 7.3.2.2.1 ("General sequence parameter set data // syntax") of the H.265 standard for a complete description. @@ -53,41 +172,33 @@ absl::optional H265SpsParser::ParseSpsUpToVui(Bitstream // chroma_format_idc -> affects crop units // pic_{width,height}_* -> resolution of the frame in macroblocks (16x16). // frame_crop_*_offset -> crop information - SpsState sps; - // The golomb values we have to read, not just consume. - uint32_t golomb_ignored; - - // separate_colour_plane_flag is optional (assumed 0), but has implications - // about the ChromaArrayType, which modifies how we treat crop coordinates. - uint32_t separate_colour_plane_flag = 0; - - // chroma_format_idc will be ChromaArrayType if separate_colour_plane_flag is - // 0. It defaults to 1, when not specified. - uint32_t chroma_format_idc = 1; - // sps_video_parameter_set_id: u(4) - uint32_t sps_video_parameter_set_id = buffer->ReadBits(4); + uint32_t sps_video_parameter_set_id = 0; + sps_video_parameter_set_id = reader.ReadBits(4); // sps_max_sub_layers_minus1: u(3) - uint32_t sps_max_sub_layers_minus1 = buffer->ReadBits(3); + uint32_t sps_max_sub_layers_minus1 = 0; + sps_max_sub_layers_minus1 = reader.ReadBits(3); + sps.sps_max_sub_layers_minus1 = sps_max_sub_layers_minus1; + sps.sps_max_dec_pic_buffering_minus1.resize(sps_max_sub_layers_minus1 + 1, 0); // sps_temporal_id_nesting_flag: u(1) - buffer->ConsumeBits(1); + reader.ConsumeBits(1); // profile_tier_level(1, sps_max_sub_layers_minus1). We are acutally not // using them, so read/skip over it. // general_profile_space+general_tier_flag+general_prfile_idc: u(8) - buffer->ConsumeBits(1 * 8); + reader.ConsumeBits(8); // general_profile_compatabilitiy_flag[32] - buffer->ConsumeBits(4 * 8); + reader.ConsumeBits(32); // general_progressive_source_flag + interlaced_source_flag+ // non-packed_constraint flag + frame_only_constraint_flag: u(4) - buffer->ConsumeBits(4 * 8); + reader.ConsumeBits(4); // general_profile_idc decided flags or reserved. 
u(43) - buffer->ConsumeBits(43); + reader.ConsumeBits(43); // general_inbld_flag or reserved 0: u(1) - buffer->ConsumeBits(1); + reader.ConsumeBits(1); // general_level_idc: u(8) - buffer->ConsumeBits(1 * 8); + reader.ConsumeBits(8); // if max_sub_layers_minus1 >=1, read the sublayer profile information std::vector sub_layer_profile_present_flags; std::vector sub_layer_level_present_flags; @@ -95,55 +206,53 @@ absl::optional H265SpsParser::ParseSpsUpToVui(Bitstream uint32_t sub_layer_level_present = 0; for (uint32_t i = 0; i < sps_max_sub_layers_minus1; i++) { // sublayer_profile_present_flag and sublayer_level_presnet_flag: u(2) - sub_layer_profile_present = buffer->ReadBits(1); - sub_layer_level_present = buffer->ReadBits(1); + sub_layer_profile_present = reader.Read(); + sub_layer_level_present = reader.Read(); sub_layer_profile_present_flags.push_back(sub_layer_profile_present); sub_layer_level_present_flags.push_back(sub_layer_level_present); } if (sps_max_sub_layers_minus1 > 0) { for (uint32_t j = sps_max_sub_layers_minus1; j < 8; j++) { // reserved 2 bits: u(2) - buffer->ConsumeBits(2); + reader.ConsumeBits(2); } } for (uint32_t k = 0; k < sps_max_sub_layers_minus1; k++) { if (sub_layer_profile_present_flags[k]) { // // sub_layer profile_space/tier_flag/profile_idc. ignored. u(8) - buffer->ConsumeBits(1 * 8); + reader.ConsumeBits(8); // profile_compatability_flag: u(32) - buffer->ConsumeBits(4 * 8); + reader.ConsumeBits(32); // sub_layer progressive_source_flag/interlaced_source_flag/ // non_packed_constraint_flag/frame_only_constraint_flag: u(4) - buffer->ConsumeBits(4); + reader.ConsumeBits(4); // following 43-bits are profile_idc specific. We simply read/skip it. // u(43) - buffer->ConsumeBits(43); + reader.ConsumeBits(43); // 1-bit profile_idc specific inbld flag. We simply read/skip it. u(1) - buffer->ConsumeBits(1); + reader.ConsumeBits(1); } if (sub_layer_level_present_flags[k]) { // sub_layer_level_idc: u(8) - buffer->ConsumeBits(1 * 8); + reader.ConsumeBits(8); } } // sps_seq_parameter_set_id: ue(v) - golomb_ignored = buffer->ReadExponentialGolomb(); - (void)golomb_ignored; // Ignore -Wunused-but-set-variable warning. 
+ sps.id = reader.ReadExponentialGolomb(); // chrome_format_idc: ue(v) - chroma_format_idc = buffer->ReadExponentialGolomb(); - if (chroma_format_idc == 3) { + sps.chroma_format_idc = reader.ReadExponentialGolomb(); + if (sps.chroma_format_idc == 3) { // seperate_colour_plane_flag: u(1) - separate_colour_plane_flag = buffer->ReadBits(1); + sps.separate_colour_plane_flag = reader.Read(); } uint32_t pic_width_in_luma_samples = 0; uint32_t pic_height_in_luma_samples = 0; // pic_width_in_luma_samples: ue(v) - pic_width_in_luma_samples = buffer->ReadExponentialGolomb(); + pic_width_in_luma_samples = reader.ReadExponentialGolomb(); // pic_height_in_luma_samples: ue(v) - pic_height_in_luma_samples = buffer->ReadExponentialGolomb(); + pic_height_in_luma_samples = reader.ReadExponentialGolomb(); // conformance_window_flag: u(1) - uint32_t conformance_window_flag = 0; - conformance_window_flag = buffer->ReadBits(1); + bool conformance_window_flag = reader.Read(); uint32_t conf_win_left_offset = 0; uint32_t conf_win_right_offset = 0; @@ -151,36 +260,145 @@ absl::optional H265SpsParser::ParseSpsUpToVui(Bitstream uint32_t conf_win_bottom_offset = 0; if (conformance_window_flag) { // conf_win_left_offset: ue(v) - conf_win_left_offset = buffer->ReadExponentialGolomb(); + conf_win_left_offset = reader.ReadExponentialGolomb(); // conf_win_right_offset: ue(v) - conf_win_right_offset = buffer->ReadExponentialGolomb(); + conf_win_right_offset = reader.ReadExponentialGolomb(); // conf_win_top_offset: ue(v) - conf_win_top_offset = buffer->ReadExponentialGolomb(); + conf_win_top_offset = reader.ReadExponentialGolomb(); // conf_win_bottom_offset: ue(v) - conf_win_bottom_offset = buffer->ReadExponentialGolomb(); + conf_win_bottom_offset = reader.ReadExponentialGolomb(); + } + + // bit_depth_luma_minus8: ue(v) + reader.ReadExponentialGolomb(); + // bit_depth_chroma_minus8: ue(v) + reader.ReadExponentialGolomb(); + // log2_max_pic_order_cnt_lsb_minus4: ue(v) + sps.log2_max_pic_order_cnt_lsb_minus4 = reader.ReadExponentialGolomb(); + uint32_t sps_sub_layer_ordering_info_present_flag = 0; + // sps_sub_layer_ordering_info_present_flag: u(1) + sps_sub_layer_ordering_info_present_flag = reader.Read(); + for (uint32_t i = (sps_sub_layer_ordering_info_present_flag != 0) + ? 
0 + : sps_max_sub_layers_minus1; + i <= sps_max_sub_layers_minus1; i++) { + // sps_max_dec_pic_buffering_minus1: ue(v) + sps.sps_max_dec_pic_buffering_minus1[i] = reader.ReadExponentialGolomb(); + // sps_max_num_reorder_pics: ue(v) + reader.ReadExponentialGolomb(); + // sps_max_latency_increase_plus1: ue(v) + reader.ReadExponentialGolomb(); + } + // log2_min_luma_coding_block_size_minus3: ue(v) + sps.log2_min_luma_coding_block_size_minus3 = reader.ReadExponentialGolomb(); + // log2_diff_max_min_luma_coding_block_size: ue(v) + sps.log2_diff_max_min_luma_coding_block_size = reader.ReadExponentialGolomb(); + // log2_min_luma_transform_block_size_minus2: ue(v) + reader.ReadExponentialGolomb(); + // log2_diff_max_min_luma_transform_block_size: ue(v) + reader.ReadExponentialGolomb(); + // max_transform_hierarchy_depth_inter: ue(v) + reader.ReadExponentialGolomb(); + // max_transform_hierarchy_depth_intra: ue(v) + reader.ReadExponentialGolomb(); + // scaling_list_enabled_flag: u(1) + bool scaling_list_enabled_flag = reader.Read(); + if (scaling_list_enabled_flag) { + // sps_scaling_list_data_present_flag: u(1) + bool sps_scaling_list_data_present_flag = reader.Read(); + if (sps_scaling_list_data_present_flag) { + // scaling_list_data() + if (!ParseScalingListData(reader)) { + return OptionalSps(); + } + } + } + + // amp_enabled_flag: u(1) + reader.ConsumeBits(1); + // sample_adaptive_offset_enabled_flag: u(1) + sps.sample_adaptive_offset_enabled_flag = reader.Read(); + // pcm_enabled_flag: u(1) + bool pcm_enabled_flag = reader.Read(); + if (pcm_enabled_flag) { + // pcm_sample_bit_depth_luma_minus1: u(4) + reader.ConsumeBits(4); + // pcm_sample_bit_depth_chroma_minus1: u(4) + reader.ConsumeBits(4); + // log2_min_pcm_luma_coding_block_size_minus3: ue(v) + reader.ReadExponentialGolomb(); + // log2_diff_max_min_pcm_luma_coding_block_size: ue(v) + reader.ReadExponentialGolomb(); + // pcm_loop_filter_disabled_flag: u(1) + reader.ConsumeBits(1); } + // num_short_term_ref_pic_sets: ue(v) + sps.num_short_term_ref_pic_sets = reader.ReadExponentialGolomb(); + sps.short_term_ref_pic_set.resize(sps.num_short_term_ref_pic_sets); + for (uint32_t st_rps_idx = 0; st_rps_idx < sps.num_short_term_ref_pic_sets; + st_rps_idx++) { + // st_ref_pic_set() + OptionalShortTermRefPicSet ref_pic_set = + ParseShortTermRefPicSet(st_rps_idx, sps.num_short_term_ref_pic_sets, + sps.short_term_ref_pic_set, sps, reader); + if (ref_pic_set) { + sps.short_term_ref_pic_set[st_rps_idx] = *ref_pic_set; + } else { + return OptionalSps(); + } + } + + // long_term_ref_pics_present_flag: u(1) + sps.long_term_ref_pics_present_flag = reader.Read(); + if (sps.long_term_ref_pics_present_flag) { + // num_long_term_ref_pics_sps: ue(v) + sps.num_long_term_ref_pics_sps = reader.ReadExponentialGolomb(); + sps.used_by_curr_pic_lt_sps_flag.resize(sps.num_long_term_ref_pics_sps, 0); + for (uint32_t i = 0; i < sps.num_long_term_ref_pics_sps; i++) { + // lt_ref_pic_poc_lsb_sps: u(v) + uint32_t lt_ref_pic_poc_lsb_sps_bits = + sps.log2_max_pic_order_cnt_lsb_minus4 + 4; + reader.ConsumeBits(lt_ref_pic_poc_lsb_sps_bits); + // used_by_curr_pic_lt_sps_flag: u(1) + sps.used_by_curr_pic_lt_sps_flag[i] = reader.Read(); + } + } + + // sps_temporal_mvp_enabled_flag: u(1) + sps.sps_temporal_mvp_enabled_flag = reader.Read(); + // Far enough! We don't use the rest of the SPS. 
sps.vps_id = sps_video_parameter_set_id; + sps.pic_width_in_luma_samples = pic_width_in_luma_samples; + sps.pic_height_in_luma_samples = pic_height_in_luma_samples; + // Start with the resolution determined by the pic_width/pic_height fields. sps.width = pic_width_in_luma_samples; sps.height = pic_height_in_luma_samples; if (conformance_window_flag) { - int sub_width_c = ((1 == chroma_format_idc) || (2 == chroma_format_idc)) && - (0 == separate_colour_plane_flag) - ? 2 - : 1; + int sub_width_c = + ((1 == sps.chroma_format_idc) || (2 == sps.chroma_format_idc)) && + (0 == sps.separate_colour_plane_flag) + ? 2 + : 1; int sub_height_c = - (1 == chroma_format_idc) && (0 == separate_colour_plane_flag) ? 2 : 1; + (1 == sps.chroma_format_idc) && (0 == sps.separate_colour_plane_flag) + ? 2 + : 1; // the offset includes the pixel within conformance window. so don't need to // +1 as per spec sps.width -= sub_width_c * (conf_win_right_offset + conf_win_left_offset); sps.height -= sub_height_c * (conf_win_top_offset + conf_win_bottom_offset); } + if (!reader.Ok()) { + return absl::nullopt; + } + return OptionalSps(sps); } diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.h b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.h index a6bc48ccc4bd..56292be40e69 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_sps_parser.h @@ -11,30 +11,58 @@ #ifndef COMMON_VIDEO_H265_H265_SPS_PARSER_H_ #define COMMON_VIDEO_H265_H265_SPS_PARSER_H_ +#include + #include "absl/types/optional.h" +#include "api/array_view.h" +#include "rtc_base/bitstream_reader.h" -namespace webrtc { +namespace rtc { +class BitBuffer; +} -class BitstreamReader; +namespace webrtc { // A class for parsing out sequence parameter set (SPS) data from an H265 NALU. class H265SpsParser { public: + struct ShortTermRefPicSet { + ShortTermRefPicSet(); + + uint32_t inter_ref_pic_set_prediction_flag = 0; + std::vector used_by_curr_pic_flag; + std::vector use_delta_flag; + uint32_t num_negative_pics = 0; + uint32_t num_positive_pics = 0; + std::vector delta_poc_s0_minus1; + std::vector used_by_curr_pic_s0_flag; + std::vector delta_poc_s1_minus1; + std::vector used_by_curr_pic_s1_flag; + }; + // The parsed state of the SPS. Only some select values are stored. // Add more as they are actually needed. 
struct SpsState { SpsState(); - uint32_t width = 0; - uint32_t height = 0; - uint32_t delta_pic_order_always_zero_flag = 0; + uint32_t sps_max_sub_layers_minus1; + uint32_t chroma_format_idc = 0; uint32_t separate_colour_plane_flag = 0; - uint32_t frame_mbs_only_flag = 0; - uint32_t log2_max_frame_num_minus4 = 0; + uint32_t pic_width_in_luma_samples = 0; + uint32_t pic_height_in_luma_samples = 0; uint32_t log2_max_pic_order_cnt_lsb_minus4 = 0; - uint32_t pic_order_cnt_type = 0; - uint32_t max_num_ref_frames = 0; - uint32_t vui_params_present = 0; + std::vector sps_max_dec_pic_buffering_minus1; + uint32_t log2_min_luma_coding_block_size_minus3 = 0; + uint32_t log2_diff_max_min_luma_coding_block_size = 0; + uint32_t sample_adaptive_offset_enabled_flag = 0; + uint32_t num_short_term_ref_pic_sets = 0; + std::vector short_term_ref_pic_set; + uint32_t long_term_ref_pics_present_flag = 0; + uint32_t num_long_term_ref_pics_sps = 0; + std::vector used_by_curr_pic_lt_sps_flag; + uint32_t sps_temporal_mvp_enabled_flag = 0; + uint32_t width = 0; + uint32_t height = 0; uint32_t id = 0; uint32_t vps_id = 0; }; @@ -42,10 +70,18 @@ class H265SpsParser { // Unpack RBSP and parse SPS state from the supplied buffer. static absl::optional ParseSps(const uint8_t* data, size_t length); + static bool ParseScalingListData(BitstreamReader& reader); + + static absl::optional ParseShortTermRefPicSet( + uint32_t st_rps_idx, uint32_t num_short_term_ref_pic_sets, + const std::vector& ref_pic_sets, + SpsState& sps, BitstreamReader& reader); + protected: - // Parse the SPS state, up till the VUI part, for a bit buffer where RBSP - // decoding has already been performed. - static absl::optional ParseSpsUpToVui(BitstreamReader* buffer); + // Parse the SPS state, for a bit buffer where RBSP decoding has already been + // performed. + static absl::optional ParseSpsInternal( + rtc::ArrayView buffer); }; } // namespace webrtc diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.cc index 99da2ec831e4..4823880a166f 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.cc @@ -8,28 +8,22 @@ * be found in the AUTHORS file in the root of the source tree. */ +#include "common_video/h265/h265_vps_parser.h" + #include #include #include "common_video/h265/h265_common.h" -#include "common_video/h265/h265_vps_parser.h" -#include "rtc_base/bitstream_reader.h" +#include "rtc_base/bit_buffer.h" #include "rtc_base/logging.h" -namespace { -typedef absl::optional OptionalVps; - -#define RETURN_EMPTY_ON_FAIL(x) \ - if (!(x)) { \ - return OptionalVps(); \ - } -} // namespace +#include "rtc_base/bitstream_reader.h" namespace webrtc { H265VpsParser::VpsState::VpsState() = default; -// General note: this is based off the 02/2018 version of the H.265 standard. +// General note: this is based off the 06/2019 version of the H.265 standard. 
// You can find it on this page: // http://www.itu.int/rec/T-REC-H.265 @@ -37,12 +31,13 @@ H265VpsParser::VpsState::VpsState() = default; absl::optional H265VpsParser::ParseVps( const uint8_t* data, size_t length) { - std::vector unpacked_buffer = H265::ParseRbsp(data, length); - BitstreamReader bit_buffer(unpacked_buffer); - return ParseInternal(&bit_buffer); + return ParseInternal(H265::ParseRbsp(data, length)); } -absl::optional H265VpsParser::ParseInternal(BitstreamReader* buffer) { +absl::optional H265VpsParser::ParseInternal( + rtc::ArrayView buffer) { + BitstreamReader reader(buffer); + // Now, we need to use a bit buffer to parse through the actual HEVC VPS // format. See Section 7.3.2.1 ("Video parameter set RBSP syntax") of the // H.265 standard for a complete description. @@ -50,11 +45,13 @@ absl::optional H265VpsParser::ParseInternal(BitstreamRe VpsState vps; // vps_video_parameter_set_id: u(4) - uint32_t vps_video_parameter_set_id = buffer->ReadBits(4); + vps.id = reader.ReadBits(4); + + if (!reader.Ok()) { + return absl::nullopt; + } - vps.id = vps_video_parameter_set_id; - vps.id = 0; - return OptionalVps(vps); + return vps; } } // namespace webrtc diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.h b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.h index 2508a91c6118..a17d0ea8224b 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/common_video/h265/h265_vps_parser.h @@ -12,10 +12,13 @@ #define COMMON_VIDEO_H265_H265_VPS_PARSER_H_ #include "absl/types/optional.h" +#include "api/array_view.h" -namespace webrtc { +namespace rtc { +class BitBuffer; +} -class BitstreamReader; +namespace webrtc { // A class for parsing out sequence parameter set (VPS) data from an H265 NALU. class H265VpsParser { @@ -34,7 +37,8 @@ class H265VpsParser { protected: // Parse the VPS state, for a bit buffer where RBSP decoding has already been // performed. - static absl::optional ParseInternal(BitstreamReader* bit_buffer); + static absl::optional ParseInternal( + rtc::ArrayView buffer); }; } // namespace webrtc diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc index 75f2fdebcf68..ebdb86bee165 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc @@ -261,6 +261,10 @@ VideoCodecType GetRuntimeCodecType(rtclog2::FrameDecodedEvents::Codec codec) { return VideoCodecType::kVideoCodecAV1; case rtclog2::FrameDecodedEvents::CODEC_H264: return VideoCodecType::kVideoCodecH264; +#ifdef WEBRTC_USE_H265 + case rtclog2::FrameDecodedEvents::CODEC_H265: + return VideoCodecType::kVideoCodecH265; +#endif case rtclog2::FrameDecodedEvents::CODEC_UNKNOWN: RTC_LOG(LS_ERROR) << "Unknown codec type. 
Assuming " "VideoCodecType::kVideoCodecMultiplex"; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.cc index 911f510afef5..2a2c457e283b 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.cc @@ -103,6 +103,7 @@ const char kVp8CodecName[] = "VP8"; const char kVp9CodecName[] = "VP9"; const char kAv1CodecName[] = "AV1"; const char kH264CodecName[] = "H264"; +const char kH265CodecName[] = "H265"; // RFC 6184 RTP Payload Format for H.264 video const char kH264FmtpProfileLevelId[] = "profile-level-id"; @@ -112,6 +113,13 @@ const char kH264FmtpSpropParameterSets[] = "sprop-parameter-sets"; const char kH264FmtpSpsPpsIdrInKeyframe[] = "sps-pps-idr-in-keyframe"; const char kH264ProfileLevelConstrainedBaseline[] = "42e01f"; const char kH264ProfileLevelConstrainedHigh[] = "640c1f"; +#ifdef WEBRTC_USE_H265 +// RFC 7798 RTP Payload Format for H.265 video +const char kH265FmtpProfileSpace[] = "profile-space"; +const char kH265FmtpProfileId[] = "profile-id"; +const char kH265FmtpTierFlag[] = "tier-flag"; +const char kH265FmtpLevelId[] = "level-id"; +#endif const char kVP9ProfileId[] = "profile-id"; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.h b/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.h index ca3213ea0f15..ea4a245eb51f 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/media/base/media_constants.h @@ -123,6 +123,7 @@ RTC_EXPORT extern const char kVp8CodecName[]; RTC_EXPORT extern const char kVp9CodecName[]; RTC_EXPORT extern const char kAv1CodecName[]; RTC_EXPORT extern const char kH264CodecName[]; +RTC_EXPORT extern const char kH265CodecName[]; // RFC 6184 RTP Payload Format for H.264 video RTC_EXPORT extern const char kH264FmtpProfileLevelId[]; @@ -135,6 +136,13 @@ extern const char kH264ProfileLevelConstrainedHigh[]; extern const char kVP9ProfileId[]; +#ifdef WEBRTC_USE_H265 +// RFC 7798 RTP Payload Format for H.265 video +RTC_EXPORT extern const char kH265FmtpProfileSpace[]; +RTC_EXPORT extern const char kH265FmtpProfileId[]; +RTC_EXPORT extern const char kH265FmtpTierFlag[]; +RTC_EXPORT extern const char kH265FmtpLevelId[]; +#endif extern const int kDefaultVideoMaxFramerate; extern const size_t kConferenceMaxNumSpatialLayers; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/media/engine/internal_decoder_factory.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/media/engine/internal_decoder_factory.cc index e761fd60c885..5a7727e8eb59 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/media/engine/internal_decoder_factory.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/media/engine/internal_decoder_factory.cc @@ -48,6 +48,9 @@ std::vector InternalDecoderFactory::GetSupportedFormats() formats.push_back(format); for (const SdpVideoFormat& h264_format : SupportedH264DecoderCodecs()) formats.push_back(h264_format); +#ifdef WEBRTC_USE_H265 + formats.push_back(SdpVideoFormat(cricket::kH265CodecName)); +#endif if (kDav1dIsIncluded) { formats.push_back(SdpVideoFormat(cricket::kAv1CodecName)); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc index 50e492a49cda..69ff30a1adf9 100644 --- 
a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc @@ -13,7 +13,7 @@ #include #include "api/video/video_codec_type.h" -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 #include "modules/rtp_rtcp/source/rtp_format_h265.h" #endif #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" @@ -22,6 +22,7 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" namespace webrtc { @@ -30,9 +31,9 @@ std::unique_ptr CreateVideoRtpDepacketizer( switch (codec) { case kVideoCodecH264: return std::make_unique(); -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case kVideoCodecH265: - return std::make_unique(); + return std::make_unique(); #endif case kVideoCodecVP8: return std::make_unique(); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format.cc index 7550b70f69a9..a07337e69fdc 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format.cc @@ -14,11 +14,13 @@ #include "absl/types/variant.h" #include "modules/rtp_rtcp/source/rtp_format_h264.h" +#include "modules/rtp_rtcp/source/rtp_format_h265.h" #include "modules/rtp_rtcp/source/rtp_format_video_generic.h" #include "modules/rtp_rtcp/source/rtp_format_vp8.h" #include "modules/rtp_rtcp/source/rtp_format_vp9.h" #include "modules/rtp_rtcp/source/rtp_packetizer_av1.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" +#include "modules/video_coding/codecs/h265/include/h265_globals.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" #include "rtc_base/checks.h" @@ -43,6 +45,14 @@ std::unique_ptr RtpPacketizer::Create( return std::make_unique(payload, limits, h264.packetization_mode); } +#ifdef WEBRTC_USE_H265 + case kVideoCodecH265: { + const auto& h265 = + absl::get(rtp_video_header.video_type_header); + return absl::make_unique(payload, limits, + h265.packetization_mode); + } +#endif case kVideoCodecVP8: { const auto& vp8 = absl::get(rtp_video_header.video_type_header); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.cc index 9ae7052dc1f1..b354bb84f16e 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.cc @@ -1,11 +1,13 @@ /* * Intel License - * See https://01.org/open-webrtc-toolkit - * This is released under Apache License 2.0 and it is free for both academic and commercial use. 
*/ +#include "modules/rtp_rtcp/source/rtp_format_h265.h" + #include +#include "absl/types/optional.h" +#include "absl/types/variant.h" #include "common_video/h264/h264_common.h" #include "common_video/h265/h265_common.h" #include "common_video/h265/h265_pps_parser.h" @@ -13,10 +15,9 @@ #include "common_video/h265/h265_vps_parser.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtp_format_h265.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/video_coding/codecs/h265/include/h265_globals.h" #include "rtc_base/logging.h" + using namespace rtc; namespace webrtc { @@ -59,8 +60,6 @@ static const size_t kHevcNalHeaderSize = 2; // H.265's FU is constructed of 2-byte payload header, and 1-byte FU header static const size_t kHevcFuHeaderSize = 1; static const size_t kHevcLengthFieldSize = 2; -static const size_t kHevcApHeaderSize = - kHevcNalHeaderSize + kHevcLengthFieldSize; enum HevcNalHdrMasks { kHevcFBit = 0x80, @@ -75,44 +74,20 @@ enum HevcNalHdrMasks { // Bit masks for FU headers. enum HevcFuDefs { kHevcSBit = 0x80, kHevcEBit = 0x40, kHevcFuTypeBit = 0x3F }; -// TODO(pbos): Avoid parsing this here as well as inside the jitter buffer. -bool ParseApStartOffsets(const uint8_t* nalu_ptr, - size_t length_remaining, - std::vector* offsets) { - size_t offset = 0; - while (length_remaining > 0) { - // Buffer doesn't contain room for additional nalu length. - if (length_remaining < sizeof(uint16_t)) - return false; - uint16_t nalu_size = ByteReader::ReadBigEndian(nalu_ptr); - nalu_ptr += sizeof(uint16_t); - length_remaining -= sizeof(uint16_t); - if (nalu_size > length_remaining) - return false; - nalu_ptr += nalu_size; - length_remaining -= nalu_size; - - offsets->push_back(offset + kHevcApHeaderSize); - offset += kHevcLengthFieldSize + nalu_size; - } - return true; -} - } // namespace -RtpPacketizerH265::RtpPacketizerH265( - rtc::ArrayView payload, - PayloadSizeLimits limits, - H265PacketizationMode packetization_mode) - : limits_(limits), - num_packets_left_(0) { +RtpPacketizerH265::RtpPacketizerH265(rtc::ArrayView payload, + PayloadSizeLimits limits, + H265PacketizationMode packetization_mode) + : limits_(limits), num_packets_left_(0) { // Guard against uninitialized memory in packetization_mode. RTC_CHECK(packetization_mode == H265PacketizationMode::NonInterleaved || packetization_mode == H265PacketizationMode::SingleNalUnit); for (const auto& nalu : - H265::FindNaluIndices(payload.data(), payload.size())) { - input_fragments_.push_back(Fragment(payload.data() + nalu.payload_start_offset, nalu.payload_size)); + H264::FindNaluIndices(payload.data(), payload.size())) { + input_fragments_.push_back( + payload.subview(nalu.payload_start_offset, nalu.payload_size)); } if (!GeneratePackets(packetization_mode)) { @@ -132,18 +107,12 @@ size_t RtpPacketizerH265::NumPackets() const { return num_packets_left_; } -RtpPacketizerH265::Fragment::Fragment(const uint8_t* buffer, size_t length) - : buffer(buffer), length(length) {} -RtpPacketizerH265::Fragment::Fragment(const Fragment& fragment) - : buffer(fragment.buffer), length(fragment.length) {} - - bool RtpPacketizerH265::GeneratePackets( H265PacketizationMode packetization_mode) { // For HEVC we follow non-interleaved mode for the packetization, // and don't support single-nalu mode at present. 
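As a rough usage sketch only (assuming the RtpPacketizer interfaces shown in this hunk; the function name and payload budget are illustrative and not part of the patch):

#include "api/array_view.h"
#include "modules/rtp_rtcp/source/rtp_format_h265.h"
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"

// Sketch: packetize one encoded HEVC access unit into RTP packets.
void PacketizeH265Frame(rtc::ArrayView<const uint8_t> encoded_frame) {
  webrtc::RtpPacketizer::PayloadSizeLimits limits;
  limits.max_payload_len = 1200;  // Illustrative payload budget.
  webrtc::RtpPacketizerH265 packetizer(
      encoded_frame, limits, webrtc::H265PacketizationMode::NonInterleaved);
  for (size_t i = 0, n = packetizer.NumPackets(); i < n; ++i) {
    webrtc::RtpPacketToSend packet(/*extensions=*/nullptr);
    if (!packetizer.NextPacket(&packet))
      break;
    // Hand |packet| to the RTP sender here.
  }
}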
for (size_t i = 0; i < input_fragments_.size();) { - int fragment_len = input_fragments_[i].length; + int fragment_len = input_fragments_[i].size(); int single_packet_capacity = limits_.max_payload_len; if (input_fragments_.size() == 1) single_packet_capacity -= limits_.single_packet_reduction_len; @@ -168,7 +137,7 @@ bool RtpPacketizerH265::GeneratePackets( bool RtpPacketizerH265::PacketizeFu(size_t fragment_index) { // Fragment payload into packets (FU). // Strip out the original header and leave room for the FU header. - const Fragment& fragment = input_fragments_[fragment_index]; + rtc::ArrayView fragment = input_fragments_[fragment_index]; PayloadSizeLimits limits = limits_; limits.max_payload_len -= kHevcFuHeaderSize + kHevcNalHeaderSize; @@ -191,7 +160,7 @@ bool RtpPacketizerH265::PacketizeFu(size_t fragment_index) { limits.last_packet_reduction_len = 0; // Strip out the original header. - size_t payload_left = fragment.length - kHevcNalHeaderSize; + size_t payload_left = fragment.size() - kHevcNalHeaderSize; int offset = kHevcNalHeaderSize; std::vector payload_sizes = SplitAboutEqually(payload_left, limits); @@ -201,8 +170,8 @@ bool RtpPacketizerH265::PacketizeFu(size_t fragment_index) { for (size_t i = 0; i < payload_sizes.size(); ++i) { int packet_length = payload_sizes[i]; RTC_CHECK_GT(packet_length, 0); - uint16_t header = (fragment.buffer[0] << 8) | fragment.buffer[1]; - packets_.push(PacketUnit(Fragment(fragment.buffer + offset, packet_length), + uint16_t header = (fragment[0] << 8) | fragment[1]; + packets_.push(PacketUnit(fragment.subview(offset, packet_length), /*first_fragment=*/i == 0, /*last_fragment=*/i == payload_sizes.size() - 1, false, header)); @@ -214,7 +183,6 @@ bool RtpPacketizerH265::PacketizeFu(size_t fragment_index) { return true; } - bool RtpPacketizerH265::PacketizeSingleNalu(size_t fragment_index) { // Add a single NALU to the queue, no aggregation. size_t payload_size_left = limits_.max_payload_len; @@ -224,18 +192,18 @@ bool RtpPacketizerH265::PacketizeSingleNalu(size_t fragment_index) { payload_size_left -= limits_.first_packet_reduction_len; else if (fragment_index + 1 == input_fragments_.size()) payload_size_left -= limits_.last_packet_reduction_len; - const Fragment* fragment = &input_fragments_[fragment_index]; - if (payload_size_left < fragment->length) { + rtc::ArrayView fragment = input_fragments_[fragment_index]; + if (payload_size_left < fragment.size()) { RTC_LOG(LS_ERROR) << "Failed to fit a fragment to packet in SingleNalu " "packetization mode. 
Payload size left " << payload_size_left << ", fragment length " - << fragment->length << ", packet capacity " + << fragment.size() << ", packet capacity " << limits_.max_payload_len; return false; } - RTC_CHECK_GT(fragment->length, 0u); - packets_.push(PacketUnit(*fragment, true /* first */, true /* last */, - false /* aggregated */, fragment->buffer[0])); + RTC_CHECK_GT(fragment.size(), 0u); + packets_.push(PacketUnit(fragment, true /* first */, true /* last */, + false /* aggregated */, fragment[0])); ++num_packets_left_; return true; } @@ -249,12 +217,12 @@ int RtpPacketizerH265::PacketizeAp(size_t fragment_index) { payload_size_left -= limits_.first_packet_reduction_len; int aggregated_fragments = 0; size_t fragment_headers_length = 0; - const Fragment* fragment = &input_fragments_[fragment_index]; - RTC_CHECK_GE(payload_size_left, fragment->length); + rtc::ArrayView fragment = input_fragments_[fragment_index]; + RTC_CHECK_GE(payload_size_left, fragment.size()); ++num_packets_left_; auto payload_size_needed = [&] { - size_t fragment_size = fragment->length + fragment_headers_length; + size_t fragment_size = fragment.size() + fragment_headers_length; if (input_fragments_.size() == 1) { // Single fragment, single packet, payload_size_left already adjusted // with limits_.single_packet_reduction_len. @@ -268,10 +236,10 @@ int RtpPacketizerH265::PacketizeAp(size_t fragment_index) { }; while (payload_size_left >= payload_size_needed()) { - RTC_CHECK_GT(fragment->length, 0); - packets_.push(PacketUnit(*fragment, aggregated_fragments == 0, false, true, - fragment->buffer[0])); - payload_size_left -= fragment->length; + RTC_CHECK_GT(fragment.size(), 0); + packets_.push(PacketUnit(fragment, aggregated_fragments == 0, false, true, + fragment[0])); + payload_size_left -= fragment.size(); payload_size_left -= fragment_headers_length; fragment_headers_length = kHevcLengthFieldSize; @@ -286,7 +254,7 @@ int RtpPacketizerH265::PacketizeAp(size_t fragment_index) { ++fragment_index; if (fragment_index == input_fragments_.size()) break; - fragment = &input_fragments_[fragment_index]; + fragment = input_fragments_[fragment_index]; } RTC_CHECK_GT(aggregated_fragments, 0); packets_.back().last_fragment = true; @@ -304,9 +272,9 @@ bool RtpPacketizerH265::NextPacket(RtpPacketToSend* rtp_packet) { if (packet.first_fragment && packet.last_fragment) { // Single NAL unit packet. - size_t bytes_to_send = packet.source_fragment.length; + size_t bytes_to_send = packet.source_fragment.size(); uint8_t* buffer = rtp_packet->AllocatePayload(bytes_to_send); - memcpy(buffer, packet.source_fragment.buffer, bytes_to_send); + memcpy(buffer, packet.source_fragment.data(), bytes_to_send); packets_.pop(); input_fragments_.pop_front(); } else if (packet.aggregated) { @@ -341,12 +309,12 @@ void RtpPacketizerH265::NextAggregatePacket(RtpPacketToSend* rtp_packet, bool is_last_fragment = packet->last_fragment; while (packet->aggregated) { // Add NAL unit length field. - const Fragment& fragment = packet->source_fragment; - ByteWriter::WriteBigEndian(&buffer[index], fragment.length); + rtc::ArrayView fragment = packet->source_fragment; + ByteWriter::WriteBigEndian(&buffer[index], fragment.size()); index += kHevcLengthFieldSize; // Add NAL unit. 
- memcpy(&buffer[index], fragment.buffer, fragment.length); - index += fragment.length; + memcpy(&buffer[index], fragment.data(), fragment.size()); + index += fragment.size(); packets_.pop(); input_fragments_.pop_front(); if (is_last_fragment) @@ -376,273 +344,22 @@ void RtpPacketizerH265::NextFragmentPacket(RtpPacketToSend* rtp_packet) { // Now update payload_hdr_h with FU type. payload_hdr_h = (payload_hdr_h & kHevcTypeMaskN) | (kHevcFu << 1) | layer_id_h; - const Fragment& fragment = packet->source_fragment; + rtc::ArrayView fragment = packet->source_fragment; uint8_t* buffer = rtp_packet->AllocatePayload( - kHevcFuHeaderSize + kHevcNalHeaderSize + fragment.length); + kHevcFuHeaderSize + kHevcNalHeaderSize + fragment.size()); + RTC_CHECK(buffer); buffer[0] = payload_hdr_h; buffer[1] = payload_hdr_l; buffer[2] = fu_header; if (packet->last_fragment) { - memcpy(buffer + kHevcFuHeaderSize + kHevcNalHeaderSize, fragment.buffer, - fragment.length); + memcpy(buffer + kHevcFuHeaderSize + kHevcNalHeaderSize, fragment.data(), + fragment.size()); } else { - memcpy(buffer + kHevcFuHeaderSize + kHevcNalHeaderSize, fragment.buffer, - fragment.length); - } + memcpy(buffer + kHevcFuHeaderSize + kHevcNalHeaderSize, fragment.data(), + fragment.size()); + } packets_.pop(); } -absl::optional VideoRtpDepacketizerH265::Parse( - rtc::CopyOnWriteBuffer rtp_payload) { - size_t payload_data_length = rtp_payload.size(); - if (payload_data_length == 0) { - RTC_LOG(LS_ERROR) << "Empty payload."; - return absl::nullopt; - } - - ParsedRtpPayload parsed_payload; - - const uint8_t* payload_data = rtp_payload.data(); - - offset_ = 0; - length_ = payload_data_length; - modified_buffer_.reset(); - - uint8_t nal_type = (payload_data[0] & kHevcTypeMask) >> 1; - parsed_payload.video_header - .video_type_header.emplace(); - - if (nal_type == H265::NaluType::kFU) { - // Fragmented NAL units (FU-A). - if (!ParseFuNalu(&parsed_payload, payload_data)) - return absl::nullopt; - } else { - // We handle STAP-A and single NALU's the same way here. The jitter buffer - // will depacketize the STAP-A into NAL units later. - // TODO(sprang): Parse STAP-A offsets here and store in fragmentation vec. - if (!ProcessApOrSingleNalu(&parsed_payload, payload_data)) - return absl::nullopt; - } - - const uint8_t* payload = - modified_buffer_ ? modified_buffer_->data() : payload_data; - - parsed_payload.video_payload = { payload + offset_, length_ }; - return parsed_payload; -} - -bool VideoRtpDepacketizerH265::ProcessApOrSingleNalu( - ParsedRtpPayload* parsed_payload, - const uint8_t* payload_data) { - parsed_payload->video_header.width = 0; - parsed_payload->video_header.height = 0; - parsed_payload->video_header.codec = kVideoCodecH265; - parsed_payload->video_header.is_first_packet_in_frame = true; - auto& h265_header = absl::get( - parsed_payload->video_header.video_type_header); - - const uint8_t* nalu_start = payload_data + kHevcNalHeaderSize; - const size_t nalu_length = length_ - kHevcNalHeaderSize; - uint8_t nal_type = (payload_data[0] & kHevcTypeMask) >> 1; - std::vector nalu_start_offsets; - if (nal_type == H265::NaluType::kAP) { - // Skip the StapA header (StapA NAL type + length). 
- if (length_ <= kHevcApHeaderSize) { - RTC_LOG(LS_ERROR) << "AP header truncated."; - return false; - } - - if (!ParseApStartOffsets(nalu_start, nalu_length, &nalu_start_offsets)) { - RTC_LOG(LS_ERROR) << "AP packet with incorrect NALU packet lengths."; - return false; - } - - h265_header.packetization_type = kH265AP; - // nal_type = (payload_data[kHevcApHeaderSize] & kHevcTypeMask) >> 1; - } else { - h265_header.packetization_type = kH265SingleNalu; - nalu_start_offsets.push_back(0); - } - h265_header.nalu_type = nal_type; - parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; - - nalu_start_offsets.push_back(length_ + kHevcLengthFieldSize); // End offset. - for (size_t i = 0; i < nalu_start_offsets.size() - 1; ++i) { - size_t start_offset = nalu_start_offsets[i]; - // End offset is actually start offset for next unit, excluding length field - // so remove that from this units length. - size_t end_offset = nalu_start_offsets[i + 1] - kHevcLengthFieldSize; - if (end_offset - start_offset < kHevcNalHeaderSize) { // Same as H.264. - RTC_LOG(LS_ERROR) << "AP packet too short"; - return false; - } - - H265NaluInfo nalu; - nalu.type = (payload_data[start_offset] & kHevcTypeMask) >> 1; - nalu.vps_id = -1; - nalu.sps_id = -1; - nalu.pps_id = -1; - start_offset += kHevcNalHeaderSize; - switch (nalu.type) { - case H265::NaluType::kVps: { - absl::optional vps = H265VpsParser::ParseVps( - &payload_data[start_offset], end_offset - start_offset); - if (vps) { - nalu.vps_id = vps->id; - } else { - RTC_LOG(LS_WARNING) << "Failed to parse VPS id from VPS slice."; - } - break; - } - case H265::NaluType::kSps: { - // Check if VUI is present in SPS and if it needs to be modified to - // avoid excessive decoder latency. - - // Copy any previous data first (likely just the first header). - std::unique_ptr output_buffer(new rtc::Buffer()); - if (start_offset) - output_buffer->AppendData(payload_data, start_offset); - - absl::optional sps = H265SpsParser::ParseSps( - &payload_data[start_offset], end_offset - start_offset); - - if (sps) { - parsed_payload->video_header.width = sps->width; - parsed_payload->video_header.height = sps->height; - nalu.sps_id = sps->id; - nalu.vps_id = sps->vps_id; - } else { - RTC_LOG(LS_WARNING) - << "Failed to parse SPS and VPS id from SPS slice."; - } - parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; - break; - } - case H265::NaluType::kPps: { - uint32_t pps_id; - uint32_t sps_id; - if (H265PpsParser::ParsePpsIds(&payload_data[start_offset], - end_offset - start_offset, &pps_id, - &sps_id)) { - nalu.pps_id = pps_id; - nalu.sps_id = sps_id; - } else { - RTC_LOG(LS_WARNING) - << "Failed to parse PPS id and SPS id from PPS slice."; - } - break; - } - case H265::NaluType::kIdrWRadl: - case H265::NaluType::kIdrNLp: - case H265::NaluType::kCra: - parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; - case H265::NaluType::kTrailN: - case H265::NaluType::kTrailR: { - absl::optional pps_id = - H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( - &payload_data[start_offset], end_offset - start_offset, - nalu.type); - if (pps_id) { - nalu.pps_id = *pps_id; - } else { - RTC_LOG(LS_WARNING) << "Failed to parse PPS id from slice of type: " - << static_cast(nalu.type); - } - break; - } - // Slices below don't contain SPS or PPS ids. 
- case H265::NaluType::kAud: - case H265::NaluType::kTsaN: - case H265::NaluType::kTsaR: - case H265::NaluType::kStsaN: - case H265::NaluType::kStsaR: - case H265::NaluType::kRadlN: - case H265::NaluType::kRadlR: - case H265::NaluType::kBlaWLp: - case H265::NaluType::kBlaWRadl: - case H265::NaluType::kPrefixSei: - case H265::NaluType::kSuffixSei: - break; - case H265::NaluType::kAP: - case H265::NaluType::kFU: - RTC_LOG(LS_WARNING) << "Unexpected AP or FU received."; - return false; - } - - if (h265_header.nalus_length == kMaxNalusPerPacket) { - RTC_LOG(LS_WARNING) - << "Received packet containing more than " << kMaxNalusPerPacket - << " NAL units. Will not keep track sps and pps ids for all of them."; - } else { - h265_header.nalus[h265_header.nalus_length++] = nalu; - } - } - return true; -} - -bool VideoRtpDepacketizerH265::ParseFuNalu( - ParsedRtpPayload* parsed_payload, - const uint8_t* payload_data) { - if (length_ < kHevcFuHeaderSize + kHevcNalHeaderSize) { - RTC_LOG(LS_ERROR) << "FU NAL units truncated."; - return false; - } - uint8_t f = payload_data[0] & kHevcFBit; - uint8_t layer_id_h = payload_data[0] & kHevcLayerIDHMask; - uint8_t layer_id_l_unshifted = payload_data[1] & kHevcLayerIDLMask; - uint8_t tid = payload_data[1] & kHevcTIDMask; - - uint8_t original_nal_type = payload_data[2] & kHevcTypeMaskInFuHeader; - bool first_fragment = payload_data[2] & kHevcSBit; - H265NaluInfo nalu; - nalu.type = original_nal_type; - nalu.vps_id = -1; - nalu.sps_id = -1; - nalu.pps_id = -1; - if (first_fragment) { - offset_ = 1; - length_ -= 1; - absl::optional pps_id = - H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( - payload_data + kHevcNalHeaderSize + kHevcFuHeaderSize, - length_ - kHevcFuHeaderSize, nalu.type); - if (pps_id) { - nalu.pps_id = *pps_id; - } else { - RTC_LOG(LS_WARNING) - << "Failed to parse PPS from first fragment of FU NAL " - "unit with original type: " - << static_cast(nalu.type); - } - uint8_t* payload = const_cast(payload_data + offset_); - payload[0] = f | original_nal_type << 1 | layer_id_h; - payload[1] = layer_id_l_unshifted | tid; - } else { - offset_ = kHevcNalHeaderSize + kHevcFuHeaderSize; - length_ -= (kHevcNalHeaderSize + kHevcFuHeaderSize); - } - - if (original_nal_type == H265::NaluType::kIdrWRadl - || original_nal_type == H265::NaluType::kIdrNLp - || original_nal_type == H265::NaluType::kCra) { - parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; - } else { - parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; - } - parsed_payload->video_header.width = 0; - parsed_payload->video_header.height = 0; - parsed_payload->video_header.codec = kVideoCodecH265; - parsed_payload->video_header.is_first_packet_in_frame = first_fragment; - auto& h265_header = absl::get( - parsed_payload->video_header.video_type_header); - h265_header.packetization_type = kH265FU; - h265_header.nalu_type = original_nal_type; - if (first_fragment) { - h265_header.nalus[h265_header.nalus_length] = nalu; - h265_header.nalus_length = 1; - } - return true; -} - } // namespace webrtc diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h index 70ff0ffb797f..164895e1ae03 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_format_h265.h @@ -1,20 +1,24 @@ /* - * Intel License - * See 
https://01.org/open-webrtc-toolkit - * This is released under Apache License 2.0 and it is free for both academic and commercial use. + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. */ #ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H265_H_ #define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H265_H_ +#include #include #include + #include "api/array_view.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" -#include "modules/rtp_rtcp/source/rtp_format.h" -#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/video_coding/codecs/h265/include/h265_globals.h" #include "rtc_base/buffer.h" @@ -28,7 +32,10 @@ class RtpPacketizerH265 : public RtpPacketizer { PayloadSizeLimits limits, H265PacketizationMode packetization_mode); - ~RtpPacketizerH265() override; + ~RtpPacketizerH265() override; + + RtpPacketizerH265(const RtpPacketizerH265&) = delete; + RtpPacketizerH265& operator=(const RtpPacketizerH265&) = delete; size_t NumPackets() const override; @@ -63,15 +70,8 @@ class RtpPacketizerH265 : public RtpPacketizer { bool aggregated; uint16_t header; // Different from H264 }; - struct Fragment { - Fragment(const uint8_t* buffer, size_t length); - explicit Fragment(const Fragment& fragment); - const uint8_t* buffer = nullptr; - size_t length = 0; - std::unique_ptr tmp_buffer; - }; struct PacketUnit { - PacketUnit(const Fragment& source_fragment, + PacketUnit(rtc::ArrayView source_fragment, bool first_fragment, bool last_fragment, bool aggregated, @@ -82,14 +82,14 @@ class RtpPacketizerH265 : public RtpPacketizer { aggregated(aggregated), header(header) {} - const Fragment source_fragment; + rtc::ArrayView source_fragment; bool first_fragment; bool last_fragment; bool aggregated; uint16_t header; }; typedef std::queue PacketQueue; - std::deque input_fragments_; + std::deque> input_fragments_; std::queue packets_; bool GeneratePackets(H265PacketizationMode packetization_mode); @@ -103,24 +103,5 @@ class RtpPacketizerH265 : public RtpPacketizer { const PayloadSizeLimits limits_; size_t num_packets_left_; }; - -// Depacketizer for H.265. 
-class VideoRtpDepacketizerH265 : public VideoRtpDepacketizer { - public: - virtual ~VideoRtpDepacketizerH265() {} - - absl::optional Parse( - rtc::CopyOnWriteBuffer rtp_payload) override; - - private: - bool ParseFuNalu(ParsedRtpPayload* parsed_payload, - const uint8_t* payload_data); - bool ProcessApOrSingleNalu(ParsedRtpPayload* parsed_payload, - const uint8_t* payload_data); - - size_t offset_; - size_t length_; - std::unique_ptr modified_buffer_; -}; } // namespace webrtc #endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H265_H_ diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index c30e8e4e4df2..dd70b2650733 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -836,7 +836,7 @@ uint8_t RTPSenderVideo::GetTemporalId(const RTPVideoHeader& header) { return vp9.temporal_idx; } uint8_t operator()(const RTPVideoHeaderH264&) { return kNoTemporalIdx; } -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 uint8_t operator()(const RTPVideoHeaderH265&) { return kNoTemporalIdx; } #endif uint8_t operator()(const RTPVideoHeaderLegacyGeneric&) { diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_video_header.h b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_video_header.h index f47f9fe8eebc..3f1fe7aaed28 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_video_header.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/rtp_video_header.h @@ -26,7 +26,7 @@ #include "api/video/video_rotation.h" #include "api/video/video_timing.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 #include "modules/video_coding/codecs/h265/include/h265_globals.h" #endif #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" @@ -44,7 +44,7 @@ using RTPVideoTypeHeader = absl::variant; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc new file mode 100644 index 000000000000..8ca1b433b212 --- /dev/null +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.cc @@ -0,0 +1,354 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifdef WEBRTC_USE_H265 +#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h" + +#include +#include +#include +#include + +#include "absl/base/macros.h" +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "common_video/h264/h264_common.h" +#include "common_video/h265/h265_common.h" +#include "common_video/h265/h265_pps_parser.h" +#include "common_video/h265/h265_sps_parser.h" +#include "common_video/h265/h265_vps_parser.h" +#include "modules/rtp_rtcp/source/byte_io.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "modules/video_coding/codecs/h265/include/h265_globals.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/logging.h" + +namespace webrtc { +namespace { + +enum NaluType { + kTrailN = 0, + kTrailR = 1, + kTsaN = 2, + kTsaR = 3, + kStsaN = 4, + kStsaR = 5, + kRadlN = 6, + kRadlR = 7, + kBlaWLp = 16, + kBlaWRadl = 17, + kBlaNLp = 18, + kIdrWRadl = 19, + kIdrNLp = 20, + kCra = 21, + kVps = 32, + kHevcSps = 33, + kHevcPps = 34, + kHevcAud = 35 +}; + +/* + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | PayloadHdr (Type=49) | FU header | DONL (cond) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-| +*/ +// Unlike H.264, HEVC NAL header is 2-bytes. +static const size_t kHevcNalHeaderSize = 2; +// H.265's FU is constructed of 2-byte payload header, and 1-byte FU header +static const size_t kHevcFuHeaderSize = 1; +static const size_t kHevcLengthFieldSize = 2; +static const size_t kHevcApHeaderSize = + kHevcNalHeaderSize + kHevcLengthFieldSize; + +enum HevcNalHdrMasks { + kHevcFBit = 0x80, + kHevcTypeMask = 0x7E, + kHevcLayerIDHMask = 0x1, + kHevcLayerIDLMask = 0xF8, + kHevcTIDMask = 0x7, + kHevcTypeMaskN = 0x81, + kHevcTypeMaskInFuHeader = 0x3F +}; + +// Bit masks for FU headers. +enum HevcFuDefs { kHevcSBit = 0x80, kHevcEBit = 0x40, kHevcFuTypeBit = 0x3F }; + +// TODO(pbos): Avoid parsing this here as well as inside the jitter buffer. +bool ParseApStartOffsets(const uint8_t* nalu_ptr, + size_t length_remaining, + std::vector* offsets) { + size_t offset = 0; + while (length_remaining > 0) { + // Buffer doesn't contain room for additional nalu length. 
+ if (length_remaining < sizeof(uint16_t)) + return false; + uint16_t nalu_size = ByteReader::ReadBigEndian(nalu_ptr); + nalu_ptr += sizeof(uint16_t); + length_remaining -= sizeof(uint16_t); + if (nalu_size > length_remaining) + return false; + nalu_ptr += nalu_size; + length_remaining -= nalu_size; + + offsets->push_back(offset + kHevcApHeaderSize); + offset += kHevcLengthFieldSize + nalu_size; + } + return true; +} + +absl::optional ProcessApOrSingleNalu( + rtc::CopyOnWriteBuffer rtp_payload) { + const uint8_t* const payload_data = rtp_payload.cdata(); + absl::optional parsed_payload( + absl::in_place); + parsed_payload->video_payload = rtp_payload; + parsed_payload->video_header.width = 0; + parsed_payload->video_header.height = 0; + parsed_payload->video_header.codec = kVideoCodecH265; + parsed_payload->video_header.is_first_packet_in_frame = true; + auto& h265_header = parsed_payload->video_header.video_type_header + .emplace(); + + const uint8_t* nalu_start = payload_data + kHevcNalHeaderSize; + const size_t nalu_length = rtp_payload.size() - kHevcNalHeaderSize; + uint8_t nal_type = (payload_data[0] & kHevcTypeMask) >> 1; + std::vector nalu_start_offsets; + if (nal_type == H265::NaluType::kAP) { + // Skip the StapA header (StapA NAL type + length). + if (rtp_payload.size() <= kHevcApHeaderSize) { + RTC_LOG(LS_ERROR) << "AP header truncated."; + return absl::nullopt; + } + + if (!ParseApStartOffsets(nalu_start, nalu_length, &nalu_start_offsets)) { + RTC_LOG(LS_ERROR) << "AP packet with incorrect NALU packet lengths."; + return absl::nullopt; + } + + h265_header.packetization_type = kH265AP; + // nal_type = (payload_data[kHevcApHeaderSize] & kHevcTypeMask) >> 1; + } else { + h265_header.packetization_type = kH265SingleNalu; + nalu_start_offsets.push_back(0); + } + h265_header.nalu_type = nal_type; + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; + + nalu_start_offsets.push_back(rtp_payload.size() + + kHevcLengthFieldSize); // End offset. + for (size_t i = 0; i < nalu_start_offsets.size() - 1; ++i) { + size_t start_offset = nalu_start_offsets[i]; + // End offset is actually start offset for next unit, excluding length field + // so remove that from this units length. + size_t end_offset = nalu_start_offsets[i + 1] - kHevcLengthFieldSize; + if (end_offset - start_offset < kHevcNalHeaderSize) { // Same as H.264. + RTC_LOG(LS_ERROR) << "AP packet too short"; + return absl::nullopt; + } + + H265NaluInfo nalu; + nalu.type = (payload_data[start_offset] & kHevcTypeMask) >> 1; + nalu.vps_id = -1; + nalu.sps_id = -1; + nalu.pps_id = -1; + start_offset += kHevcNalHeaderSize; + switch (nalu.type) { + case H265::NaluType::kVps: { + absl::optional vps = H265VpsParser::ParseVps( + &payload_data[start_offset], end_offset - start_offset); + if (vps) { + nalu.vps_id = vps->id; + } else { + RTC_LOG(LS_WARNING) << "Failed to parse VPS id from VPS slice."; + } + break; + } + case H265::NaluType::kSps: { + // TODO: Check if VUI is present in SPS and if it needs to be modified + // to avoid excessive decoder latency. + + // Copy any previous data first (likely just the first header). 
+ std::unique_ptr output_buffer(new rtc::Buffer()); + if (start_offset) + output_buffer->AppendData(payload_data, start_offset); + + absl::optional sps = H265SpsParser::ParseSps( + &payload_data[start_offset], end_offset - start_offset); + + if (sps) { + parsed_payload->video_header.width = sps->width; + parsed_payload->video_header.height = sps->height; + nalu.sps_id = sps->id; + nalu.vps_id = sps->vps_id; + } else { + RTC_LOG(LS_WARNING) + << "Failed to parse SPS and VPS id from SPS slice."; + } + parsed_payload->video_header.frame_type = + VideoFrameType::kVideoFrameKey; + break; + } + case H265::NaluType::kPps: { + uint32_t pps_id; + uint32_t sps_id; + if (H265PpsParser::ParsePpsIds(&payload_data[start_offset], + end_offset - start_offset, &pps_id, + &sps_id)) { + nalu.pps_id = pps_id; + nalu.sps_id = sps_id; + } else { + RTC_LOG(LS_WARNING) + << "Failed to parse PPS id and SPS id from PPS slice."; + } + break; + } + case H265::NaluType::kIdrWRadl: + case H265::NaluType::kIdrNLp: + case H265::NaluType::kCra: + parsed_payload->video_header.frame_type = + VideoFrameType::kVideoFrameKey; + ABSL_FALLTHROUGH_INTENDED; + case H265::NaluType::kTrailN: + case H265::NaluType::kTrailR: { + absl::optional pps_id = + H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( + &payload_data[start_offset], end_offset - start_offset, + nalu.type); + if (pps_id) { + nalu.pps_id = *pps_id; + } else { + RTC_LOG(LS_WARNING) << "Failed to parse PPS id from slice of type: " + << static_cast(nalu.type); + } + break; + } + // Slices below don't contain SPS or PPS ids. + case H265::NaluType::kAud: + case H265::NaluType::kTsaN: + case H265::NaluType::kTsaR: + case H265::NaluType::kStsaN: + case H265::NaluType::kStsaR: + case H265::NaluType::kRadlN: + case H265::NaluType::kRadlR: + case H265::NaluType::kBlaWLp: + case H265::NaluType::kBlaWRadl: + case H265::NaluType::kPrefixSei: + case H265::NaluType::kSuffixSei: + break; + case H265::NaluType::kAP: + case H265::NaluType::kFU: + RTC_LOG(LS_WARNING) << "Unexpected AP or FU received."; + return absl::nullopt; + } + + if (h265_header.nalus_length == kMaxNalusPerPacket) { + RTC_LOG(LS_WARNING) + << "Received packet containing more than " << kMaxNalusPerPacket + << " NAL units. 
Will not keep track sps and pps ids for all of them."; + } else { + h265_header.nalus[h265_header.nalus_length++] = nalu; + } + } + return parsed_payload; +} + +absl::optional ParseFuNalu( + rtc::CopyOnWriteBuffer rtp_payload) { + if (rtp_payload.size() < kHevcFuHeaderSize + kHevcNalHeaderSize) { + RTC_LOG(LS_ERROR) << "FU-A NAL units truncated."; + return absl::nullopt; + } + absl::optional parsed_payload( + absl::in_place); + + uint8_t f = rtp_payload.cdata()[0] & kHevcFBit; + uint8_t layer_id_h = rtp_payload.cdata()[0] & kHevcLayerIDHMask; + uint8_t layer_id_l_unshifted = rtp_payload.cdata()[1] & kHevcLayerIDLMask; + uint8_t tid = rtp_payload.cdata()[1] & kHevcTIDMask; + + uint8_t original_nal_type = rtp_payload.cdata()[2] & kHevcTypeMaskInFuHeader; + bool first_fragment = rtp_payload.cdata()[2] & kHevcSBit; + H265NaluInfo nalu; + nalu.type = original_nal_type; + nalu.vps_id = -1; + nalu.sps_id = -1; + nalu.pps_id = -1; + if (first_fragment) { + absl::optional pps_id = + H265PpsParser::ParsePpsIdFromSliceSegmentLayerRbsp( + rtp_payload.cdata() + kHevcNalHeaderSize + kHevcFuHeaderSize, + rtp_payload.size() - kHevcFuHeaderSize, nalu.type); + if (pps_id) { + nalu.pps_id = *pps_id; + } else { + RTC_LOG(LS_WARNING) + << "Failed to parse PPS from first fragment of FU NAL " + "unit with original type: " + << static_cast(nalu.type); + } + rtp_payload = rtp_payload.Slice(1, rtp_payload.size() - 1); + rtp_payload.MutableData()[0] = f | original_nal_type << 1 | layer_id_h; + rtp_payload.MutableData()[1] = layer_id_l_unshifted | tid; + parsed_payload->video_payload = std::move(rtp_payload); + } else { + parsed_payload->video_payload = rtp_payload.Slice( + kHevcNalHeaderSize + kHevcFuHeaderSize, + rtp_payload.size() - kHevcNalHeaderSize - kHevcFuHeaderSize); + } + + if (original_nal_type == H265::NaluType::kIdrWRadl || + original_nal_type == H265::NaluType::kIdrNLp || + original_nal_type == H265::NaluType::kCra) { + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; + } else { + parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameDelta; + } + parsed_payload->video_header.width = 0; + parsed_payload->video_header.height = 0; + parsed_payload->video_header.codec = kVideoCodecH265; + parsed_payload->video_header.is_first_packet_in_frame = first_fragment; + auto& h265_header = parsed_payload->video_header.video_type_header + .emplace(); + h265_header.packetization_type = kH265FU; + h265_header.nalu_type = original_nal_type; + if (first_fragment) { + h265_header.nalus[h265_header.nalus_length] = nalu; + h265_header.nalus_length = 1; + } + return parsed_payload; +} + +} // namespace + +absl::optional +VideoRtpDepacketizerH265::Parse(rtc::CopyOnWriteBuffer rtp_payload) { + if (rtp_payload.size() == 0) { + RTC_LOG(LS_ERROR) << "Empty payload."; + return absl::nullopt; + } + + uint8_t nal_type = (rtp_payload.cdata()[0] & kHevcTypeMask) >> 1; + + if (nal_type == H265::NaluType::kFU) { + // Fragmented NAL units (FU-A). + return ParseFuNalu(std::move(rtp_payload)); + } else { + // We handle STAP-A and single NALU's the same way here. The jitter buffer + // will depacketize the STAP-A into NAL units later. + // TODO(sprang): Parse STAP-A offsets here and store in fragmentation vec. 
+ return ProcessApOrSingleNalu(std::move(rtp_payload)); + } +} + +} // namespace webrtc +#endif diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h new file mode 100644 index 000000000000..4ae90cb6fe42 --- /dev/null +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ +#define MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ + +#include "absl/types/optional.h" +#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" +#include "rtc_base/copy_on_write_buffer.h" + +namespace webrtc { +class VideoRtpDepacketizerH265 : public VideoRtpDepacketizer { + public: + ~VideoRtpDepacketizerH265() override = default; + + absl::optional Parse( + rtc::CopyOnWriteBuffer rtp_payload) override; +}; +} // namespace webrtc + +#endif // MODULES_RTP_RTCP_SOURCE_VIDEO_RTP_DEPACKETIZER_H265_H_ diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/codecs/h265/include/h265_globals.h b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/codecs/h265/include/h265_globals.h index bc0eef236cfe..df577722aacf 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/codecs/h265/include/h265_globals.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/codecs/h265/include/h265_globals.h @@ -14,8 +14,6 @@ #ifndef MODULES_VIDEO_CODING_CODECS_H265_INCLUDE_H265_GLOBALS_H_ #define MODULES_VIDEO_CODING_CODECS_H265_INCLUDE_H265_GLOBALS_H_ -#ifndef DISABLE_H265 - #include "modules/video_coding/codecs/h264/include/h264_globals.h" namespace webrtc { @@ -57,6 +55,4 @@ struct RTPVideoHeaderH265 { } // namespace webrtc -#endif - #endif // MODULES_VIDEO_CODING_CODECS_H265_INCLUDE_H265_GLOBALS_H_ diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/encoded_frame.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/encoded_frame.cc index 637a20cfc9ed..34cb1af2ade6 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/encoded_frame.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/encoded_frame.cc @@ -140,6 +140,12 @@ void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) { _codecSpecificInfo.codecType = kVideoCodecAV1; break; } +#ifdef WEBRTC_USE_H265 + case kVideoCodecH265: { + _codecSpecificInfo.codecType = kVideoCodecH265; + break; + } +#endif default: { _codecSpecificInfo.codecType = kVideoCodecGeneric; break; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.cc index fefdb29e1a0a..fc865e205a45 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.cc @@ -7,12 +7,13 @@ * in the file PATENTS. 
All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ - #include "modules/video_coding/h265_vps_sps_pps_tracker.h" +#include #include #include +#include "absl/types/variant.h" #include "common_video/h264/h264_common.h" #include "common_video/h265/h265_common.h" #include "common_video/h265/h265_pps_parser.h" @@ -21,6 +22,7 @@ #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "modules/video_coding/codecs/h265/include/h265_globals.h" #include "modules/video_coding/packet_buffer.h" +#include "modules/rtp_rtcp/source/frame_object.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -31,15 +33,14 @@ namespace { const uint8_t start_code_h265[] = {0, 0, 0, 1}; } // namespace -H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(rtc::ArrayView bitstream, - RTPVideoHeader* video_header_pointer) { - const uint8_t* data = bitstream.data(); - const size_t data_size = bitstream.size(); - RTPVideoHeader& video_header = *video_header_pointer; - RTC_DCHECK(video_header.codec == kVideoCodecH265); +H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream( + rtc::ArrayView bitstream, + RTPVideoHeader* video_header) { + RTC_DCHECK(video_header); + RTC_DCHECK(video_header->codec == kVideoCodecH265); auto& h265_header = - absl::get(video_header.video_type_header); + absl::get(video_header->video_type_header); bool append_vps_sps_pps = false; auto vps = vps_data_.end(); @@ -55,8 +56,8 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r } case H265::NaluType::kSps: { sps_data_[nalu.sps_id].vps_id = nalu.vps_id; - sps_data_[nalu.sps_id].width = video_header.width; - sps_data_[nalu.sps_id].height = video_header.height; + sps_data_[nalu.sps_id].width = video_header->width; + sps_data_[nalu.sps_id].height = video_header->height; break; } case H265::NaluType::kPps: { @@ -69,7 +70,7 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r // If this is the first packet of an IDR, make sure we have the required // SPS/PPS and also calculate how much extra space we need in the buffer // to prepend the SPS/PPS to the bitstream with start codes. - if (video_header.is_first_packet_in_frame) { + if (video_header->is_first_packet_in_frame) { if (nalu.pps_id == -1) { RTC_LOG(LS_WARNING) << "No PPS id in IDR nalu."; return {kRequestKeyframe}; @@ -99,8 +100,8 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r // Since the first packet of every keyframe should have its width and // height set we set it here in the case of it being supplied out of // band. - video_header.width = sps->second.width; - video_header.height = sps->second.height; + video_header->width = sps->second.width; + video_header->height = sps->second.height; // If the VPS/SPS/PPS was supplied out of band then we will have saved // the actual bitstream in |data|. @@ -132,9 +133,9 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r } if (h265_header.packetization_type == kH265AP) { - const uint8_t* nalu_ptr = data + 1; - while (nalu_ptr < data + data_size) { - RTC_DCHECK(video_header.is_first_packet_in_frame); + const uint8_t* nalu_ptr = bitstream.data() + 1; + while (nalu_ptr < bitstream.data() + bitstream.size()) { + RTC_DCHECK(video_header->is_first_packet_in_frame); required_size += sizeof(start_code_h265); // The first two bytes describe the length of a segment. 
@@ -145,13 +146,14 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r nalu_ptr += segment_length; } } else { - if (video_header.is_first_packet_in_frame) + // TODO: in h.264 this is "h264_header.nalus_length > 0" + if (video_header->is_first_packet_in_frame) required_size += sizeof(start_code_h265); - required_size += data_size; + required_size += bitstream.size(); } // Then we copy to the new buffer. - FixedBitstream fixed; + H265VpsSpsPpsTracker::FixedBitstream fixed; fixed.bitstream.EnsureCapacity(required_size); if (append_vps_sps_pps) { @@ -183,7 +185,7 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r pps_info.vps_id = vps->first; pps_info.sps_id = sps->first; pps_info.pps_id = pps->first; - if (h265_header.nalus_length + 2 <= kMaxNalusPerPacket) { + if (h265_header.nalus_length + 3 <= kMaxNalusPerPacket) { h265_header.nalus[h265_header.nalus_length++] = vps_info; h265_header.nalus[h265_header.nalus_length++] = sps_info; h265_header.nalus[h265_header.nalus_length++] = pps_info; @@ -195,16 +197,16 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r // Copy the rest of the bitstream and insert start codes. if (h265_header.packetization_type == kH265AP) { - const uint8_t* nalu_ptr = data + 1; - while (nalu_ptr < data + data_size) { + const uint8_t* nalu_ptr = bitstream.data() + 1; + while (nalu_ptr < bitstream.data() + bitstream.size()) { fixed.bitstream.AppendData(start_code_h265); // The first two bytes describe the length of a segment. uint16_t segment_length = nalu_ptr[0] << 8 | nalu_ptr[1]; nalu_ptr += 2; - size_t copy_end = nalu_ptr - data + segment_length; - if (copy_end > data_size) { + size_t copy_end = nalu_ptr - bitstream.data() + segment_length; + if (copy_end > bitstream.size()) { return {kDrop}; } @@ -212,7 +214,8 @@ H265VpsSpsPpsTracker::FixedBitstream H265VpsSpsPpsTracker::CopyAndFixBitstream(r nalu_ptr += segment_length; } } else { - if (video_header.is_first_packet_in_frame) { + // For h.264 it is "h264_header.nalus_length > 0" + if (video_header->is_first_packet_in_frame) { fixed.bitstream.AppendData(start_code_h265); } fixed.bitstream.AppendData(bitstream.data(), bitstream.size()); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.h b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.h index 7ac5e2a001cb..5df10cb6d5cf 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/h265_vps_sps_pps_tracker.h @@ -11,20 +11,17 @@ #ifndef MODULES_VIDEO_CODING_H265_VPS_SPS_PPS_TRACKER_H_ #define MODULES_VIDEO_CODING_H265_VPS_SPS_PPS_TRACKER_H_ +#include #include #include #include #include #include "api/array_view.h" -#include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/copy_on_write_buffer.h" namespace webrtc { - -class VCMPacket; - namespace video_coding { class H265VpsSpsPpsTracker { @@ -35,7 +32,6 @@ class H265VpsSpsPpsTracker { rtc::CopyOnWriteBuffer bitstream; }; - // Returns fixed bitstream and modifies |video_header|. 
FixedBitstream CopyAndFixBitstream(rtc::ArrayView bitstream, RTPVideoHeader* video_header); @@ -71,4 +67,4 @@ class H265VpsSpsPpsTracker { } // namespace video_coding } // namespace webrtc -#endif // MODULES_VIDEO_CODING_H264_SPS_PPS_TRACKER_H_ +#endif // MODULES_VIDEO_CODING_H265_SPS_PPS_TRACKER_H_ diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/include/video_codec_interface.h b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/include/video_codec_interface.h index e8312c521015..ba92ce56f025 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/include/video_codec_interface.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/include/video_codec_interface.h @@ -21,7 +21,7 @@ #include "api/video_codecs/video_encoder.h" #include "common_video/generic_frame_descriptor/generic_frame_info.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 #include "modules/video_coding/codecs/h265/include/h265_globals.h" #endif #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" @@ -96,7 +96,7 @@ struct CodecSpecificInfoH264 { }; static_assert(std::is_pod::value, ""); -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 struct CodecSpecificInfoH265 { H265PacketizationMode packetization_mode; bool idr_frame; @@ -107,7 +107,7 @@ union CodecSpecificInfoUnion { CodecSpecificInfoVP8 VP8; CodecSpecificInfoVP9 VP9; CodecSpecificInfoH264 H264; -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 CodecSpecificInfoH265 H265; #endif }; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/packet_buffer.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/packet_buffer.cc index 52ef5c2d85fb..749c73e3f790 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/packet_buffer.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/modules/video_coding/packet_buffer.cc @@ -23,10 +23,16 @@ #include "api/rtp_packet_info.h" #include "api/video/video_frame_type.h" #include "common_video/h264/h264_common.h" +#ifdef WEBRTC_USE_H265 +#include "common_video/h265/h265_common.h" +#endif #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" +#ifdef WEBRTC_USE_H265 +#include "modules/video_coding/codecs/h265/include/h265_globals.h" +#endif #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/mod_ops.h" @@ -263,13 +269,23 @@ std::vector> PacketBuffer::FindFrames( bool has_h264_pps = false; bool has_h264_idr = false; bool is_h264_keyframe = false; + bool is_h265_descriptor = false; +#ifdef WEBRTC_USE_H265 + is_h265_descriptor = + (buffer_[start_index]->codec() == kVideoCodecH265) && !is_generic; + bool has_h265_sps = false; + bool has_h265_pps = false; + bool has_h265_idr = false; + bool is_h265_keyframe = false; +#endif + int idr_width = -1; int idr_height = -1; bool full_frame_found = false; while (true) { ++tested_packets; - if (!is_h264_descriptor) { + if (!is_h264_descriptor && !is_h265_descriptor) { if (buffer_[start_index] == nullptr || buffer_[start_index]->is_first_packet_in_frame()) { full_frame_found = buffer_[start_index] != nullptr; @@ -307,6 +323,34 @@ std::vector> PacketBuffer::FindFrames( } } } +#ifdef WEBRTC_USE_H265 + if (is_h265_descriptor && !is_h265_keyframe) { + const auto* h265_header = absl::get_if( + 
&buffer_[start_index]->video_header.video_type_header); + if (!h265_header || h265_header->nalus_length >= kMaxNalusPerPacket) + return found_frames; + for (size_t j = 0; j < h265_header->nalus_length; ++j) { + if (h265_header->nalus[j].type == H265::NaluType::kSps) { + has_h265_sps = true; + } else if (h265_header->nalus[j].type == H265::NaluType::kPps) { + has_h265_pps = true; + } else if (h265_header->nalus[j].type == + H265::NaluType::kIdrWRadl || + h265_header->nalus[j].type == H265::NaluType::kIdrNLp || + h265_header->nalus[j].type == H265::NaluType::kCra) { + has_h265_idr = true; + } + } + if ((has_h265_sps && has_h265_pps) || has_h265_idr) { + is_h265_keyframe = true; + if (buffer_[start_index]->width() > 0 && + buffer_[start_index]->height() > 0) { + idr_width = buffer_[start_index]->width(); + idr_height = buffer_[start_index]->height(); + } + } + } +#endif if (tested_packets == buffer_.size()) break; @@ -319,7 +363,7 @@ std::vector> PacketBuffer::FindFrames( // the timestamp of that packet is the same as this one. This may cause // the PacketBuffer to hand out incomplete frames. // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=7106 - if (is_h264_descriptor && + if ((is_h264_descriptor || is_h265_descriptor) && (buffer_[start_index] == nullptr || buffer_[start_index]->timestamp != frame_timestamp)) { break; @@ -366,7 +410,44 @@ std::vector> PacketBuffer::FindFrames( } } - if (is_h264_descriptor || full_frame_found) { +#ifdef WEBRTC_USE_H265 + if (is_h265_descriptor) { + // Warn if this is an unsafe frame. + if (has_h265_idr && (!has_h265_sps || !has_h265_pps)) { + RTC_LOG(LS_WARNING) + << "Received H.265-IDR frame " + << "(SPS: " << has_h265_sps << ", PPS: " << has_h265_pps << "). " + << "Treating as delta frame since " + << "WebRTC-SpsPpsIdrIsH265Keyframe is always enabled."; + } + + // Now that we have decided whether to treat this frame as a key frame + // or delta frame in the frame buffer, we update the field that + // determines if the RtpFrameObject is a key frame or delta frame. + const size_t first_packet_index = start_seq_num % buffer_.size(); + if (is_h265_keyframe) { + buffer_[first_packet_index]->video_header.frame_type = + VideoFrameType::kVideoFrameKey; + if (idr_width > 0 && idr_height > 0) { + // IDR frame was finalized and we have the correct resolution for + // IDR; update first packet to have same resolution as IDR. + buffer_[first_packet_index]->video_header.width = idr_width; + buffer_[first_packet_index]->video_header.height = idr_height; + } + } else { + buffer_[first_packet_index]->video_header.frame_type = + VideoFrameType::kVideoFrameDelta; + } + + // If this is not a key frame, make sure there are no gaps in the + // packet sequence numbers up until this point. + if (!is_h265_keyframe && missing_packets_.upper_bound(start_seq_num) != + missing_packets_.begin()) { + return found_frames; + } + } +#endif + if (is_h264_descriptor || is_h265_descriptor || full_frame_found) { const uint16_t end_seq_num = seq_num + 1; // Use uint16_t type to handle sequence number wrap around case. 
uint16_t num_packets = end_seq_num - start_seq_num; diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/rtc_base/experiments/min_video_bitrate_experiment.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/rtc_base/experiments/min_video_bitrate_experiment.cc index 324d5b6b5468..250fa669cdf0 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/rtc_base/experiments/min_video_bitrate_experiment.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/rtc_base/experiments/min_video_bitrate_experiment.cc @@ -99,7 +99,7 @@ absl::optional GetExperimentalMinVideoBitrate(VideoCodecType type) { case kVideoCodecAV1: return min_bitrate_av1.GetOptional(); case kVideoCodecH264: -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case kVideoCodecH265: #endif return min_bitrate_h264.GetOptional(); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.cc index 3200f0862391..124dc5afded5 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.cc @@ -509,7 +509,7 @@ AnnexBBufferReader::AnnexBBufferReader(const uint8_t* annexb_buffer, : start_(annexb_buffer), length_(length) { RTC_DCHECK(annexb_buffer); - offsets_ = isH264 ? H264::FindNaluIndices(annexb_buffer, length) : H265::FindNaluIndices(annexb_buffer, length); + offsets_ = H264::FindNaluIndices(annexb_buffer, length); offset_ = offsets_.begin(); } diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.h b/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.h index 83563deb0682..708b5d069da7 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/sdk/objc/components/video_codec/nalu_rewriter.h @@ -18,7 +18,9 @@ #include #include "common_video/h264/h264_common.h" +#ifdef WEBRTC_USE_H265 #include "common_video/h265/h265_common.h" +#endif #include "rtc_base/buffer.h" using webrtc::H264::NaluIndex; @@ -44,7 +46,7 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer, CMSampleBufferRef* out_sample_buffer, CMMemoryPoolRef memory_pool); -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 // Converts a sample buffer emitted from the VideoToolbox encoder into a buffer // suitable for RTP. The sample buffer is in avcc format whereas the rtp buffer // needs to be in Annex B format. Data is written directly to |annexb_buffer|. 
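On the receive side, the depacketizer added above in video_rtp_depacketizer_h265.{h,cc} is reached through CreateVideoRtpDepacketizer() behind the same WEBRTC_USE_H265 guard. A minimal receive-side sketch (only Parse() and ParsedRtpPayload come from this patch; the function name and surrounding logic are illustrative):

#ifdef WEBRTC_USE_H265
#include <utility>

#include "absl/types/optional.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h265.h"
#include "rtc_base/copy_on_write_buffer.h"

// Sketch: parse one received RTP payload and inspect the resulting header.
void DepacketizeH265Payload(rtc::CopyOnWriteBuffer rtp_payload) {
  webrtc::VideoRtpDepacketizerH265 depacketizer;
  absl::optional<webrtc::VideoRtpDepacketizer::ParsedRtpPayload> parsed =
      depacketizer.Parse(std::move(rtp_payload));
  if (!parsed)
    return;  // Empty, truncated or otherwise malformed payload.
  // AP and single-NALU payloads always start a frame; for FU payloads only
  // the first fragment does.
  bool is_keyframe = parsed->video_header.frame_type ==
                     webrtc::VideoFrameType::kVideoFrameKey;
  // is_keyframe is true for IDR/CRA slices or when in-band VPS/SPS/PPS were
  // seen; width/height may have been filled in from the parsed SPS.
  (void)is_keyframe;
}
#endif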
diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/test/scenario/video_stream.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/test/scenario/video_stream.cc index 937ad4f29b20..dc3bca9aed0d 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/test/scenario/video_stream.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/test/scenario/video_stream.cc @@ -191,10 +191,27 @@ CreateH264SpecificSettings(VideoStreamConfig config) { return nullptr; } +rtc::scoped_refptr +CreateH265SpecificSettings(VideoStreamConfig config) { + RTC_DCHECK_EQ(config.encoder.layers.temporal, 1); + RTC_DCHECK_EQ(config.encoder.layers.spatial, 1); + + VideoCodecH265 h265_settings = VideoEncoder::GetDefaultH265Settings(); + h265_settings.frameDroppingOn = config.encoder.frame_dropping; + h265_settings.keyFrameInterval = + config.encoder.key_frame_interval.value_or(0); + return new rtc::RefCountedObject< + VideoEncoderConfig::H265EncoderSpecificSettings>(h265_settings); +} + rtc::scoped_refptr CreateEncoderSpecificSettings(VideoStreamConfig config) { using Codec = VideoStreamConfig::Encoder::Codec; switch (config.encoder.codec) { +#ifdef WEBRTC_USE_H265 + case Codec::kVideoCodecH265: + return CreateH265SpecificSettings(config); +#endif case Codec::kVideoCodecH264: return CreateH264SpecificSettings(config); case Codec::kVideoCodecVP8: diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/test/video_codec_settings.h b/Source/ThirdParty/libwebrtc/Source/webrtc/test/video_codec_settings.h index 5ef4ed3e4ae8..52e2925aa049 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/test/video_codec_settings.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/test/video_codec_settings.h @@ -54,6 +54,11 @@ static void CodecSettings(VideoCodecType codec_type, VideoCodec* settings) { case kVideoCodecVP9: *(settings->VP9()) = VideoEncoder::GetDefaultVp9Settings(); return; +#ifdef WEBRTC_USE_H265 + case kVideoCodecH265: + *(settings->H265()) = VideoEncoder::GetDefaultH265Settings(); + return; +#endif case kVideoCodecH264: // TODO(brandtr): Set `qpMax` here, when the OpenH264 wrapper supports it.
*(settings->H264()) = VideoEncoder::GetDefaultH264Settings(); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.cc index 6ea205213800..71163d3a2b22 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.cc @@ -96,12 +96,21 @@ void VideoEncoderConfig::EncoderSpecificSettings::FillEncoderSpecificSettings( FillVideoCodecVp8(codec->VP8()); } else if (codec->codecType == kVideoCodecVP9) { FillVideoCodecVp9(codec->VP9()); +#ifdef WEBRTC_USE_H265 + } else if (codec->codecType == kVideoCodecH265) { + FillVideoCodecH265(codec->H265()); +#endif } else { RTC_DCHECK_NOTREACHED() << "Encoder specifics set/used for unknown codec type."; } } +void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecH265( + VideoCodecH265* h265_settings) const { + RTC_DCHECK_NOTREACHED(); +} + void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecVp8( VideoCodecVP8* vp8_settings) const { RTC_DCHECK_NOTREACHED(); @@ -112,6 +121,15 @@ void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecVp9( RTC_DCHECK_NOTREACHED(); } +VideoEncoderConfig::H265EncoderSpecificSettings::H265EncoderSpecificSettings( + const VideoCodecH265& specifics) + : specifics_(specifics) {} + +void VideoEncoderConfig::H265EncoderSpecificSettings::FillVideoCodecH265( + VideoCodecH265* h265_settings) const { + *h265_settings = specifics_; +} + VideoEncoderConfig::Vp8EncoderSpecificSettings::Vp8EncoderSpecificSettings( const VideoCodecVP8& specifics) : specifics_(specifics) {} diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.h b/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.h index 59c9a39f827f..bbc5e2b24b6f 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/video/config/video_encoder_config.h @@ -99,12 +99,22 @@ class VideoEncoderConfig { virtual void FillVideoCodecVp8(VideoCodecVP8* vp8_settings) const; virtual void FillVideoCodecVp9(VideoCodecVP9* vp9_settings) const; + virtual void FillVideoCodecH265(VideoCodecH265* h265_settings) const; private: ~EncoderSpecificSettings() override {} friend class VideoEncoderConfig; }; + class H265EncoderSpecificSettings : public EncoderSpecificSettings { + public: + explicit H265EncoderSpecificSettings(const VideoCodecH265& specifics); + void FillVideoCodecH265(VideoCodecH265* h265_settings) const override; + + private: + VideoCodecH265 specifics_; + }; + class Vp8EncoderSpecificSettings : public EncoderSpecificSettings { public: explicit Vp8EncoderSpecificSettings(const VideoCodecVP8& specifics); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/video/encoder_overshoot_detector.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/video/encoder_overshoot_detector.cc index 0f903a3aba54..da7691c4fd65 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/video/encoder_overshoot_detector.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/video/encoder_overshoot_detector.cc @@ -266,7 +266,7 @@ void EncoderOvershootDetector::UpdateHistograms() { RTC_HISTOGRAMS_COUNTS_10000(index, overshoot_histogram_prefix + "H264", average_overshoot_percent); break; -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 case VideoCodecType::kVideoCodecH265: RTC_HISTOGRAMS_COUNTS_10000(index, rmse_histogram_prefix + "H265", 
bitrate_rmse); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.cc index 34ad2a754624..b21ec5cdfa45 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.cc @@ -39,7 +39,7 @@ #include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h" #include "modules/video_coding/h264_sprop_parameter_sets.h" #include "modules/video_coding/h264_sps_pps_tracker.h" -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 #include "modules/video_coding/h265_vps_sps_pps_tracker.h" #endif #include "modules/video_coding/nack_requester.h" @@ -689,12 +689,21 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( packet->video_payload = std::move(fixed.bitstream); break; } -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 } else if (packet->codec() == kVideoCodecH265) { + // Only when we start to receive packets will we know what payload type + // that will be used. When we know the payload type insert the correct + // sps/pps into the tracker. + if (packet->payload_type != last_payload_type_) { + last_payload_type_ = packet->payload_type; + InsertSpsPpsIntoTracker(packet->payload_type); + } + video_coding::H265VpsSpsPpsTracker::FixedBitstream fixed = h265_tracker_.CopyAndFixBitstream( rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()), &packet->video_header); + switch (fixed.action) { case video_coding::H265VpsSpsPpsTracker::kRequestKeyframe: rtcp_feedback_buffer_.RequestKeyFrame(); diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.h b/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.h index d9d90bf5ac90..0bd463f5f4c4 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.h +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/video/rtp_video_stream_receiver2.h @@ -39,7 +39,7 @@ #include "modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.h" #include "modules/rtp_rtcp/source/video_rtp_depacketizer.h" #include "modules/video_coding/h264_sps_pps_tracker.h" -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 #include "modules/video_coding/h265_vps_sps_pps_tracker.h" #endif #include "modules/video_coding/loss_notification_controller.h" @@ -392,7 +392,7 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, std::map> payload_type_map_ RTC_GUARDED_BY(packet_sequence_checker_); -#ifndef DISABLE_H265 +#ifdef WEBRTC_USE_H265 video_coding::H265VpsSpsPpsTracker h265_tracker_; #endif diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/video/send_statistics_proxy.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/video/send_statistics_proxy.cc index fd576bb38b63..827a833cfb2d 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/video/send_statistics_proxy.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/video/send_statistics_proxy.cc @@ -47,6 +47,9 @@ enum HistogramCodecType { kVideoVp9 = 2, kVideoH264 = 3, kVideoAv1 = 4, +#ifdef WEBRTC_USE_H265 + kVideoH265 = 5, +#endif kVideoMax = 64, }; @@ -74,6 +77,10 @@ HistogramCodecType PayloadNameToHistogramCodecType( return kVideoVp9; case kVideoCodecH264: return kVideoH264; +#ifdef WEBRTC_USE_H265 + case kVideoCodecH265: + return kVideoH265; +#endif case kVideoCodecAV1: return kVideoAv1; default: diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/video/video_stream_encoder.cc 
b/Source/ThirdParty/libwebrtc/Source/webrtc/video/video_stream_encoder.cc index c3d855c1c6c4..e05b97c0ce3b 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/video/video_stream_encoder.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/video/video_stream_encoder.cc @@ -136,6 +136,14 @@ bool RequiresEncoderReset(const VideoCodec& prev_send_codec, } break; +#ifdef WEBRTC_USE_H265 + case kVideoCodecH265: + if (new_send_codec.H265() != prev_send_codec.H265()) { + return true; + } + break; +#endif + default: break; } diff --git a/Source/ThirdParty/libwebrtc/libwebrtc.xcodeproj/project.pbxproj b/Source/ThirdParty/libwebrtc/libwebrtc.xcodeproj/project.pbxproj index 6550be991328..8fed2994b6df 100644 --- a/Source/ThirdParty/libwebrtc/libwebrtc.xcodeproj/project.pbxproj +++ b/Source/ThirdParty/libwebrtc/libwebrtc.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 54; + objectVersion = 52; objects = { /* Begin PBXBuildFile section */ @@ -4218,6 +4218,8 @@ 5CFD53921E4BD3A300482908 /* row_neon64.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5CFD538A1E4BD3A300482908 /* row_neon64.cc */; }; 5CFD53931E4BD3A300482908 /* scale_neon.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5CFD538B1E4BD3A300482908 /* scale_neon.cc */; }; 5CFD53941E4BD3A300482908 /* scale_neon64.cc in Sources */ = {isa = PBXBuildFile; fileRef = 5CFD538C1E4BD3A300482908 /* scale_neon64.cc */; }; + 9869CBB42A4D5B820031C401 /* video_rtp_depacketizer_h265.h in Headers */ = {isa = PBXBuildFile; fileRef = 9869CBB22A4D5B820031C401 /* video_rtp_depacketizer_h265.h */; }; + 9869CBB52A4D5B820031C401 /* video_rtp_depacketizer_h265.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9869CBB32A4D5B820031C401 /* video_rtp_depacketizer_h265.cc */; }; CD381F272581591F0077DEC8 /* WebKitVP8Decoder.h in Headers */ = {isa = PBXBuildFile; fileRef = CD381F252581591F0077DEC8 /* WebKitVP8Decoder.h */; settings = {ATTRIBUTES = (Public, ); }; }; CD381F292581591F0077DEC8 /* WebKitVP8Decoder.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CD381F282581591F0077DEC8 /* WebKitVP8Decoder.cpp */; }; CD381F2B2581FEBA0077DEC8 /* WebKitDecoderReceiver.cpp in Sources */ = {isa = PBXBuildFile; fileRef = CD381F2A2581FEBA0077DEC8 /* WebKitDecoderReceiver.cpp */; }; @@ -10008,6 +10010,8 @@ 5D7C59C51208C68B001C873E /* libwebrtc.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = libwebrtc.xcconfig; sourceTree = ""; }; 5D7C59C61208C68B001C873E /* Base.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = Base.xcconfig; sourceTree = ""; }; 5D7C59C71208C68B001C873E /* DebugRelease.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = DebugRelease.xcconfig; sourceTree = ""; }; + 9869CBB22A4D5B820031C401 /* video_rtp_depacketizer_h265.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = video_rtp_depacketizer_h265.h; sourceTree = ""; }; + 9869CBB32A4D5B820031C401 /* video_rtp_depacketizer_h265.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = video_rtp_depacketizer_h265.cc; sourceTree = ""; }; CD381F252581591F0077DEC8 /* WebKitVP8Decoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WebKitVP8Decoder.h; sourceTree = ""; }; CD381F282581591F0077DEC8 /* WebKitVP8Decoder.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path 
= WebKitVP8Decoder.cpp; sourceTree = ""; }; CD381F2A2581FEBA0077DEC8 /* WebKitDecoderReceiver.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = WebKitDecoderReceiver.cpp; sourceTree = ""; }; @@ -18009,6 +18013,8 @@ 4189390F242A704D007FDC41 /* video_rtp_depacketizer_generic.h */, 41893910242A704D007FDC41 /* video_rtp_depacketizer_h264.cc */, 418938FC242A7049007FDC41 /* video_rtp_depacketizer_h264.h */, + 9869CBB32A4D5B820031C401 /* video_rtp_depacketizer_h265.cc */, + 9869CBB22A4D5B820031C401 /* video_rtp_depacketizer_h265.h */, 418938FE242A7049007FDC41 /* video_rtp_depacketizer_raw.cc */, 41893907242A704B007FDC41 /* video_rtp_depacketizer_raw.h */, 4189390A242A704C007FDC41 /* video_rtp_depacketizer_vp8.cc */, @@ -21903,6 +21909,7 @@ 412FFA50254B43F5001DF036 /* video_rtp_depacketizer_av1.h in Headers */, 41893927242A704E007FDC41 /* video_rtp_depacketizer_generic.h in Headers */, 41893914242A704E007FDC41 /* video_rtp_depacketizer_h264.h in Headers */, + 9869CBB42A4D5B820031C401 /* video_rtp_depacketizer_h265.h in Headers */, 4189391F242A704E007FDC41 /* video_rtp_depacketizer_raw.h in Headers */, 41893923242A704E007FDC41 /* video_rtp_depacketizer_vp8.h in Headers */, 414035F324AA0F5400BCE9B2 /* video_rtp_depacketizer_vp9.h in Headers */, @@ -24673,6 +24680,7 @@ 412FFA63254B45A6001DF036 /* video_rtp_depacketizer_av1.cc in Sources */, 4189391D242A704E007FDC41 /* video_rtp_depacketizer_generic.cc in Sources */, 41893928242A704E007FDC41 /* video_rtp_depacketizer_h264.cc in Sources */, + 9869CBB52A4D5B820031C401 /* video_rtp_depacketizer_h265.cc in Sources */, 41893916242A704E007FDC41 /* video_rtp_depacketizer_raw.cc in Sources */, 41893922242A704E007FDC41 /* video_rtp_depacketizer_vp8.cc in Sources */, 414035F224AA0F5400BCE9B2 /* video_rtp_depacketizer_vp9.cc in Sources */,
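The VideoEncoderConfig and test/scenario hunks above route H.265 through the same codec-specific-settings machinery that VP8, VP9 and H.264 already use. As a rough usage sketch only (assuming the patched tree with WEBRTC_USE_H265 defined, and assuming VideoEncoder::GetDefaultH265Settings() is declared alongside the existing per-codec defaults in api/video_codecs/video_encoder.h), a caller would fill a VideoCodecH265 and wrap it in the new H265EncoderSpecificSettings:

// h265_encoder_settings_sketch.cc -- illustrative only; loosely mirrors the
// CreateH265SpecificSettings() helper added to test/scenario/video_stream.cc.
#ifdef WEBRTC_USE_H265

#include "api/scoped_refptr.h"
#include "api/video_codecs/video_encoder.h"     // GetDefaultH265Settings() (assumed location)
#include "rtc_base/ref_counted_object.h"
#include "video/config/video_encoder_config.h"  // H265EncoderSpecificSettings

namespace {

// Start from the defaults, set the two knobs the patch exposes, then wrap the
// struct in the ref-counted object that VideoEncoderConfig consumes; the
// encoder later receives the values via FillVideoCodecH265().
rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
MakeH265EncoderSettings(bool frame_dropping, int key_frame_interval) {
  webrtc::VideoCodecH265 h265_settings =
      webrtc::VideoEncoder::GetDefaultH265Settings();
  h265_settings.frameDroppingOn = frame_dropping;
  h265_settings.keyFrameInterval = key_frame_interval;
  return new rtc::RefCountedObject<
      webrtc::VideoEncoderConfig::H265EncoderSpecificSettings>(h265_settings);
}

}  // namespace

#endif  // WEBRTC_USE_H265

Changing any of these fields makes new_send_codec.H265() compare unequal to prev_send_codec.H265() in RequiresEncoderReset(), which is what triggers the encoder re-creation path added in video_stream_encoder.cc above.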