From 90d26c6d0786761a731ce1bfd63c56909aa19f68 Mon Sep 17 00:00:00 2001
From: Yury
Date: Sun, 22 Jan 2023 00:36:28 +0200
Subject: [PATCH] Linux: Remove filter_NAL and avoid copy

---
 alvr/server/cpp/platform/linux/CEncoder.cpp   |  9 +--
 .../cpp/platform/linux/EncodePipeline.cpp     | 78 ++++---------------
 .../cpp/platform/linux/EncodePipeline.h       | 10 ++-
 .../cpp/platform/linux/EncodePipelineAMF.cpp  | 21 ++---
 .../cpp/platform/linux/EncodePipelineAMF.h    |  2 +-
 5 files changed, 41 insertions(+), 79 deletions(-)

diff --git a/alvr/server/cpp/platform/linux/CEncoder.cpp b/alvr/server/cpp/platform/linux/CEncoder.cpp
index 09df8dea0e..60adceee65 100644
--- a/alvr/server/cpp/platform/linux/CEncoder.cpp
+++ b/alvr/server/cpp/platform/linux/CEncoder.cpp
@@ -224,7 +224,6 @@ void CEncoder::Run() {

         fprintf(stderr, "CEncoder starting to read present packets");
         present_packet frame_info;
-        std::vector<uint8_t> encoded_data;
         while (not m_exiting) {
             read_latest(client, (char *)&frame_info, sizeof(frame_info), m_exiting);

@@ -250,9 +249,8 @@ void CEncoder::Run() {
             static_assert(sizeof(frame_info.pose) == sizeof(vr::HmdMatrix34_t));

-            encoded_data.clear();
-            uint64_t pts;
-            if (!encode_pipeline->GetEncoded(encoded_data, &pts)) {
+            alvr::FramePacket packet;
+            if (!encode_pipeline->GetEncoded(packet)) {
                 Error("Failed to get encoded data!");
                 continue;
             }

@@ -279,10 +277,11 @@ void CEncoder::Run() {
             ReportPresent(pose->targetTimestampNs, present_offset);
             ReportComposed(pose->targetTimestampNs, composed_offset);

-            m_listener->SendVideo(encoded_data.data(), encoded_data.size(), pts);
+            m_listener->SendVideo(packet.data, packet.size, packet.pts);

             m_listener->GetStatistics()->EncodeOutput();
+            encode_pipeline->Free();

         }
     }
     catch (std::exception &e) {
diff --git a/alvr/server/cpp/platform/linux/EncodePipeline.cpp b/alvr/server/cpp/platform/linux/EncodePipeline.cpp
index 619219220c..e986d7404e 100644
--- a/alvr/server/cpp/platform/linux/EncodePipeline.cpp
+++ b/alvr/server/cpp/platform/linux/EncodePipeline.cpp
@@ -12,59 +12,6 @@ extern "C" {
 #include <libavcodec/avcodec.h>
 }

-namespace {
-
-bool should_keep_nal_h264(const uint8_t * header_start)
-{
-  uint8_t nal_type = (header_start[2] == 0 ? header_start[4] : header_start[3]) & 0x1F;
-  switch (nal_type)
-  {
-  case 6: // supplemental enhancement information
-  case 9: // access unit delimiter
-    return false;
-  default:
-    return true;
-  }
-}
-
-bool should_keep_nal_h265(const uint8_t * header_start)
-{
-  uint8_t nal_type = ((header_start[2] == 0 ? header_start[4] : header_start[3]) >> 1) & 0x3F;
-  switch (nal_type)
-  {
-  case 35: // access unit delimiter
-  case 39: // supplemental enhancement information
-    return false;
-  default:
-    return true;
-  }
-}
-
-void filter_NAL(const uint8_t* input, size_t input_size, std::vector<uint8_t> &out)
-{
-  if (input_size < 4)
-    return;
-  auto codec = Settings::Instance().m_codec;
-  std::array<uint8_t, 3> header = {{0, 0, 1}};
-  auto end = input + input_size;
-  auto header_start = input;
-  while (header_start != end)
-  {
-    auto next_header = std::search(header_start + 3, end, header.begin(), header.end());
-    if (next_header != end and next_header[-1] == 0)
-    {
-      next_header--;
-    }
-    if (codec == ALVR_CODEC_H264 and should_keep_nal_h264(header_start))
-      out.insert(out.end(), header_start, next_header);
-    if (codec == ALVR_CODEC_H265 and should_keep_nal_h265(header_start))
-      out.insert(out.end(), header_start, next_header);
-    header_start = next_header;
-  }
-}
-
-}
-
 void alvr::EncodePipeline::SetBitrate(int64_t bitrate) {
   encoder_ctx->bit_rate = bitrate;
   encoder_ctx->rc_buffer_size = bitrate / Settings::Instance().m_refreshRate;
@@ -111,17 +58,24 @@ alvr::EncodePipeline::~EncodePipeline()
   avcodec_free_context(&encoder_ctx);
 }

-bool alvr::EncodePipeline::GetEncoded(std::vector<uint8_t> &out, uint64_t *pts)
+bool alvr::EncodePipeline::GetEncoded(FramePacket &packet)
 {
-  AVPacket * enc_pkt = av_packet_alloc();
-  int err = avcodec_receive_packet(encoder_ctx, enc_pkt);
-  if (err == AVERROR(EAGAIN)) {
-    return false;
-  } else if (err) {
+  encoder_packet = av_packet_alloc();
+  int err = avcodec_receive_packet(encoder_ctx, encoder_packet);
+  if (err != 0) {
+    av_packet_free(&encoder_packet);
+    if (err == AVERROR(EAGAIN)) {
+      return false;
+    }
     throw alvr::AvException("failed to encode", err);
   }
-  filter_NAL(enc_pkt->data, enc_pkt->size, out);
-  *pts = enc_pkt->pts;
-  av_packet_free(&enc_pkt);
+  packet.data = encoder_packet->data;
+  packet.size = encoder_packet->size;
+  packet.pts = encoder_packet->pts;
   return true;
 }
+
+void alvr::EncodePipeline::Free()
+{
+  av_packet_free(&encoder_packet);
+}
diff --git a/alvr/server/cpp/platform/linux/EncodePipeline.h b/alvr/server/cpp/platform/linux/EncodePipeline.h
index bf651e875d..bd07d1577b 100644
--- a/alvr/server/cpp/platform/linux/EncodePipeline.h
+++ b/alvr/server/cpp/platform/linux/EncodePipeline.h
@@ -14,6 +14,12 @@ class VkFrame;
 class VkFrameCtx;
 class VkContext;

+struct FramePacket {
+  uint8_t *data;
+  int size;
+  uint64_t pts;
+};
+
 class EncodePipeline
 {
 public:
@@ -25,13 +31,15 @@ class EncodePipeline
   virtual ~EncodePipeline();

   virtual void PushFrame(uint64_t targetTimestampNs, bool idr) = 0;
-  virtual bool GetEncoded(std::vector<uint8_t> & out, uint64_t *pts);
+  virtual bool GetEncoded(FramePacket &data);
+  virtual void Free();
   virtual Timestamp GetTimestamp() { return timestamp; }
   virtual void SetBitrate(int64_t bitrate);

   static std::unique_ptr<EncodePipeline> Create(Renderer *render, VkContext &vk_ctx, VkFrame &input_frame, VkFrameCtx &vk_frame_ctx, uint32_t width, uint32_t height);

 protected:
   AVCodecContext *encoder_ctx = nullptr; //shall be initialized by child class
+  AVPacket *encoder_packet = NULL;
   Timestamp timestamp = {};
 };
diff --git a/alvr/server/cpp/platform/linux/EncodePipelineAMF.cpp b/alvr/server/cpp/platform/linux/EncodePipelineAMF.cpp
index d68c3fd707..ad9aeac047 100644
--- a/alvr/server/cpp/platform/linux/EncodePipelineAMF.cpp
+++ b/alvr/server/cpp/platform/linux/EncodePipelineAMF.cpp
@@ -456,26 +456,24 @@ void EncodePipelineAMF::PushFrame(uint64_t targetTimestampNs, bool idr)
     m_amfComponents.front()->SubmitInput(surface);
 }

-bool EncodePipelineAMF::GetEncoded(std::vector<uint8_t> &out, uint64_t *pts)
+bool EncodePipelineAMF::GetEncoded(FramePacket &packet)
 {
     if (m_hasQueryTimeout) {
         m_pipeline->Run();
     } else {
         uint32_t timeout = 4 * 1000; // 1 second
-        while (m_outBuffer.empty() && --timeout != 0) {
+        while (m_framePacket.data == nullptr && --timeout != 0) {
             std::this_thread::sleep_for(std::chrono::microseconds(250));
             m_pipeline->Run();
         }
     }

-    if (m_outBuffer.empty()) {
+    if (m_framePacket.data == nullptr) {
         Error("Timed out waiting for encoder data");
         return false;
     }

-    out = m_outBuffer;
-    *pts = m_targetTimestampNs;
-    m_outBuffer.clear();
+    packet = m_framePacket;

     uint64_t query;
     VK_CHECK(vkGetQueryPoolResults(m_render->m_dev, m_queryPool, 0, 1, sizeof(uint64_t), &query, sizeof(uint64_t), VK_QUERY_RESULT_64_BIT));
@@ -484,6 +482,10 @@ bool EncodePipelineAMF::GetEncoded(std::vector<uint8_t> &out, uint64_t *pts)
     return true;
 }

+void EncodePipelineAMF::Free() {
+    m_framePacket = {nullptr, 0, 0};
+}
+
 void EncodePipelineAMF::SetBitrate(int64_t bitrate)
 {
     if (m_codec == ALVR_CODEC_H264) {
@@ -501,10 +503,9 @@ void EncodePipelineAMF::Receive(amf::AMFDataPtr data)
 {
     amf::AMFBufferPtr buffer(data); // query for buffer interface

-    char *p = reinterpret_cast<char *>(buffer->GetNative());
-    int length = static_cast<int>(buffer->GetSize());
-
-    m_outBuffer = std::vector<uint8_t>(p, p + length);
+    m_framePacket.data = reinterpret_cast<uint8_t *>(buffer->GetNative());
+    m_framePacket.size = static_cast<int>(buffer->GetSize());
+    m_framePacket.pts = m_targetTimestampNs;
 }

 void EncodePipelineAMF::ApplyFrameProperties(const amf::AMFSurfacePtr &surface, bool insertIDR)
diff --git a/alvr/server/cpp/platform/linux/EncodePipelineAMF.h b/alvr/server/cpp/platform/linux/EncodePipelineAMF.h
index 6bc94a0df7..5f70256bcb 100644
--- a/alvr/server/cpp/platform/linux/EncodePipelineAMF.h
+++ b/alvr/server/cpp/platform/linux/EncodePipelineAMF.h
@@ -96,7 +96,7 @@ class EncodePipelineAMF : public EncodePipeline
     int m_bitrateInMBits;

     bool m_hasQueryTimeout = false;
-    std::vector<uint8_t> m_outBuffer;
+    FramePacket m_framePacket = {nullptr, 0, 0};
     uint64_t m_targetTimestampNs;
 };
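
Usage note: GetEncoded() now fills a FramePacket whose data pointer borrows the encoder's own output buffer (the AVPacket on the FFmpeg pipelines, the AMF output buffer on the AMF pipeline) instead of copying into a caller-owned vector, so the data must be consumed before Free() is called. A minimal sketch of the intended call pattern, using the same names as the CEncoder.cpp hunk above:

    alvr::FramePacket packet;
    if (encode_pipeline->GetEncoded(packet)) {
        // packet.data / packet.size / packet.pts point into the encoder's internal buffer; no copy is made
        m_listener->SendVideo(packet.data, packet.size, packet.pts);
        // release the underlying packet only after the data has been consumed
        encode_pipeline->Free();
    }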