Skip to content

Commit

Permalink
data streamに限り、AV_NOPTS_VALUEのパケットをそのまま転送するようにする。( #335 )
Browse files Browse the repository at this point in the history
  • Loading branch information
rigaya committed May 5, 2021
1 parent 9c0220c commit 165eb70
Show file tree
Hide file tree
Showing 2 changed files with 22 additions and 8 deletions.
14 changes: 12 additions & 2 deletions NVEncCore/rgy_input_avcodec.cpp
Expand Up @@ -2199,6 +2199,14 @@ int64_t RGYInputAvcodec::convertTimebaseVidToStream(int64_t pts, const AVDemuxSt
}

bool RGYInputAvcodec::checkStreamPacketToAdd(AVPacket *pkt, AVDemuxStream *stream) {
// EPGやbin_dataなど、data streamでtimestampがついていないことがある
// 一度もtimestampが設定されていない場合でもそれはすべて転送する
if (stream->aud0_fin == AV_NOPTS_VALUE //一度もtimestampが設定されていない
&& pkt->pts == AV_NOPTS_VALUE //timestampが設定されていない
&& avcodec_get_type(stream->stream->codecpar->codec_id) == AVMEDIA_TYPE_DATA // data stream
) {
return true;
}
if (pkt->pts != AV_NOPTS_VALUE) { //pkt->ptsがAV_NOPTS_VALUEの場合は、以前のフレームの継続とみなして更新しない
stream->lastVidIndex = getVideoFrameIdx(pkt->pts, stream->timebase, stream->lastVidIndex);
}
Expand Down Expand Up @@ -2595,7 +2603,8 @@ void RGYInputAvcodec::GetAudioDataPacketsWhenNoVideoRead(int inputFrame) {
} else {
AVDemuxStream *pStream = getPacketStreamData(&pkt);
const auto delay_ts = av_rescale_q(pStream->addDelayMs, av_make_q(1, 1000), pStream->timebase);
pkt.pts += delay_ts;
if (pkt.pts != AV_NOPTS_VALUE) pkt.pts += delay_ts;
if (pkt.dts != AV_NOPTS_VALUE) pkt.dts += delay_ts;
if (checkStreamPacketToAdd(&pkt, pStream)) {
m_Demux.qStreamPktL1.push_back(pkt);
} else {
Expand Down Expand Up @@ -2663,7 +2672,8 @@ void RGYInputAvcodec::CheckAndMoveStreamPacketList() {
&& 0 < av_compare_ts(pkt.pts + delay_ts, pStream->timebase, fixedLastFrame.pts, vid_pkt_timebase)) {
break;
}
pkt.pts += delay_ts;
if (pkt.pts != AV_NOPTS_VALUE) pkt.pts += delay_ts;
if (pkt.dts != AV_NOPTS_VALUE) pkt.dts += delay_ts;
if (checkStreamPacketToAdd(&pkt, pStream)) {
pkt.flags = (pkt.flags & 0xffff) | ((uint32_t)pStream->trackId << 16); //flagsの上位16bitには、trackIdへのポインタを格納しておく
m_Demux.qStreamPktL2.push(pkt); //Writer側に渡したパケットはWriter側で開放する
Expand Down
16 changes: 10 additions & 6 deletions NVEncCore/rgy_output_avcodec.cpp
Expand Up @@ -37,7 +37,7 @@
#include "rgy_bitstream.h"
#include "rgy_codepage.h"

#define WRITE_PTS_DEBUG (0)
#define WRITE_PTS_DEBUG (1)

#if ENABLE_AVSW_READER
#if USE_CUSTOM_IO
Expand Down Expand Up @@ -3016,13 +3016,13 @@ RGY_ERR RGYOutputAvcodec::WriteOtherPacket(AVPacket *pkt) {
const AVRational vid_pkt_timebase = av_isvalid_q(m_Mux.video.inputStreamTimebase) ? m_Mux.video.inputStreamTimebase : av_inv_q(m_Mux.video.outputFps);
const int64_t pts_offset = av_rescale_q(m_Mux.video.inputFirstKeyPts, vid_pkt_timebase, pMuxOther->streamInTimebase);
const AVRational timebase_conv = (pMuxOther->outCodecDecodeCtx) ? pMuxOther->outCodecDecodeCtx->pkt_timebase : pMuxOther->streamOut->time_base;
pkt->pts = av_rescale_q(std::max<int64_t>(0, pkt->pts - pts_offset), pMuxOther->streamInTimebase, timebase_conv);
if (pkt->pts != AV_NOPTS_VALUE) pkt->pts = av_rescale_q(std::max<int64_t>(0, pkt->pts - pts_offset), pMuxOther->streamInTimebase, timebase_conv);
if (pkt->dts != AV_NOPTS_VALUE) pkt->dts = av_rescale_q(std::max<int64_t>(0, pkt->dts - pts_offset), pMuxOther->streamInTimebase, timebase_conv);
if (WRITE_PTS_DEBUG) {
AddMessage((pkt->pts == AV_NOPTS_VALUE) ? RGY_LOG_ERROR : RGY_LOG_WARN, _T("%3d, %12s, pts, %lld (%d/%d) [%s]\n"),
pMuxOther->streamOut->index, char_to_tstring(avcodec_get_name(m_Mux.format.formatCtx->streams[pMuxOther->streamOut->index]->codecpar->codec_id)).c_str(),
pkt->pts, timebase_conv.num, timebase_conv.den, getTimestampString(pkt->pts, timebase_conv).c_str());
}
pkt->dts = av_rescale_q(std::max<int64_t>(0, pkt->dts - pts_offset), pMuxOther->streamInTimebase, timebase_conv);
pkt->flags &= 0x0000ffff; //元のpacketの上位16bitにはトラック番号を紛れ込ませているので、av_interleaved_write_frame前に消すこと
pkt->duration = (int)av_rescale_q(pkt->duration, pMuxOther->streamInTimebase, pMuxOther->streamOut->time_base);
pkt->stream_index = pMuxOther->streamOut->index;
Expand Down Expand Up @@ -3408,7 +3408,7 @@ RGY_ERR RGYOutputAvcodec::WriteThreadFunc() {
//syncIgnoreDtsは映像と音声の同期を行う必要がないことを意味する
//dtsThresholdを加算したときにオーバーフローしないよう、dtsThresholdを引いておく
const int64_t syncIgnoreDts = INT64_MAX - dtsThreshold;
int64_t audioDts = (m_Mux.audio.size()) ? 0 : syncIgnoreDts;
int64_t audioDts = (m_Mux.audio.size() + m_Mux.other.size()) ? 0 : syncIgnoreDts;
int64_t videoDts = (m_Mux.video.streamOut) ? 0 : syncIgnoreDts;
WaitForSingleObject(m_Mux.thread.heEventPktAddedOutput, INFINITE);
//bThAudProcessは出力開始した後で取得する(この前だとまだ起動していないことがある)
Expand Down Expand Up @@ -3472,7 +3472,9 @@ RGY_ERR RGYOutputAvcodec::WriteThreadFunc() {
//音声処理スレッドが別にあるなら、出力スレッドがすべきことは単に出力するだけ
(bThAudProcess) ? writeProcessedPacket(&pktData) : WriteNextPacketInternal(&pktData, maxDts);
//複数のstreamがあり得るので最大値をとる
audioDts = (std::max)(audioDts, (std::max)(pktData.dts, m_Mux.thread.streamOutMaxDts.load()));
if (pktData.dts != AV_NOPTS_VALUE) {
audioDts = (std::max)(audioDts, (std::max)(pktData.dts, m_Mux.thread.streamOutMaxDts.load()));
}
nWaitAudio = 0;
const int log_level = RGY_LOG_TRACE;
if (m_printMes && log_level >= m_printMes->getLogLevel()) {
Expand Down Expand Up @@ -3539,7 +3541,9 @@ RGY_ERR RGYOutputAvcodec::WriteThreadFunc() {
const int64_t maxDts = (videoDts >= 0) ? videoDts + dtsThreshold : INT64_MAX;
(bThAudProcess) ? writeProcessedPacket(&pktData) : WriteNextPacketInternal(&pktData, maxDts);
//複数のstreamがあり得るので最大値をとる
audioDts = (std::max)(audioDts, pktData.dts);
if (pktData.dts != AV_NOPTS_VALUE) {
audioDts = (std::max)(audioDts, pktData.dts);
}
}
RGYBitstream bitstream = RGYBitstreamInit();
while (videoDts <= audioDts + dtsThreshold
Expand Down

0 comments on commit 165eb70

Please sign in to comment.