avswで使用するデコーダを指定可能に。 (Allow specifying the decoder used by avsw.)
rigaya committed May 11, 2024
1 parent 615b31d commit 0a29eeb
Showing 11 changed files with 91 additions and 28 deletions.
8 changes: 4 additions & 4 deletions NVEncC_Options.en.md
@@ -39,7 +39,7 @@
- [--avi](#--avi)
- [--avs](#--avs)
- [--vpy](#--vpy)
- [--avsw](#--avsw)
- [--avsw \[\<string\>\]](#--avsw-string)
- [--avhw](#--avhw)
- [--interlace \<string\>](#--interlace-string)
- [--video-track \<int\>](#--video-track-int)
@@ -431,11 +431,11 @@ you will need to add "[--process-codepage](#--process-codepage-string-windows-os
### --vpy
Read VapourSynth script file using vpy reader.

### --avsw
Read input file using avformat + ffmpeg's sw decoder.
### --avsw [&lt;string&gt;]
Read input file using avformat + libavcodec's sw decoder. The optional parameter sets the name of the decoder to use; if omitted, the decoder is selected automatically.
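
For illustration (not part of this diff), a possible invocation that forces ffmpeg's software h264 decoder; the decoder name follows libavcodec's naming, and the file names are placeholders:

```
NVEncC --avsw h264 -i "input.mp4" -o out.264
```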

### --avhw
Read using avformat + cuvid hw decoder. Using this mode will provide maximum performance,
Read input file using avformat + cuvid hw decoder. Using this mode will provide maximum performance,
since entire transcode process will be run on the GPU.

**Codecs supported by avhw reader**
9 changes: 5 additions & 4 deletions NVEncC_Options.ja.md
@@ -37,7 +37,7 @@
- [--avi](#--avi)
- [--avs](#--avs)
- [--vpy](#--vpy)
- [--avsw](#--avsw)
- [--avsw \[\<string\>\]](#--avsw-string)
- [--avhw](#--avhw)
- [--interlace \<string\>](#--interlace-string)
- [--crop \<int\>,\<int\>,\<int\>,\<int\>](#--crop-intintintint)
@@ -443,9 +443,10 @@ NVEncCはデフォルトではUTF-8モードで動作するため、Avisynthス
### --vpy
入力ファイルをVapourSynthで読み込む。

### --avsw
avformat + sw decoderを使用して読み込む。
ffmpegの対応するほとんどのコーデックを読み込み可能。
### --avsw [&lt;string&gt;]
avformat + sw decoderを使用して読み込む。ffmpegの対応するほとんどのコーデックを読み込み可能。

追加のパラメータで使用するデコーダ名を指定可能。特に指定のない場合、デコーダは自動的に選択される。
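
An illustrative invocation (not part of this diff; file names are placeholders) that forces ffmpeg's software hevc decoder, assuming libavcodec decoder naming:

```
NVEncC --avsw hevc -i "input.mp4" -o out.265
```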

### --avhw
avformat + cuvid decoderを使用して読み込む。
2 changes: 1 addition & 1 deletion NVEncCore/NVEncCore.cpp
@@ -495,7 +495,7 @@ NVENCSTATUS NVEncCore::InitInput(InEncodeVideoParam *inputParam, const std::vect
m_poolFrame = std::make_unique<RGYPoolAVFrame>();

//入力モジュールの初期化
if (initReaders(m_pFileReader, m_AudioReaders, &inputParam->input, inputCspOfRawReader,
if (initReaders(m_pFileReader, m_AudioReaders, &inputParam->input, &inputParam->inprm, inputCspOfRawReader,
m_pStatus, &inputParam->common, &inputParam->ctrl, HWDecCodecCsp, subburnTrackId,
inputParam->vpp.rff.enable, inputParam->vpp.afs.enable,
m_poolPkt.get(), m_poolFrame.get(), m_qpTable.get(), m_pPerfMonitor.get(), m_pNVLog) != RGY_ERR_NONE) {
4 changes: 4 additions & 0 deletions NVEncCore/rgy_cmd.cpp
@@ -3579,6 +3579,10 @@ int parse_one_input_option(const TCHAR *option_name, const TCHAR *strInput[], in
if (IS_OPTION("avsw")) {
#if ENABLE_AVSW_READER
input->type = RGY_INPUT_FMT_AVSW;
if (i + 1 <= nArgNum && strInput[i+1][0] != _T('-')) {
i++;
inprm->avswDecoder = strInput[i];
}
return 0;
#else
_ftprintf(stderr, _T("avsw reader not supported in this build.\n"));
8 changes: 7 additions & 1 deletion NVEncCore/rgy_input.cpp
@@ -132,6 +132,10 @@ const ConvertCSP *RGYConvertCSP::getFunc(RGY_CSP csp_from, RGY_CSP csp_to, bool
return m_csp;
}

const ConvertCSP *RGYConvertCSP::getFunc(RGY_CSP csp_from, RGY_CSP csp_to, RGY_SIMD simd) {
return getFunc(csp_from, csp_to, m_uv_only, simd);
}

int RGYConvertCSP::run(int interlaced, void **dst, const void **src, int width, int src_y_pitch_byte, int src_uv_pitch_byte, int dst_y_pitch_byte, int height, int dst_height, int *crop) {
if (m_threads == 0) {
const int div = (m_csp->simd == RGY_SIMD::NONE) ? 2 : 4;
@@ -439,6 +443,7 @@ RGY_ERR initReaders(
shared_ptr<RGYInput>& pFileReader,
vector<shared_ptr<RGYInput>>& otherReaders,
VideoInfo *input,
const RGYParamInput *inprm,
const RGY_CSP inputCspOfRawReader,
const shared_ptr<EncodeStatus> pStatus,
const RGYParamCommon *common,
@@ -637,8 +642,9 @@
inputInfoAVCuvid.lowLatency = ctrl->lowLatency;
inputInfoAVCuvid.timestampPassThrough = common->timestampPassThrough;
inputInfoAVCuvid.hevcbsf = common->hevcbsf;
inputInfoAVCuvid.avswDecoder = inprm->avswDecoder;
pInputPrm = &inputInfoAVCuvid;
log->write(RGY_LOG_DEBUG, RGY_LOGT_IN, _T("avhw reader selected.\n"));
log->write(RGY_LOG_DEBUG, RGY_LOGT_IN, _T("avhw/sw reader selected.\n"));
pFileReader.reset(new RGYInputAvcodec());
} break;
#endif //#if ENABLE_AVSW_READER
2 changes: 2 additions & 0 deletions NVEncCore/rgy_input.h
@@ -151,6 +151,7 @@ class RGYConvertCSP {
RGYConvertCSP();
RGYConvertCSP(int threads, RGYParamThread threadParam);
~RGYConvertCSP();
const ConvertCSP *getFunc(RGY_CSP csp_from, RGY_CSP csp_to, RGY_SIMD simd);
const ConvertCSP *getFunc(RGY_CSP csp_from, RGY_CSP csp_to, bool uv_only, RGY_SIMD simd);
const ConvertCSP *getFunc() const { return m_csp; };

@@ -334,6 +335,7 @@ RGY_ERR initReaders(
shared_ptr<RGYInput> &pFileReader,
vector<shared_ptr<RGYInput>> &otherReaders,
VideoInfo *input,
const RGYParamInput *inprm,
const RGY_CSP inputCspOfRawReader,
const shared_ptr<EncodeStatus> pStatus,
const RGYParamCommon *common,
72 changes: 59 additions & 13 deletions NVEncCore/rgy_input_avcodec.cpp
@@ -136,6 +136,7 @@ AVDemuxVideo::AVDemuxVideo() :
hevcNaluLengthSize(0),
hdr10plusMetadataCopy(false),
doviRpuCopy(false),
simdCsp(RGY_SIMD::SIMD_ALL),
masteringDisplay(std::unique_ptr<AVMasteringDisplayMetadata, RGYAVDeleter<AVMasteringDisplayMetadata>>(nullptr, RGYAVDeleter<AVMasteringDisplayMetadata>(av_freep))),
contentLight(std::unique_ptr<AVContentLightMetadata, RGYAVDeleter<AVContentLightMetadata>>(nullptr, RGYAVDeleter<AVContentLightMetadata>(av_freep))),
qpTableListRef(nullptr),
@@ -260,7 +261,8 @@ RGYInputAvcodecPrm::RGYInputAvcodecPrm(RGYInputPrm base) :
lowLatency(false),
timestampPassThrough(false),
inputOpt(),
hevcbsf(RGYHEVCBsf::INTERNAL) {
hevcbsf(RGYHEVCBsf::INTERNAL),
avswDecoder() {

}

@@ -1799,6 +1801,10 @@ RGY_ERR RGYInputAvcodec::Init(const TCHAR *strFileName, VideoInfo *inputInfo, co
AddMessage(RGY_LOG_DEBUG, _T("can be decoded by %s.\n"), _T(DECODER_NAME));
}
}
tstring avswDecoder;
if (m_inputVideoInfo.codec == RGY_CODEC_UNKNOWN) { //swデコードの場合
avswDecoder = input_prm->avswDecoder;
}
m_readerName = (m_Demux.video.HWDecodeDeviceId >= 0) ? _T("av" DECODER_NAME) : _T("avsw");
m_inputVideoInfo.type = (m_Demux.video.HWDecodeDeviceId >= 0) ? RGY_INPUT_FMT_AVHW : RGY_INPUT_FMT_AVSW;
//念のため初期化
@@ -1842,9 +1848,28 @@ RGY_ERR RGYInputAvcodec::Init(const TCHAR *strFileName, VideoInfo *inputInfo, co
if (sts == RGY_ERR_MORE_DATA
&& (m_Demux.video.bsfcCtx || m_Demux.video.bUseHEVCmp42AnnexB)
&& !m_Demux.video.hdr10plusMetadataCopy) {
AddMessage(RGY_LOG_WARN, _T("Failed to get header for hardware decoder, switching to software decoder...\n"));
m_inputVideoInfo.codec = RGY_CODEC_UNKNOWN; //hwデコードをオフにする
m_Demux.video.HWDecodeDeviceId = -1;
if (m_inputVideoInfo.codec != RGY_CODEC_UNKNOWN) { //hwデコードを使用していた場合
AddMessage(RGY_LOG_WARN, _T("Failed to get header for hardware decoder, switching to software decoder...\n"));
if (input_prm->avswDecoder.length() != 0) {
avswDecoder = input_prm->avswDecoder;
} else if (ENCODER_QSV) {
switch (m_inputVideoInfo.codec) {
case RGY_CODEC_H264: avswDecoder = _T("h264_qsv"); break;
case RGY_CODEC_HEVC: avswDecoder = _T("hevc_qsv"); break;
case RGY_CODEC_AV1: avswDecoder = _T("av1_qsv"); break;
default: break;
}
} else if (ENCODER_NVENC) {
switch (m_inputVideoInfo.codec) {
case RGY_CODEC_H264: avswDecoder = _T("h264_cuvid"); break;
case RGY_CODEC_HEVC: avswDecoder = _T("hevc_cuvid"); break;
case RGY_CODEC_AV1: avswDecoder = _T("av1_cuvid"); break;
default: break;
}
}
m_inputVideoInfo.codec = RGY_CODEC_UNKNOWN; //hwデコードをオフにする
m_Demux.video.HWDecodeDeviceId = -1;
}
//close bitstreamfilter
if (m_Demux.video.bsfcCtx) {
AddMessage(RGY_LOG_DEBUG, _T("Free bsf...\n"));
@@ -2019,9 +2044,21 @@ RGY_ERR RGYInputAvcodec::Init(const TCHAR *strFileName, VideoInfo *inputInfo, co
const bool bAspectRatioUnknown = aspectRatio.num * aspectRatio.den <= 0;

if (!(m_Demux.video.HWDecodeDeviceId >= 0)) {
if (nullptr == (m_Demux.video.codecDecode = avcodec_find_decoder(m_Demux.video.stream->codecpar->codec_id))) {
AddMessage(RGY_LOG_ERROR, errorMesForCodec(_T("Failed to find decoder"), m_Demux.video.stream->codecpar->codec_id).c_str());
return RGY_ERR_NOT_FOUND;
if (avswDecoder.length() != 0) {
// swデコーダの指定がある場合はまずはそれを使用する
if (nullptr == (m_Demux.video.codecDecode = avcodec_find_decoder_by_name(tchar_to_string(avswDecoder).c_str()))) {
AddMessage(RGY_LOG_WARN, _T("Failed to find decoder %s, switching to default decoder.\n"), avswDecoder.c_str());
} else if (m_Demux.video.codecDecode->id != m_Demux.video.stream->codecpar->codec_id) {
AddMessage(RGY_LOG_WARN, _T("decoder %s cannot decode codec %s, switching to default decoder.\n"),
avswDecoder.c_str(), char_to_tstring(avcodec_get_name(m_Demux.video.stream->codecpar->codec_id)).c_str());
m_Demux.video.codecDecode = nullptr;
}
}
if (m_Demux.video.codecDecode == nullptr) {
if (nullptr == (m_Demux.video.codecDecode = avcodec_find_decoder(m_Demux.video.stream->codecpar->codec_id))) {
AddMessage(RGY_LOG_ERROR, errorMesForCodec(_T("Failed to find decoder"), m_Demux.video.stream->codecpar->codec_id).c_str());
return RGY_ERR_NOT_FOUND;
}
}
if (nullptr == (m_Demux.video.codecCtxDecode = avcodec_alloc_context3(m_Demux.video.codecDecode))) {
AddMessage(RGY_LOG_ERROR, errorMesForCodec(_T("Failed to allocate decoder"), m_Demux.video.stream->codecpar->codec_id).c_str());
@@ -2059,7 +2096,7 @@ RGY_ERR RGYInputAvcodec::Init(const TCHAR *strFileName, VideoInfo *inputInfo, co
AddMessage(RGY_LOG_ERROR, _T("Failed to open decoder for %s: %s\n"), char_to_tstring(avcodec_get_name(m_Demux.video.stream->codecpar->codec_id)).c_str(), qsv_av_err2str(ret).c_str());
return RGY_ERR_UNSUPPORTED;
}

m_Demux.video.simdCsp = prm->simdCsp;
const auto pixCspConv = csp_avpixfmt_to_rgy(m_Demux.video.codecCtxDecode->pix_fmt);
if (pixCspConv == RGY_CSP_NA) {
AddMessage(RGY_LOG_ERROR, _T("invalid color format: %s\n"),
@@ -2073,22 +2110,22 @@ RGY_ERR RGYInputAvcodec::Init(const TCHAR *strFileName, VideoInfo *inputInfo, co
//ロスレスの場合は、入力側で出力フォーマットを決める
m_inputVideoInfo.csp = pixfmtData->output_csp;
} else {
m_inputVideoInfo.csp = (m_convert->getFunc(m_inputCsp, prefered_csp, false, prm->simdCsp) != nullptr) ? prefered_csp : pixfmtData->output_csp;
m_inputVideoInfo.csp = (m_convert->getFunc(m_inputCsp, prefered_csp, false, m_Demux.video.simdCsp) != nullptr) ? prefered_csp : pixfmtData->output_csp;
//QSVではNV16->P010がサポートされていない
if (ENCODER_QSV && m_inputVideoInfo.csp == RGY_CSP_NV16 && prefered_csp == RGY_CSP_P010) {
m_inputVideoInfo.csp = RGY_CSP_P210;
}
//なるべく軽いフォーマットでGPUに転送するように
if (ENCODER_NVENC
&& RGY_CSP_BIT_PER_PIXEL[pixfmtData->output_csp] < RGY_CSP_BIT_PER_PIXEL[prefered_csp]
&& m_convert->getFunc(m_inputCsp, pixfmtData->output_csp, false, prm->simdCsp) != nullptr) {
&& m_convert->getFunc(m_inputCsp, pixfmtData->output_csp, false, m_Demux.video.simdCsp) != nullptr) {
m_inputVideoInfo.csp = pixfmtData->output_csp;
}
}
if (m_convert->getFunc(m_inputCsp, m_inputVideoInfo.csp, false, prm->simdCsp) == nullptr && m_inputCsp == RGY_CSP_YUY2) {
if (m_convert->getFunc(m_inputCsp, m_inputVideoInfo.csp, false, m_Demux.video.simdCsp) == nullptr && m_inputCsp == RGY_CSP_YUY2) {
//YUY2用の特別処理
m_inputVideoInfo.csp = RGY_CSP_CHROMA_FORMAT[pixfmtData->output_csp] == RGY_CHROMAFMT_YUV420 ? RGY_CSP_NV12 : RGY_CSP_YUV444;
m_convert->getFunc(m_inputCsp, m_inputVideoInfo.csp, false, prm->simdCsp);
m_convert->getFunc(m_inputCsp, m_inputVideoInfo.csp, false, m_Demux.video.simdCsp);
}
if (m_convert->getFunc() == nullptr) {
AddMessage(RGY_LOG_ERROR, _T("color conversion not supported: %s -> %s.\n"),
@@ -2156,7 +2193,7 @@ RGY_ERR RGYInputAvcodec::Init(const TCHAR *strFileName, VideoInfo *inputInfo, co
AddMessage(RGY_LOG_DEBUG, mes);
m_inputInfo += mes;
} else {
CreateInputInfo((tstring(_T("avsw: ")) + char_to_tstring(avcodec_get_name(m_Demux.video.stream->codecpar->codec_id))).c_str(),
CreateInputInfo((tstring(_T("avsw: ")) + char_to_tstring(m_Demux.video.codecCtxDecode->codec->name)).c_str(),
RGY_CSP_NAMES[m_convert->getFunc()->csp_from], RGY_CSP_NAMES[m_convert->getFunc()->csp_to], get_simd_str(m_convert->getFunc()->simd), &m_inputVideoInfo);
if (input_prm->seekSec > 0.0f) {
m_inputInfo += strsprintf(_T("\n seek: %s"), print_time(input_prm->seekSec).c_str());
@@ -3045,6 +3082,15 @@ RGY_ERR RGYInputAvcodec::LoadNextFrameInternal(RGYFrame *pSurface) {
pSurface->dataList().push_back(dovirpu);
}
}

//実際には初期化時と異なるcspの場合があるので、ここで再度チェック
m_inputCsp = csp_avpixfmt_to_rgy((AVPixelFormat)m_Demux.video.frame->format);
if (m_convert->getFunc(m_inputCsp, m_inputVideoInfo.csp, m_Demux.video.simdCsp) == nullptr) {
AddMessage(RGY_LOG_ERROR, _T("color conversion not supported: %s -> %s.\n"),
RGY_CSP_NAMES[m_inputCsp], RGY_CSP_NAMES[m_inputVideoInfo.csp]);
return RGY_ERR_INVALID_COLOR_FORMAT;
}

//フレームデータをコピー
void *dst_array[3];
pSurface->ptrArray(dst_array);
3 changes: 3 additions & 0 deletions NVEncCore/rgy_input_avcodec.h
@@ -757,6 +757,8 @@ struct AVDemuxVideo {
bool hdr10plusMetadataCopy; //HDR10plusのメタ情報を取得する
bool doviRpuCopy; //dovi rpuのメタ情報を取得する

RGY_SIMD simdCsp; //使用するSIMD

std::unique_ptr<AVMasteringDisplayMetadata, RGYAVDeleter<AVMasteringDisplayMetadata>> masteringDisplay; //入力ファイルから抽出したHDRメタ情報
std::unique_ptr<AVContentLightMetadata, RGYAVDeleter<AVContentLightMetadata>> contentLight; //入力ファイルから抽出したHDRメタ情報

@@ -844,6 +846,7 @@ class RGYInputAvcodecPrm : public RGYInputPrm {
bool timestampPassThrough; //timestampをそのまま出力する
RGYOptList inputOpt; //入力オプション
RGYHEVCBsf hevcbsf;
tstring avswDecoder; //avswデコーダの指定

RGYInputAvcodecPrm(RGYInputPrm base);
virtual ~RGYInputAvcodecPrm() {};
3 changes: 2 additions & 1 deletion NVEncCore/rgy_prm.cpp
@@ -1690,7 +1690,8 @@ tstring RGYDebugLogFile::getFilename(const tstring& outputFilename, const tstrin

RGYParamInput::RGYParamInput() :
resizeResMode(RGYResizeResMode::Normal),
ignoreSAR(false) {
ignoreSAR(false),
avswDecoder() {

}

2 changes: 1 addition & 1 deletion NVEncCore/rgy_prm.h
@@ -1690,6 +1690,7 @@ struct RGYDebugLogFile {
struct RGYParamInput {
RGYResizeResMode resizeResMode;
bool ignoreSAR;
tstring avswDecoder; //avswデコーダの指定

RGYParamInput();
~RGYParamInput();
@@ -1701,7 +1702,6 @@ struct RGYParamCommon {
tstring muxOutputFormat; //出力フォーマット
VideoVUIInfo out_vui;
RGYOptList inputOpt; //入力オプション

std::string maxCll;
std::string masterDisplay;
CspTransfer atcSei;
6 changes: 3 additions & 3 deletions NVEncCore/rgy_version.h
@@ -29,9 +29,9 @@
#ifndef __RGY_CONFIG_H__
#define __RGY_CONFIG_H__

#define VER_FILEVERSION 0,7,52,0
#define VER_STR_FILEVERSION "7.52"
#define VER_STR_FILEVERSION_TCHAR _T("7.52")
#define VER_FILEVERSION 0,7,53,0
#define VER_STR_FILEVERSION "7.53"
#define VER_STR_FILEVERSION_TCHAR _T("7.53")

#ifdef _M_IX86
#define BUILD_ARCH_STR _T("x86")