diff --git a/Plugin~/README.md b/Plugin~/README.md
new file mode 100644
index 0000000000..3cdbfb6835
--- /dev/null
+++ b/Plugin~/README.md
@@ -0,0 +1,31 @@
+# Plugin
+
+This document explains how to build and deploy the native plugin that `com.unity.webrtc` depends on.
+
+### Embedding libwebrtc
+
+The plugin depends on **libwebrtc**, so libwebrtc must be statically linked when you build it. `webrtc-win.zip` is available on the GitHub Releases page.
+
+After extracting the zip file, place its contents directly under the Plugin folder.
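+
+As a rough sketch, the layout after extraction should look like the following (the folder names inside the zip are assumptions and may differ between releases):
+
+```
+Plugin
+├── webrtc            // contents of webrtc-win.zip
+│   ├── include       // libwebrtc headers
+│   └── lib           // static libraries to link against
+└── WebRTCPlugin      // plugin sources
+```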
+
+### Development Environment
+
+As of version 1.0, the plugin is built with **Visual Studio 2017**.
+
+### Project Settings
+
+To develop the plugin, you need to adjust the `WebRTCPlugin` project properties to match your own environment.
+
+Set the Unity executable path in `Command` and the project path in `Command Arguments`. With these settings in place, the Unity Editor launches when you start debugging, and breakpoints become active.
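+
+For example, the Debugging properties might look like the following (the paths are illustrative; adjust them to your installation):
+
+```
+Command:           C:\Program Files\Unity\Hub\Editor\2019.1.0f2\Editor\Unity.exe
+Command Arguments: -projectPath "C:\workspace\com.unity.webrtc"
+```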
+
+### Deploying the Plugin
+
+When you run the build, `webrtc.dll` is placed in `Packages\com.unity.webrtc\Runtime\Plugins\x86_64`. Make sure the settings below are applied in the Unity Inspector at this point.
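+
+In practice this means the DLL's import settings should target the Windows x86_64 platform; this is inferred from the target folder name, and the exact options may differ between Unity versions.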
+
diff --git a/Plugin~/README_EN.md b/Plugin~/README_EN.md
new file mode 100644
index 0000000000..2c87537abc
--- /dev/null
+++ b/Plugin~/README_EN.md
@@ -0,0 +1,32 @@
+# Building the Plugin
+
+This guide covers building and deploying the native plugin that `com.unity.webrtc` depends on.
+
+### Embedding libwebrtc
+
+The plugin relies on **libwebrtc**, so building it requires statically linking libwebrtc. `webrtc-win.zip` can be found on the GitHub Releases page.
+
+Extract the files from the zip and place them directly under the Plugin folder.
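+
+As a rough sketch, the resulting layout should look something like this (the folder names inside the zip are assumptions and may vary between releases):
+
+```
+Plugin
+├── webrtc            // contents of webrtc-win.zip
+│   ├── include       // libwebrtc headers
+│   └── lib           // static libraries to link against
+└── WebRTCPlugin      // plugin sources
+```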
+
+### Build
+
+As of version 1.0, the plugin is built with **Visual Studio 2017**.
+
+### Project Settings
+
+The `WebRTCPlugin` project properties must be adjusted to match your environment in order to build the plugin.
+
+Set the Unity .exe path under `Command` and the project path under `Command Arguments`. Once these are set, the Unity Editor launches when you start debugging, and breakpoints become active.
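+
+For example (the paths below are illustrative; substitute your own Unity installation and project location):
+
+```
+Command:           C:\Program Files\Unity\Hub\Editor\2019.1.0f2\Editor\Unity.exe
+Command Arguments: -projectPath "C:\workspace\com.unity.webrtc"
+```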
+
+### Deploying the Plugin
+
+When you run the build, `webrtc.dll` is placed in `Packages\com.unity.webrtc\Runtime\Plugins\x86_64`. Verify that the settings below are shown in the Unity Inspector window.
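+
+In practice this means the DLL's import settings should target the Windows x86_64 platform; this is inferred from the target folder name, and the exact options may differ between Unity versions.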
+
diff --git a/Plugin~/WebRTCPlugin/Callback.cpp b/Plugin~/WebRTCPlugin/Callback.cpp
new file mode 100644
index 0000000000..1b1dad402d
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/Callback.cpp
@@ -0,0 +1,85 @@
+#include "pch.h"
+#include "Context.h"
+#include "IUnityGraphics.h"
+#include "IUnityGraphicsD3D11.h"
+
+namespace WebRTC
+{
+ IUnityInterfaces* s_UnityInterfaces = nullptr;
+ IUnityGraphics* s_Graphics = nullptr;
+ UnityGfxRenderer s_RenderType;
+ //d3d11 context
+ ID3D11DeviceContext* context;
+ //d3d11 device
+ ID3D11Device* g_D3D11Device = nullptr;
+ //natively created ID3D11Texture2D ptrs
+ UnityFrameBuffer* renderTextures[bufferedFrameNum];
+
+ Context* s_context;
+}
+using namespace WebRTC;
+//get d3d11 device
+static void UNITY_INTERFACE_API OnGraphicsDeviceEvent(UnityGfxDeviceEventType eventType)
+{
+ switch (eventType)
+ {
+ case kUnityGfxDeviceEventInitialize:
+ {
+ s_RenderType = s_UnityInterfaces->Get<IUnityGraphics>()->GetRenderer();
+ if (s_RenderType == kUnityGfxRendererD3D11)
+ {
+ g_D3D11Device = s_UnityInterfaces->Get<IUnityGraphicsD3D11>()->GetDevice();
+ g_D3D11Device->GetImmediateContext(&context);
+ }
+ break;
+ }
+ case kUnityGfxDeviceEventShutdown:
+ {
+ for (auto& rt : renderTextures)
+ {
+ if (rt)
+ {
+ rt->Release();
+ rt = nullptr;
+ }
+ }
+ //UnityPluginUnload is not always called, so unregister the callback here as well
+ s_Graphics->UnregisterDeviceEventCallback(OnGraphicsDeviceEvent);
+ break;
+ }
+ case kUnityGfxDeviceEventBeforeReset:
+ {
+ break;
+ }
+ case kUnityGfxDeviceEventAfterReset:
+ {
+ break;
+ }
+ };
+}
+// Unity plugin load event
+extern "C" void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API UnityPluginLoad(IUnityInterfaces* unityInterfaces)
+{
+ s_UnityInterfaces = unityInterfaces;
+ s_Graphics = unityInterfaces->Get<IUnityGraphics>();
+ s_Graphics->RegisterDeviceEventCallback(OnGraphicsDeviceEvent);
+ OnGraphicsDeviceEvent(kUnityGfxDeviceEventInitialize);
+}
+extern "C" void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API UnityPluginUnload()
+{
+ s_Graphics->UnregisterDeviceEventCallback(OnGraphicsDeviceEvent);
+}
+
+static void UNITY_INTERFACE_API OnRenderEvent(int eventID)
+{
+ if (s_context != nullptr)
+ {
+ s_context->EncodeFrame();
+ }
+}
+
+extern "C" UnityRenderingEvent UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API GetRenderEventFunc(Context* context)
+{
+ s_context = context;
+ return OnRenderEvent;
+}
diff --git a/Plugin~/WebRTCPlugin/Context.cpp b/Plugin~/WebRTCPlugin/Context.cpp
new file mode 100644
index 0000000000..74e9d3be08
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/Context.cpp
@@ -0,0 +1,372 @@
+#include "pch.h"
+#include "WebRTCPlugin.h"
+#include "Context.h"
+
+namespace WebRTC
+{
+ ContextManager ContextManager::s_instance;
+
+ CodecInitializationResult ContextManager::InitializeAndTryNvEnc()
+ {
+ auto result = LoadNvEncApi();
+ if (result == CodecInitializationResult::Success)
+ {
+ //Try to create encoder once
+ result = TryNvEnc();
+ }
+ return result;
+ }
+
+ Context* ContextManager::GetContext(int uid)
+ {
+ if (s_instance.codecInitializationResult == CodecInitializationResult::NotInitialized)
+ {
+ s_instance.codecInitializationResult = s_instance.InitializeAndTryNvEnc();
+ }
+ auto it = s_instance.m_contexts.find(uid);
+ if (it != s_instance.m_contexts.end()) {
+ DebugLog("Using already created context with ID %d", uid);
+ return it->second.get();
+ }
+
+ auto ctx = new Context(uid);
+ s_instance.m_contexts[uid].reset(ctx);
+ DebugLog("Register context with ID %d", uid);
+ return ctx;
+ }
+ CodecInitializationResult ContextManager::GetCodecInitializationResult() const
+ {
+ return s_instance.codecInitializationResult;
+ }
+
+ void ContextManager::SetCurContext(Context* context)
+ {
+ curContext = context;
+ }
+ CodecInitializationResult ContextManager::TryNvEnc()
+ {
+ NV_ENC_INITIALIZE_PARAMS nvEncInitializeParams = {};
+ NV_ENC_CONFIG nvEncConfig = {};
+ bool result = true;
+ _NVENCSTATUS errorCode;
+ void* pEncoderInterface = nullptr;
+#pragma region open an encode session
+ //open an encode session
+ NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS openEncodeSessionExParams = { 0 };
+ openEncodeSessionExParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER;
+ openEncodeSessionExParams.device = g_D3D11Device;
+ openEncodeSessionExParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX;
+ openEncodeSessionExParams.apiVersion = NVENCAPI_VERSION;
+ result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncOpenEncodeSessionEx(&openEncodeSessionExParams, &pEncoderInterface)));
+ checkf(result, "Unable to open NvEnc encode session");
+ LogPrint(StringFormat("OpenEncodeSession Error is %d", errorCode).c_str());
+ if (!result)
+ {
+ return CodecInitializationResult::EncoderInitializationFailed;
+ }
+#pragma endregion
+#pragma region set initialization parameters
+ nvEncInitializeParams.version = NV_ENC_INITIALIZE_PARAMS_VER;
+ nvEncInitializeParams.encodeWidth = 1920;
+ nvEncInitializeParams.encodeHeight = 1080;
+ nvEncInitializeParams.darWidth = 1920;
+ nvEncInitializeParams.darHeight = 1080;
+ nvEncInitializeParams.encodeGUID = NV_ENC_CODEC_H264_GUID;
+ nvEncInitializeParams.presetGUID = NV_ENC_PRESET_LOW_LATENCY_HQ_GUID;
+ nvEncInitializeParams.frameRateNum = 60;
+ nvEncInitializeParams.frameRateDen = 1;
+ nvEncInitializeParams.enablePTD = 1;
+ nvEncInitializeParams.reportSliceOffsets = 0;
+ nvEncInitializeParams.enableSubFrameWrite = 0;
+ nvEncInitializeParams.encodeConfig = &nvEncConfig;
+ nvEncInitializeParams.maxEncodeWidth = 3840;
+ nvEncInitializeParams.maxEncodeHeight = 2160;
+#pragma endregion
+#pragma region get preset config and set it
+ NV_ENC_PRESET_CONFIG presetConfig = { 0 };
+ presetConfig.version = NV_ENC_PRESET_CONFIG_VER;
+ presetConfig.presetCfg.version = NV_ENC_CONFIG_VER;
+ result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncGetEncodePresetConfig(pEncoderInterface, nvEncInitializeParams.encodeGUID, nvEncInitializeParams.presetGUID, &presetConfig));
+ checkf(result, "Failed to select NVEncoder preset config");
+ if(!result)
+ {
+ return CodecInitializationResult::EncoderInitializationFailed;
+ }
+ std::memcpy(&nvEncConfig, &presetConfig.presetCfg, sizeof(NV_ENC_CONFIG));
+ nvEncConfig.profileGUID = NV_ENC_H264_PROFILE_BASELINE_GUID;
+ nvEncConfig.gopLength = nvEncInitializeParams.frameRateNum;
+ nvEncConfig.rcParams.averageBitRate = 10000000;
+ nvEncConfig.encodeCodecConfig.h264Config.idrPeriod = nvEncConfig.gopLength;
+
+ nvEncConfig.encodeCodecConfig.h264Config.sliceMode = 0;
+ nvEncConfig.encodeCodecConfig.h264Config.sliceModeData = 0;
+ nvEncConfig.encodeCodecConfig.h264Config.repeatSPSPPS = 1;
+ //Quality Control
+ nvEncConfig.encodeCodecConfig.h264Config.level = NV_ENC_LEVEL_H264_51;
+#pragma endregion
+#pragma region get encoder capability
+ NV_ENC_CAPS_PARAM capsParam = { 0 };
+ capsParam.version = NV_ENC_CAPS_PARAM_VER;
+ capsParam.capsToQuery = NV_ENC_CAPS_ASYNC_ENCODE_SUPPORT;
+ int32 asyncMode = 0;
+ result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncGetEncodeCaps(pEncoderInterface, nvEncInitializeParams.encodeGUID, &capsParam, &asyncMode));
+ checkf(result, "Failded to get NVEncoder capability params");
+ if (!result)
+ {
+ return CodecInitializationResult::EncoderInitializationFailed;
+ }
+ nvEncInitializeParams.enableEncodeAsync = 0;
+#pragma endregion
+#pragma region initialize hardware encoder session
+ result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncInitializeEncoder(pEncoderInterface, &nvEncInitializeParams)));
+ checkf(result, "Failed to initialize NVEncoder");
+ LogPrint(StringFormat("nvEncInitializeEncoder error is %d", errorCode).c_str());
+ if (!result)
+ {
+ return CodecInitializationResult::EncoderInitializationFailed;
+ }
+#pragma endregion
+ if (pEncoderInterface)
+ {
+ bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyEncoder(pEncoderInterface));
+ checkf(result, "Failed to destroy NV encoder interface");
+ pEncoderInterface = nullptr;
+ }
+ return CodecInitializationResult::Success;
+ }
+
+ CodecInitializationResult ContextManager::LoadNvEncApi()
+ {
+ pNvEncodeAPI = std::make_unique<NV_ENCODE_API_FUNCTION_LIST>();
+ pNvEncodeAPI->version = NV_ENCODE_API_FUNCTION_LIST_VER;
+#if defined(_WIN32)
+#if defined(_WIN64)
+ HMODULE module = LoadLibrary(TEXT("nvEncodeAPI64.dll"));
+#else
+ HMODULE module = LoadLibrary(TEXT("nvEncodeAPI.dll"));
+#endif
+#else
+ void *module = dlopen("libnvidia-encode.so.1", RTLD_LAZY);
+#endif
+
+ if (module == nullptr)
+ {
+ LogPrint("NVENC library file is not found. Please ensure NV driver is installed");
+ return CodecInitializationResult::DriverNotInstalled;
+ }
+ hModule = module;
+
+ using NvEncodeAPIGetMaxSupportedVersion_Type = NVENCSTATUS(NVENCAPI *)(uint32_t*);
+#if defined(_WIN32)
+ NvEncodeAPIGetMaxSupportedVersion_Type NvEncodeAPIGetMaxSupportedVersion = (NvEncodeAPIGetMaxSupportedVersion_Type)GetProcAddress(module, "NvEncodeAPIGetMaxSupportedVersion");
+#else
+ NvEncodeAPIGetMaxSupportedVersion_Type NvEncodeAPIGetMaxSupportedVersion = (NvEncodeAPIGetMaxSupportedVersion_Type)dlsym(hModule, "NvEncodeAPIGetMaxSupportedVersion");
+#endif
+
+ uint32_t version = 0;
+ uint32_t currentVersion = (NVENCAPI_MAJOR_VERSION << 4) | NVENCAPI_MINOR_VERSION;
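+ //NvEncodeAPIGetMaxSupportedVersion reports the driver's maximum supported API version packed the same way, (major << 4) | minor, so the two values compare directly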
+ NvEncodeAPIGetMaxSupportedVersion(&version);
+ if (currentVersion > version)
+ {
+ LogPrint("Current Driver Version does not support this NvEncodeAPI version, please upgrade driver");
+ return CodecInitializationResult::DriverVersionDoesNotSupportAPI;
+ }
+
+ using NvEncodeAPICreateInstance_Type = NVENCSTATUS(NVENCAPI *)(NV_ENCODE_API_FUNCTION_LIST*);
+#if defined(_WIN32)
+ NvEncodeAPICreateInstance_Type NvEncodeAPICreateInstance = (NvEncodeAPICreateInstance_Type)GetProcAddress(module, "NvEncodeAPICreateInstance");
+#else
+ NvEncodeAPICreateInstance_Type NvEncodeAPICreateInstance = (NvEncodeAPICreateInstance_Type)dlsym(module, "NvEncodeAPICreateInstance");
+#endif
+
+ if (!NvEncodeAPICreateInstance)
+ {
+ LogPrint("Cannot find NvEncodeAPICreateInstance() entry in NVENC library");
+ return CodecInitializationResult::APINotFound;
+ }
+ bool result = (NvEncodeAPICreateInstance(pNvEncodeAPI.get()) == NV_ENC_SUCCESS);
+ checkf(result, "Unable to create NvEnc API function list");
+ if (!result)
+ {
+ return CodecInitializationResult::APINotFound;
+ }
+ return CodecInitializationResult::Success;
+ }
+
+ void ContextManager::DestroyContext(int uid)
+ {
+ auto it = s_instance.m_contexts.find(uid);
+ if (it != s_instance.m_contexts.end()) {
+ DebugLog("Unregister context with ID %d", uid);
+ s_instance.m_contexts.erase(it);
+ }
+ }
+
+ ContextManager::~ContextManager()
+ {
+ if (hModule)
+ {
+ FreeLibrary((HMODULE)hModule);
+ hModule = nullptr;
+ }
+ if (m_contexts.size()) {
+ DebugWarning("%lu remaining context(s) registered", m_contexts.size());
+ }
+ m_contexts.clear();
+ }
+
+ void Convert(const std::string& str, webrtc::PeerConnectionInterface::RTCConfiguration& config)
+ {
+ config = webrtc::PeerConnectionInterface::RTCConfiguration{};
+ Json::Reader jsonReader;
+ Json::Value configJson;
+ jsonReader.parse(str, configJson);
+ Json::Value iceServersJson = configJson["iceServers"];
+ if (!iceServersJson)
+ return;
+ for (auto iceServerJson : iceServersJson)
+ {
+ webrtc::PeerConnectionInterface::IceServer iceServer;
+ for (auto url : iceServerJson["urls"])
+ {
+ iceServer.urls.push_back(url.asString());
+ }
+ if (!iceServerJson["username"].isNull())
+ {
+ iceServer.username = iceServerJson["username"].asString();
+ }
+ if (!iceServerJson["username"].isNull())
+ {
+ iceServer.password = iceServerJson["credential"].asString();
+ }
+ config.servers.push_back(iceServer);
+ }
+ config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ }
+#pragma warning(push)
+#pragma warning(disable: 4715)
+ webrtc::SdpType ConvertSdpType(RTCSdpType type)
+ {
+ switch (type)
+ {
+ case RTCSdpType::Offer:
+ return webrtc::SdpType::kOffer;
+ case RTCSdpType::PrAnswer:
+ return webrtc::SdpType::kPrAnswer;
+ case RTCSdpType::Answer:
+ return webrtc::SdpType::kAnswer;
+ }
+ }
+
+ RTCSdpType ConvertSdpType(webrtc::SdpType type)
+ {
+ switch (type)
+ {
+ case webrtc::SdpType::kOffer:
+ return RTCSdpType::Offer;
+ case webrtc::SdpType::kPrAnswer:
+ return RTCSdpType::PrAnswer;
+ case webrtc::SdpType::kAnswer:
+ return RTCSdpType::Answer;
+ }
+ }
+#pragma warning(pop)
+
+ Context::Context(int uid)
+ : m_uid(uid)
+ {
+ workerThread.reset(new rtc::Thread());
+ workerThread->Start();
+ signalingThread.reset(new rtc::Thread());
+ signalingThread->Start();
+
+ rtc::InitializeSSL();
+
+ audioDevice = new rtc::RefCountedObject<DummyAudioDevice>();
+ nvVideoCapturerUnique = std::make_unique<NvVideoCapturer>();
+ nvVideoCapturer = nvVideoCapturerUnique.get();
+ auto dummyVideoEncoderFactory = std::make_unique<DummyVideoEncoderFactory>(nvVideoCapturer);
+
+ peerConnectionFactory = webrtc::CreatePeerConnectionFactory(
+ workerThread.get(),
+ workerThread.get(),
+ signalingThread.get(),
+ audioDevice,
+ webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>(),
+ webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>(),
+ std::move(dummyVideoEncoderFactory),
+ webrtc::CreateBuiltinVideoDecoderFactory(),
+ nullptr,
+ nullptr);
+ }
+
+ Context::~Context()
+ {
+ clients.clear();
+ peerConnectionFactory = nullptr;
+ audioTrack = nullptr;
+ videoTracks.clear();
+ audioStream = nullptr;
+ videoStreams.clear();
+
+ workerThread->Quit();
+ workerThread.reset();
+ signalingThread->Quit();
+ signalingThread.reset();
+ }
+
+ webrtc::MediaStreamInterface* Context::CreateVideoStream(UnityFrameBuffer* frameBuffer)
+ {
+ //TODO: label and stream id should be maintained in some way for multi-stream
+ auto videoTrack = peerConnectionFactory->CreateVideoTrack(
+ "video", peerConnectionFactory->CreateVideoSource(std::move(nvVideoCapturerUnique)));
+ if (!videoTracks.count(frameBuffer))
+ {
+ videoTracks[frameBuffer] = videoTrack;
+ }
+ auto videoStream = peerConnectionFactory->CreateLocalMediaStream("video");
+ videoStream->AddTrack(videoTrack);
+ videoStreams.push_back(videoStream);
+ nvVideoCapturer->unityRT = frameBuffer;
+ nvVideoCapturer->StartEncoder();
+ return videoStream.get();
+ }
+
+ webrtc::MediaStreamInterface* Context::CreateAudioStream()
+ {
+ //avoid optimization specially for voice
+ cricket::AudioOptions audioOptions;
+ audioOptions.auto_gain_control = false;
+ audioOptions.noise_suppression = false;
+ audioOptions.highpass_filter = false;
+ //TODO: label and stream id should be maintained in some way for multi-stream
+ audioTrack = peerConnectionFactory->CreateAudioTrack("audio", peerConnectionFactory->CreateAudioSource(audioOptions));
+ audioStream = peerConnectionFactory->CreateLocalMediaStream("audio");
+ audioStream->AddTrack(audioTrack);
+ return audioStream.get();
+ }
+
+ PeerSDPObserver* PeerSDPObserver::Create(DelegateSetSDSuccess onSuccess, DelegateSetSDFailure onFailure)
+ {
+ auto observer = new rtc::RefCountedObject<PeerSDPObserver>();
+ observer->onSuccess = onSuccess;
+ observer->onFailure = onFailure;
+ return observer;
+ }
+
+ void PeerSDPObserver::OnSuccess()
+ {
+ if (onSuccess != nullptr)
+ {
+ onSuccess();
+ }
+ }
+
+ void PeerSDPObserver::OnFailure(const std::string& error)
+ {
+ if (onFailure != nullptr)
+ {
+ onFailure();
+ }
+ }
+}
diff --git a/Plugin~/WebRTCPlugin/Context.h b/Plugin~/WebRTCPlugin/Context.h
new file mode 100644
index 0000000000..9bd487a871
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/Context.h
@@ -0,0 +1,86 @@
+#pragma once
+#include "DummyAudioDevice.h"
+#include "DummyVideoEncoder.h"
+#include "PeerConnectionObject.h"
+#include "NvVideoCapturer.h"
+
+
+namespace WebRTC
+{
+ class Context;
+ class PeerSDPObserver;
+ class ContextManager
+ {
+ public:
+ static ContextManager* GetInstance() { return &s_instance; }
+
+ Context* GetContext(int uid);
+ void DestroyContext(int uid);
+ void SetCurContext(Context*);
+ CodecInitializationResult GetCodecInitializationResult() const;
+
+ public:
+ using ContextPtr = std::unique_ptr<Context>;
+ Context* curContext = nullptr;
+ std::unique_ptr<NV_ENCODE_API_FUNCTION_LIST> pNvEncodeAPI;
+ void* hModule = nullptr;
+ private:
+ ~ContextManager();
+ CodecInitializationResult InitializeAndTryNvEnc();
+ CodecInitializationResult LoadNvEncApi();
+ CodecInitializationResult TryNvEnc();
+
+ CodecInitializationResult codecInitializationResult = CodecInitializationResult::NotInitialized;
+ std::map<int, ContextPtr> m_contexts;
+ static ContextManager s_instance;
+ };
+
+ class Context
+ {
+ public:
+ explicit Context(int uid = -1);
+ webrtc::MediaStreamInterface* CreateVideoStream(UnityFrameBuffer* frameBuffer);
+ webrtc::MediaStreamInterface* CreateAudioStream();
+ ~Context();
+
+ PeerConnectionObject* CreatePeerConnection(int id);
+ PeerConnectionObject* CreatePeerConnection(int id, const std::string& conf);
+ void DeletePeerConnection(int id) { clients.erase(id); }
+ void InitializeEncoder(int32 width, int32 height) { nvVideoCapturer->InitializeEncoder(width, height); }
+ void EncodeFrame() { nvVideoCapturer->EncodeVideoData(); }
+ void StopCapturer() { nvVideoCapturer->Stop(); }
+ void ProcessAudioData(const float* data, int32 size) { audioDevice->ProcessAudioData(data, size); }
+ private:
+ int m_uid;
+ std::unique_ptr<rtc::Thread> workerThread;
+ std::unique_ptr<rtc::Thread> signalingThread;
+ std::map<int, rtc::scoped_refptr<PeerConnectionObject>> clients;
+ rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> peerConnectionFactory;
+ NvVideoCapturer* nvVideoCapturer;
+ std::unique_ptr<NvVideoCapturer> nvVideoCapturerUnique;
+ rtc::scoped_refptr<DummyAudioDevice> audioDevice;
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audioTrack;
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> audioStream;
+ //TODO: move videoTrack to NvVideoCapturer and maintain multiple NvVideoCapturer here
+ std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>> videoStreams;
+ std::map<UnityFrameBuffer*, rtc::scoped_refptr<webrtc::VideoTrackInterface>> videoTracks;
+ };
+
+ class PeerSDPObserver : public webrtc::SetSessionDescriptionObserver
+ {
+ public:
+ static PeerSDPObserver* Create(DelegateSetSDSuccess onSuccess, DelegateSetSDFailure onFailure);
+ virtual void OnSuccess();
+ virtual void OnFailure(const std::string& error);
+ DelegateSetSDSuccess onSuccess;
+ DelegateSetSDFailure onFailure;
+ protected:
+ PeerSDPObserver() {}
+ ~PeerSDPObserver() {}
+
+ }; // class PeerSDPObserver
+
+ extern void Convert(const std::string& str, webrtc::PeerConnectionInterface::RTCConfiguration& config);
+ extern webrtc::SdpType ConvertSdpType(RTCSdpType type);
+ extern RTCSdpType ConvertSdpType(webrtc::SdpType type);
+}
diff --git a/Plugin~/WebRTCPlugin/DataChannelObject.cpp b/Plugin~/WebRTCPlugin/DataChannelObject.cpp
new file mode 100644
index 0000000000..0fb96e77d7
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/DataChannelObject.cpp
@@ -0,0 +1,48 @@
+#include "pch.h"
+#include "DataChannelObject.h"
+
+namespace WebRTC
+{
+ DataChannelObject::DataChannelObject(rtc::scoped_refptr<webrtc::DataChannelInterface> channel, PeerConnectionObject& pc) : dataChannel(channel), peerConnectionObj(pc)
+ {
+ dataChannel->RegisterObserver(this);
+ }
+ DataChannelObject::~DataChannelObject()
+ {
+ dataChannel->UnregisterObserver();
+ }
+
+ void DataChannelObject::OnStateChange()
+ {
+ auto state = dataChannel->state();
+ switch (state)
+ {
+ case webrtc::DataChannelInterface::kOpen:
+ if (onOpen != nullptr)
+ {
+ onOpen();
+ }
+ break;
+ case webrtc::DataChannelInterface::kClosed:
+ if (onClose != nullptr)
+ {
+ onClose();
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ void DataChannelObject::OnMessage(const webrtc::DataBuffer& buffer)
+ {
+ if (onMessage != nullptr)
+ {
+#pragma warning(suppress: 4267)
+ onMessage(buffer.data.data(), buffer.data.size());
+ }
+ }
+}
diff --git a/Plugin~/WebRTCPlugin/DataChannelObject.h b/Plugin~/WebRTCPlugin/DataChannelObject.h
new file mode 100644
index 0000000000..edf09eb2d5
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/DataChannelObject.h
@@ -0,0 +1,62 @@
+#pragma once
+
+namespace WebRTC
+{
+ class PeerConnectionObject;
+ using DelegateOnMessage = void(*)(const byte*, int size);
+ using DelegateOnOpen = void(*)();
+ using DelegateOnClose = void(*)();
+
+ class DataChannelObject : public webrtc::DataChannelObserver
+ {
+ public:
+ DataChannelObject(rtc::scoped_refptr<webrtc::DataChannelInterface> channel, PeerConnectionObject& pc);
+ ~DataChannelObject();
+
+ std::string GetLabel() const
+ {
+ return dataChannel->label();
+ }
+ int GetID() const
+ {
+ return dataChannel->id();
+ }
+ void Close()
+ {
+ dataChannel->Close();
+ }
+ void Send(const char* data)
+ {
+ dataChannel->Send(webrtc::DataBuffer(std::string(data)));
+ }
+ void Send(const byte* data, int len)
+ {
+ rtc::CopyOnWriteBuffer buf(data, len);
+ dataChannel->Send(webrtc::DataBuffer(buf, true));
+ }
+ void RegisterOnMessage(DelegateOnMessage callback)
+ {
+ onMessage = callback;
+ }
+ void RegisterOnOpen(DelegateOnOpen callback)
+ {
+ onOpen = callback;
+ }
+ void RegisterOnClose(DelegateOnClose callback)
+ {
+ onClose = callback;
+ }
+ //webrtc::DataChannelObserver
+ // The data channel state has changed.
+ void OnStateChange() override;
+ // A data buffer was successfully received.
+ void OnMessage(const webrtc::DataBuffer& buffer) override;
+ public:
+ DelegateOnMessage onMessage = nullptr;
+ DelegateOnOpen onOpen = nullptr;
+ DelegateOnClose onClose = nullptr;
+ private:
+ rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel;
+ PeerConnectionObject& peerConnectionObj;
+ };
+}
diff --git a/Plugin~/WebRTCPlugin/DummyAudioDevice.cpp b/Plugin~/WebRTCPlugin/DummyAudioDevice.cpp
new file mode 100644
index 0000000000..c8e1c2b0e2
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/DummyAudioDevice.cpp
@@ -0,0 +1,25 @@
+#include "pch.h"
+#include "DummyAudioDevice.h"
+
+namespace WebRTC
+{
+ void DummyAudioDevice::ProcessAudioData(const float* data, int32 size)
+ {
+ if (started && isRecording)
+ {
+ for (int i = 0; i < size; i++)
+ {
+#pragma warning (suppress: 4244)
+ convertedAudioData.push_back(data[i] >= 0 ? data[i] * SHRT_MAX : data[i] * -SHRT_MIN);
+ }
+ //opus supports up to 48khz sample rate, enforce 48khz here for quality
+ size_t chunkSize = 48000 * 2 / 100;
+ while (convertedAudioData.size() > chunkSize)
+ {
+ deviceBuffer->SetRecordedBuffer(convertedAudioData.data(), chunkSize / 2);
+ deviceBuffer->DeliverRecordedData();
+ convertedAudioData.erase(convertedAudioData.begin(), convertedAudioData.begin() + chunkSize);
+ }
+ }
+ }
+}
diff --git a/Plugin~/WebRTCPlugin/DummyAudioDevice.h b/Plugin~/WebRTCPlugin/DummyAudioDevice.h
new file mode 100644
index 0000000000..343841661d
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/DummyAudioDevice.h
@@ -0,0 +1,295 @@
+#pragma once
+
+namespace WebRTC
+{
+ class DummyAudioDevice : public webrtc::AudioDeviceModule
+ {
+ public:
+ void ProcessAudioData(const float* data, int32 size);
+
+ //webrtc::AudioDeviceModule
+ // Retrieve the currently utilized audio layer
+ virtual int32 ActiveAudioLayer(AudioLayer* audioLayer) const override
+ {
+ *audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+ return 0;
+ }
+ // Full-duplex transportation of PCM audio
+ virtual int32 RegisterAudioCallback(webrtc::AudioTransport* audioCallback) override
+ {
+ deviceBuffer->RegisterAudioCallback(audioCallback);
+ return 0;
+ }
+
+ // Main initialization and termination
+ virtual int32 Init() override
+ {
+ deviceBuffer = std::make_unique<webrtc::AudioDeviceBuffer>();
+ started = true;
+ return 0;
+ }
+ virtual int32 Terminate() override
+ {
+ deviceBuffer.reset();
+ started = false;
+ isRecording = false;
+ return 0;
+ }
+ virtual bool Initialized() const override
+ {
+ return started;
+ }
+
+ // Device enumeration
+ virtual int16 PlayoutDevices() override
+ {
+ return 0;
+ }
+ virtual int16 RecordingDevices() override
+ {
+ return 0;
+ }
+ virtual int32 PlayoutDeviceName(uint16 index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override
+ {
+ return 0;
+ }
+ virtual int32 RecordingDeviceName(uint16 index,
+ char name[webrtc::kAdmMaxDeviceNameSize],
+ char guid[webrtc::kAdmMaxGuidSize]) override
+ {
+ return 0;
+ }
+
+ // Device selection
+ virtual int32 SetPlayoutDevice(uint16 index) override
+ {
+ return 0;
+ }
+ virtual int32 SetPlayoutDevice(WindowsDeviceType device) override
+ {
+ return 0;
+ }
+ virtual int32 SetRecordingDevice(uint16 index) override
+ {
+ return 0;
+ }
+ virtual int32 SetRecordingDevice(WindowsDeviceType device) override
+ {
+ return 0;
+ }
+
+ // Audio transport initialization
+ virtual int32 PlayoutIsAvailable(bool* available) override
+ {
+ return 0;
+ }
+ virtual int32 InitPlayout() override
+ {
+ return 0;
+ }
+ virtual bool PlayoutIsInitialized() const override
+ {
+ return false;
+ }
+ virtual int32 RecordingIsAvailable(bool* available) override
+ {
+ return 0;
+ }
+ virtual int32 InitRecording() override
+ {
+ isRecording = true;
+ deviceBuffer->SetRecordingSampleRate(48000);
+ deviceBuffer->SetRecordingChannels(2);
+ return 0;
+ }
+ virtual bool RecordingIsInitialized() const override
+ {
+ return isRecording;
+ }
+
+ // Audio transport control
+ virtual int32 StartPlayout() override
+ {
+ return 0;
+ }
+ virtual int32 StopPlayout() override
+ {
+ return 0;
+ }
+ virtual bool Playing() const override
+ {
+ return false;
+ }
+ virtual int32 StartRecording() override
+ {
+ return 0;
+ }
+ virtual int32 StopRecording() override
+ {
+ return 0;
+ }
+ virtual bool Recording() const override
+ {
+ return isRecording;
+ }
+
+ // Audio mixer initialization
+ virtual int32 InitSpeaker() override
+ {
+ return 0;
+ }
+ virtual bool SpeakerIsInitialized() const override
+ {
+ return false;
+ }
+ virtual int32 InitMicrophone() override
+ {
+ return 0;
+ }
+ virtual bool MicrophoneIsInitialized() const override
+ {
+ return false;
+ }
+
+ // Speaker volume controls
+ virtual int32 SpeakerVolumeIsAvailable(bool* available) override
+ {
+ return 0;
+ }
+ virtual int32 SetSpeakerVolume(uint32 volume) override
+ {
+ return 0;
+ }
+ virtual int32 SpeakerVolume(uint32* volume) const override
+ {
+ return 0;
+ }
+ virtual int32 MaxSpeakerVolume(uint32* maxVolume) const override
+ {
+ return 0;
+ }
+ virtual int32 MinSpeakerVolume(uint32* minVolume) const override
+ {
+ return 0;
+ }
+
+ // Microphone volume controls
+ virtual int32 MicrophoneVolumeIsAvailable(bool* available) override
+ {
+ return 0;
+ }
+ virtual int32 SetMicrophoneVolume(uint32 volume) override
+ {
+ return 0;
+ }
+ virtual int32 MicrophoneVolume(uint32* volume) const override
+ {
+ return 0;
+ }
+ virtual int32 MaxMicrophoneVolume(uint32* maxVolume) const override
+ {
+ return 0;
+ }
+ virtual int32 MinMicrophoneVolume(uint32* minVolume) const override
+ {
+ return 0;
+ }
+
+ // Speaker mute control
+ virtual int32 SpeakerMuteIsAvailable(bool* available) override
+ {
+ return 0;
+ }
+ virtual int32 SetSpeakerMute(bool enable) override
+ {
+ return 0;
+ }
+ virtual int32 SpeakerMute(bool* enabled) const override
+ {
+ return 0;
+ }
+
+ // Microphone mute control
+ virtual int32 MicrophoneMuteIsAvailable(bool* available) override
+ {
+ return 0;
+ }
+ virtual int32 SetMicrophoneMute(bool enable) override
+ {
+ return 0;
+ }
+ virtual int32 MicrophoneMute(bool* enabled) const override
+ {
+ return 0;
+ }
+
+ // Stereo support
+ virtual int32 StereoPlayoutIsAvailable(bool* available) const override
+ {
+ return 0;
+ }
+ virtual int32 SetStereoPlayout(bool enable) override
+ {
+ return 0;
+ }
+ virtual int32 StereoPlayout(bool* enabled) const override
+ {
+ return 0;
+ }
+ virtual int32 StereoRecordingIsAvailable(bool* available) const override
+ {
+ *available = true;
+ return 0;
+ }
+ virtual int32 SetStereoRecording(bool enable) override
+ {
+ return 0;
+ }
+ virtual int32 StereoRecording(bool* enabled) const override
+ {
+ *enabled = true;
+ return 0;
+ }
+
+ // Playout delay
+ virtual int32 PlayoutDelay(uint16* delayMS) const override
+ {
+ return 0;
+ }
+
+ // Only supported on Android.
+ virtual bool BuiltInAECIsAvailable() const override
+ {
+ return false;
+ }
+ virtual bool BuiltInAGCIsAvailable() const override
+ {
+ return false;
+ }
+ virtual bool BuiltInNSIsAvailable() const override
+ {
+ return false;
+ }
+
+ // Enables the built-in audio effects. Only supported on Android.
+ virtual int32 EnableBuiltInAEC(bool enable) override
+ {
+ return 0;
+ }
+ virtual int32 EnableBuiltInAGC(bool enable) override
+ {
+ return 0;
+ }
+ virtual int32 EnableBuiltInNS(bool enable) override
+ {
+ return 0;
+ }
+ private:
+ std::unique_ptr<webrtc::AudioDeviceBuffer> deviceBuffer;
+ std::atomic<bool> started{ false };
+ std::atomic<bool> isRecording{ false };
+ std::vector<int16> convertedAudioData;
+ };
+}
diff --git a/Plugin~/WebRTCPlugin/DummyVideoEncoder.cpp b/Plugin~/WebRTCPlugin/DummyVideoEncoder.cpp
new file mode 100644
index 0000000000..1a5dff007c
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/DummyVideoEncoder.cpp
@@ -0,0 +1,94 @@
+#include "pch.h"
+#include "DummyVideoEncoder.h"
+#include "NvVideoCapturer.h"
+#include <common_video/h264/h264_common.h>
+
+namespace WebRTC
+{
+ int32_t DummyVideoEncoder::Encode(
+ const webrtc::VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* codecSpecificInfo,
+ const std::vector<webrtc::FrameType>* frameTypes)
+ {
+ FrameBuffer* frameBuffer = static_cast<FrameBuffer*>(frame.video_frame_buffer().get());
+ std::vector<uint8>& frameDataBuffer = frameBuffer->buffer;
+
+ encodedImage._completeFrame = true;
+ encodedImage.SetTimestamp(frame.timestamp());
+ encodedImage._encodedWidth = frame.video_frame_buffer()->width();
+ encodedImage._encodedHeight = frame.video_frame_buffer()->height();
+ encodedImage.ntp_time_ms_ = frame.ntp_time_ms();
+ encodedImage.rotation_ = frame.rotation();
+ encodedImage.content_type_ = webrtc::VideoContentType::UNSPECIFIED;
+ encodedImage.timing_.flags = webrtc::VideoSendTiming::kInvalid;
+ encodedImage._frameType = webrtc::kVideoFrameDelta;
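+ //scan the H.264 NAL units in the encoded buffer: if any of them is an IDR slice, report this frame to webrtc as a key frame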
+ std::vector<webrtc::H264::NaluIndex> naluIndices =
+ webrtc::H264::FindNaluIndices(&frameDataBuffer[0], frameDataBuffer.size());
+ for (size_t i = 0; i < naluIndices.size(); i++)
+ {
+ webrtc::H264::NaluType NALUType = webrtc::H264::ParseNaluType(frameDataBuffer[naluIndices[i].payload_start_offset]);
+ if (NALUType == webrtc::H264::kIdr)
+ {
+ encodedImage._frameType = webrtc::kVideoFrameKey;
+ break;
+ }
+ }
+
+ if (encodedImage._frameType != webrtc::kVideoFrameKey && frameTypes && (*frameTypes)[0] == webrtc::kVideoFrameKey)
+ {
+ SetKeyFrame();
+ }
+
+ if (lastBitrate.get_sum_kbps() > 0)
+ {
+ SetRateAllocation(lastBitrate, 30);
+ }
+ encodedImage._buffer = &frameDataBuffer[0];
+ encodedImage._length = encodedImage._size = frameDataBuffer.size();
+
+ fragHeader.VerifyAndAllocateFragmentationHeader(naluIndices.size());
+ fragHeader.fragmentationVectorSize = static_cast<uint16_t>(naluIndices.size());
+ for (size_t i = 0; i < naluIndices.size(); i++)
+ {
+ webrtc::H264::NaluIndex const& NALUIndex = naluIndices[i];
+ fragHeader.fragmentationOffset[i] = NALUIndex.payload_start_offset;
+ fragHeader.fragmentationLength[i] = NALUIndex.payload_size;
+ }
+ webrtc::CodecSpecificInfo codecInfo;
+ codecInfo.codecType = webrtc::kVideoCodecH264;
+ callback->OnEncodedImage(encodedImage, &codecInfo, &fragHeader);
+ return 0;
+ }
+
+ int32_t DummyVideoEncoder::SetRateAllocation(const webrtc::VideoBitrateAllocation& allocation, uint32_t framerate)
+ {
+ lastBitrate = allocation;
+ SetRate(allocation.get_sum_kbps() * 1000);
+ return 0;
+ }
+ DummyVideoEncoderFactory::DummyVideoEncoderFactory(NvVideoCapturer* videoCapturer):capturer(videoCapturer){}
+ std::vector<webrtc::SdpVideoFormat> DummyVideoEncoderFactory::GetSupportedFormats() const
+ {
+ const absl::optional<std::string> profileLevelId =
+ webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(webrtc::H264::kProfileConstrainedBaseline, webrtc::H264::kLevel5_1));
+ return { webrtc::SdpVideoFormat(
+ cricket::kH264CodecName,
+ { {cricket::kH264FmtpProfileLevelId, *profileLevelId},
+ {cricket::kH264FmtpLevelAsymmetryAllowed, "1"},
+ {cricket::kH264FmtpPacketizationMode, "1"} }) };
+ }
+
+ webrtc::VideoEncoderFactory::CodecInfo DummyVideoEncoderFactory::QueryVideoEncoder(const webrtc::SdpVideoFormat& format) const
+ {
+ return CodecInfo{ true, false };
+ }
+ std::unique_ptr<webrtc::VideoEncoder> DummyVideoEncoderFactory::CreateVideoEncoder(
+ const webrtc::SdpVideoFormat& format)
+ {
+ auto dummyVideoEncoder = std::make_unique<DummyVideoEncoder>();
+ dummyVideoEncoder->SetKeyFrame.connect(capturer, &NvVideoCapturer::SetKeyFrame);
+ dummyVideoEncoder->SetRate.connect(capturer, &NvVideoCapturer::SetRate);
+ return dummyVideoEncoder;
+ }
+}
diff --git a/Plugin~/WebRTCPlugin/DummyVideoEncoder.h b/Plugin~/WebRTCPlugin/DummyVideoEncoder.h
new file mode 100644
index 0000000000..43437e2ad8
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/DummyVideoEncoder.h
@@ -0,0 +1,58 @@
+#pragma once
+
+namespace WebRTC
+{
+ class NvVideoCapturer;
+ class DummyVideoEncoder : public webrtc::VideoEncoder
+ {
+ public:
+ sigslot::signal0<> SetKeyFrame;
+ sigslot::signal1<uint32> SetRate;
+ //webrtc::VideoEncoder
+ // Initialize the encoder with the information from the codecSettings
+ virtual int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+ int32_t number_of_cores,
+ size_t max_payload_size) override {
+ return 0;
+ }
+ // Register an encode complete callback object.
+ virtual int32_t RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) override {
+ this->callback = callback;
+ return 0;
+ }
+ // Free encoder memory.
+ virtual int32_t Release() override { callback = nullptr; return 0; }
+ // Encode an I420 image (as a part of a video stream). The encoded image
+ // will be returned to the user through the encode complete callback.
+ virtual int32_t Encode(
+ const webrtc::VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ const std::vector<webrtc::FrameType>* frame_types) override;
+ // Default fallback: Just use the sum of bitrates as the single target rate.
+ virtual int32_t SetRateAllocation(const webrtc::VideoBitrateAllocation& allocation, uint32_t framerate) override;
+ private:
+ webrtc::EncodedImageCallback* callback = nullptr;
+ webrtc::EncodedImage encodedImage;
+ webrtc::H264BitstreamParser bitstreamParser;
+ webrtc::RTPFragmentationHeader fragHeader;
+ webrtc::VideoBitrateAllocation lastBitrate;
+ };
+
+ class DummyVideoEncoderFactory : public webrtc::VideoEncoderFactory
+ {
+ public:
+ //VideoEncoderFactory
+ // Returns a list of supported video formats in order of preference, to use
+ // for signaling etc.
+ virtual std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override;
+ // Returns information about how this format will be encoded. The specified
+ // format must be one of the supported formats by this factory.
+ virtual webrtc::VideoEncoderFactory::CodecInfo QueryVideoEncoder(const webrtc::SdpVideoFormat& format) const override;
+ // Creates a VideoEncoder for the specified format.
+ virtual std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
+ const webrtc::SdpVideoFormat& format) override;
+ DummyVideoEncoderFactory(NvVideoCapturer* videoCapturer);
+ private:
+ NvVideoCapturer* capturer;
+ };
+}
diff --git a/Plugin~/WebRTCPlugin/Logger.cpp b/Plugin~/WebRTCPlugin/Logger.cpp
new file mode 100644
index 0000000000..eb09e20982
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/Logger.cpp
@@ -0,0 +1,29 @@
+#include "pch.h"
+#include <cstdarg>
+#include "WebRTCPlugin.h"
+
+namespace WebRTC
+{
+ void LogPrint(const char* fmt, ...)
+ {
+#ifdef _DEBUG
+ va_list vl;
+ va_start(vl, fmt);
+#ifdef _WIN32
+ char buf[2048];
+ vsprintf_s(buf, fmt, vl);
+ debugLog(buf);
+#else
+ vprintf(fmt, vl);
+#endif
+ va_end(vl);
+#endif
+ }
+ void checkf(bool result, const char* msg)
+ {
+ if (!result)
+ {
+ LogPrint("%s", msg);
+ }
+ }
+}
diff --git a/Plugin~/WebRTCPlugin/Logger.h b/Plugin~/WebRTCPlugin/Logger.h
new file mode 100644
index 0000000000..b33b829cfb
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/Logger.h
@@ -0,0 +1,6 @@
+#pragma once
+
+namespace WebRTC
+{
+ void LogPrint(const char* fmt, ...);
+}
diff --git a/Plugin~/WebRTCPlugin/NvEncoder.cpp b/Plugin~/WebRTCPlugin/NvEncoder.cpp
new file mode 100644
index 0000000000..c12e17d782
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/NvEncoder.cpp
@@ -0,0 +1,274 @@
+#include "pch.h"
+#include "NvEncoder.h"
+#include "IUnityGraphicsD3D11.h"
+#include "Context.h"
+#include <cstring>
+
+namespace WebRTC
+{
+ NvEncoder::NvEncoder(int width, int height) :width(width), height(height)
+ {
+ LogPrint(StringFormat("width is %d, height is %d", width, height).c_str());
+ checkf(g_D3D11Device != nullptr, "D3D11Device is invalid");
+ checkf(width > 0 && height > 0, "Invalid width or height!");
+ bool result = true;
+#pragma region open an encode session
+ //open an encode session
+ NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS openEncodeSessionExParams = { 0 };
+ openEncodeSessionExParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER;
+ openEncodeSessionExParams.device = g_D3D11Device;
+ openEncodeSessionExParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX;
+ openEncodeSessionExParams.apiVersion = NVENCAPI_VERSION;
+ result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncOpenEncodeSessionEx(&openEncodeSessionExParams, &pEncoderInterface)));
+ checkf(result, "Unable to open NvEnc encode session");
+ LogPrint(StringFormat("OpenEncodeSession Error is %d", errorCode).c_str());
+#pragma endregion
+#pragma region set initialization parameters
+ nvEncInitializeParams.version = NV_ENC_INITIALIZE_PARAMS_VER;
+ nvEncInitializeParams.encodeWidth = width;
+ nvEncInitializeParams.encodeHeight = height;
+ nvEncInitializeParams.darWidth = width;
+ nvEncInitializeParams.darHeight = height;
+ nvEncInitializeParams.encodeGUID = NV_ENC_CODEC_H264_GUID;
+ nvEncInitializeParams.presetGUID = NV_ENC_PRESET_LOW_LATENCY_HQ_GUID;
+ nvEncInitializeParams.frameRateNum = frameRate;
+ nvEncInitializeParams.frameRateDen = 1;
+ nvEncInitializeParams.enablePTD = 1;
+ nvEncInitializeParams.reportSliceOffsets = 0;
+ nvEncInitializeParams.enableSubFrameWrite = 0;
+ nvEncInitializeParams.encodeConfig = &nvEncConfig;
+ nvEncInitializeParams.maxEncodeWidth = 3840;
+ nvEncInitializeParams.maxEncodeHeight = 2160;
+#pragma endregion
+#pragma region get preset config and set it
+ NV_ENC_PRESET_CONFIG presetConfig = { 0 };
+ presetConfig.version = NV_ENC_PRESET_CONFIG_VER;
+ presetConfig.presetCfg.version = NV_ENC_CONFIG_VER;
+ result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncGetEncodePresetConfig(pEncoderInterface, nvEncInitializeParams.encodeGUID, nvEncInitializeParams.presetGUID, &presetConfig));
+ checkf(result, "Failed to select NVEncoder preset config");
+ std::memcpy(&nvEncConfig, &presetConfig.presetCfg, sizeof(NV_ENC_CONFIG));
+ nvEncConfig.profileGUID = NV_ENC_H264_PROFILE_BASELINE_GUID;
+ nvEncConfig.gopLength = nvEncInitializeParams.frameRateNum;
+ nvEncConfig.rcParams.averageBitRate = bitRate;
+ nvEncConfig.encodeCodecConfig.h264Config.idrPeriod = nvEncConfig.gopLength;
+
+ nvEncConfig.encodeCodecConfig.h264Config.sliceMode = 0;
+ nvEncConfig.encodeCodecConfig.h264Config.sliceModeData = 0;
+ nvEncConfig.encodeCodecConfig.h264Config.repeatSPSPPS = 1;
+ //Quality Control
+ nvEncConfig.encodeCodecConfig.h264Config.level = NV_ENC_LEVEL_H264_51;
+#pragma endregion
+#pragma region get encoder capability
+ NV_ENC_CAPS_PARAM capsParam = { 0 };
+ capsParam.version = NV_ENC_CAPS_PARAM_VER;
+ capsParam.capsToQuery = NV_ENC_CAPS_ASYNC_ENCODE_SUPPORT;
+ int32 asyncMode = 0;
+ result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncGetEncodeCaps(pEncoderInterface, nvEncInitializeParams.encodeGUID, &capsParam, &asyncMode));
+ checkf(result, "Failded to get NVEncoder capability params");
+ nvEncInitializeParams.enableEncodeAsync = 0;
+#pragma endregion
+#pragma region initialize hardware encoder session
+ result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncInitializeEncoder(pEncoderInterface, &nvEncInitializeParams)));
+ checkf(result, "Failed to initialize NVEncoder");
+ LogPrint(StringFormat("nvEncInitializeEncoder error is %d", errorCode).c_str());
+#pragma endregion
+ InitEncoderResources();
+ isNvEncoderSupported = true;
+ }
+ NvEncoder::~NvEncoder()
+ {
+ ReleaseEncoderResources();
+ if (pEncoderInterface)
+ {
+ bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyEncoder(pEncoderInterface));
+ checkf(result, "Failed to destroy NV encoder interface");
+ pEncoderInterface = nullptr;
+ }
+
+ }
+
+ void NvEncoder::UpdateSettings()
+ {
+ bool settingChanged = false;
+ if (nvEncConfig.rcParams.averageBitRate != bitRate)
+ {
+ nvEncConfig.rcParams.averageBitRate = bitRate;
+ settingChanged = true;
+ }
+ if (nvEncInitializeParams.frameRateNum != frameRate)
+ {
+ nvEncInitializeParams.frameRateNum = frameRate;
+ settingChanged = true;
+ }
+
+ if (settingChanged)
+ {
+ NV_ENC_RECONFIGURE_PARAMS nvEncReconfigureParams = { 0 };
+ std::memcpy(&nvEncReconfigureParams.reInitEncodeParams, &nvEncInitializeParams, sizeof(nvEncInitializeParams));
+ nvEncReconfigureParams.version = NV_ENC_RECONFIGURE_PARAMS_VER;
+ bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncReconfigureEncoder(pEncoderInterface, &nvEncReconfigureParams));
+ checkf(result, "Failed to reconfigure encoder setting");
+ }
+ }
+ void NvEncoder::SetRate(uint32 rate)
+ {
+#pragma warning (suppress: 4018)
+ if (rate < lastBitRate)
+ {
+#pragma warning(suppress: 4018)
+ bitRate = rate > minBitRate ? rate : minBitRate;
+ lastBitRate = bitRate;
+ }
+ }
+ //entry for encoding a frame
+ void NvEncoder::EncodeFrame()
+ {
+ UpdateSettings();
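+ //bufferedFrames acts as a fixed-size ring buffer indexed by the running frame count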
+ uint32 bufferIndexToWrite = frameCount % bufferedFrameNum;
+ Frame& frame = bufferedFrames[bufferIndexToWrite];
+#pragma region set frame params
+ //no free buffer, skip this frame
+ if (frame.isEncoding)
+ {
+ return;
+ }
+ frame.isEncoding = true;
+#pragma endregion
+#pragma region configure per-frame encode parameters
+ NV_ENC_PIC_PARAMS picParams = { 0 };
+ picParams.version = NV_ENC_PIC_PARAMS_VER;
+ picParams.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
+ picParams.inputBuffer = frame.inputFrame.mappedResource;
+ picParams.bufferFmt = frame.inputFrame.bufferFormat;
+ picParams.inputWidth = nvEncInitializeParams.encodeWidth;
+ picParams.inputHeight = nvEncInitializeParams.encodeHeight;
+ picParams.outputBitstream = frame.outputFrame;
+ picParams.inputTimeStamp = frameCount;
+#pragma endregion
+#pragma region start encoding
+ if (isIdrFrame)
+ {
+ picParams.encodePicFlags |= NV_ENC_PIC_FLAG_FORCEIDR;
+ }
+ isIdrFrame = false;
+ bool result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncEncodePicture(pEncoderInterface, &picParams)));
+ checkf(result, StringFormat("Failed to encode frame, error is %d", errorCode).c_str());
+#pragma endregion
+ ProcessEncodedFrame(frame);
+ frameCount++;
+ }
+
+ //get encoded frame
+ void NvEncoder::ProcessEncodedFrame(Frame& frame)
+ {
+ //The frame hasn't been encoded, something wrong
+ if (!frame.isEncoding)
+ {
+ return;
+ }
+ frame.isEncoding = false;
+#pragma region retrieve encoded frame from output buffer
+ NV_ENC_LOCK_BITSTREAM lockBitStream = { 0 };
+ lockBitStream.version = NV_ENC_LOCK_BITSTREAM_VER;
+ lockBitStream.outputBitstream = frame.outputFrame;
+ lockBitStream.doNotWait = nvEncInitializeParams.enableEncodeAsync;
+ bool result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncLockBitstream(pEncoderInterface, &lockBitStream)));
+ checkf(result, StringFormat("Failed to lock bit stream, error is %d", errorCode).c_str());
+ if (lockBitStream.bitstreamSizeInBytes)
+ {
+ frame.encodedFrame.resize(lockBitStream.bitstreamSizeInBytes);
+ std::memcpy(frame.encodedFrame.data(), lockBitStream.bitstreamBufferPtr, lockBitStream.bitstreamSizeInBytes);
+ }
+
+ result = NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncUnlockBitstream(pEncoderInterface, frame.outputFrame)));
+ checkf(result, StringFormat("Failed to unlock bit stream, error is %d", errorCode).c_str());
+ frame.isIdrFrame = lockBitStream.pictureType == NV_ENC_PIC_TYPE_IDR;
+#pragma endregion
+ CaptureFrame(frame.encodedFrame);
+ }
+
+ ID3D11Texture2D* NvEncoder::AllocateInputBuffers()
+ {
+ ID3D11Texture2D* inputTextures = nullptr;
+ D3D11_TEXTURE2D_DESC desc = { 0 };
+ desc.Width = width;
+ desc.Height = height;
+ desc.MipLevels = 1;
+ desc.ArraySize = 1;
+ desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+ desc.SampleDesc.Count = 1;
+ desc.Usage = D3D11_USAGE_DEFAULT;
+ desc.BindFlags = D3D11_BIND_RENDER_TARGET;
+ desc.CPUAccessFlags = 0;
+ g_D3D11Device->CreateTexture2D(&desc, NULL, &inputTextures);
+ return inputTextures;
+ }
+ NV_ENC_REGISTERED_PTR NvEncoder::RegisterResource(void *buffer)
+ {
+ NV_ENC_REGISTER_RESOURCE registerResource = { 0 };
+ registerResource.version = NV_ENC_REGISTER_RESOURCE_VER;
+ registerResource.resourceType = NV_ENC_INPUT_RESOURCE_TYPE_DIRECTX;
+ registerResource.resourceToRegister = buffer;
+
+ if (!registerResource.resourceToRegister)
+ LogPrint("resource is not initialized");
+ registerResource.width = width;
+ registerResource.height = height;
+ LogPrint(StringFormat("nvEncRegisterResource: width is %d, height is %d", registerResource.width, registerResource.height).c_str());
+ registerResource.bufferFormat = NV_ENC_BUFFER_FORMAT_ARGB;
+ checkf(NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncRegisterResource(pEncoderInterface, &registerResource))),
+ StringFormat("nvEncRegisterResource error is %d", errorCode).c_str());
+ return registerResource.registeredResource;
+ }
+ void NvEncoder::MapResources(InputFrame& inputFrame)
+ {
+ NV_ENC_MAP_INPUT_RESOURCE mapInputResource = { 0 };
+ mapInputResource.version = NV_ENC_MAP_INPUT_RESOURCE_VER;
+ mapInputResource.registeredResource = inputFrame.registeredResource;
+ checkf(NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncMapInputResource(pEncoderInterface, &mapInputResource))),
+ StringFormat("nvEncMapInputResource error is %d", errorCode).c_str());
+ inputFrame.mappedResource = mapInputResource.mappedResource;
+ }
+ NV_ENC_OUTPUT_PTR NvEncoder::InitializeBitstreamBuffer()
+ {
+ NV_ENC_CREATE_BITSTREAM_BUFFER createBitstreamBuffer = { 0 };
+ createBitstreamBuffer.version = NV_ENC_CREATE_BITSTREAM_BUFFER_VER;
+ checkf(NV_RESULT((errorCode = ContextManager::GetInstance()->pNvEncodeAPI->nvEncCreateBitstreamBuffer(pEncoderInterface, &createBitstreamBuffer))),
+ StringFormat("nvEncCreateBitstreamBuffer error is %d", errorCode).c_str());
+ return createBitstreamBuffer.bitstreamBuffer;
+ }
+ void NvEncoder::InitEncoderResources()
+ {
+ for (uint32 i = 0; i < bufferedFrameNum; i++)
+ {
+ renderTextures[i] = AllocateInputBuffers();
+ Frame& frame = bufferedFrames[i];
+ frame.inputFrame.registeredResource = RegisterResource(renderTextures[i]);
+ frame.inputFrame.bufferFormat = NV_ENC_BUFFER_FORMAT_ARGB;
+ MapResources(frame.inputFrame);
+ frame.outputFrame = InitializeBitstreamBuffer();
+ }
+ }
+ void NvEncoder::ReleaseFrameInputBuffer(Frame& frame)
+ {
+ bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncUnmapInputResource(pEncoderInterface, frame.inputFrame.mappedResource));
+ checkf(result, "Failed to unmap input resource");
+ frame.inputFrame.mappedResource = nullptr;
+
+ result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncUnregisterResource(pEncoderInterface, frame.inputFrame.registeredResource));
+ checkf(result, "Failed to unregister input buffer resource");
+ frame.inputFrame.registeredResource = nullptr;
+ }
+ void NvEncoder::ReleaseEncoderResources()
+ {
+ for (Frame& frame : bufferedFrames)
+ {
+ ReleaseFrameInputBuffer(frame);
+ bool result = NV_RESULT(ContextManager::GetInstance()->pNvEncodeAPI->nvEncDestroyBitstreamBuffer(pEncoderInterface, frame.outputFrame));
+ checkf(result, "Failed to destroy output buffer bit stream");
+ frame.outputFrame = nullptr;
+ }
+ }
+}
+
+
diff --git a/Plugin~/WebRTCPlugin/NvEncoder.h b/Plugin~/WebRTCPlugin/NvEncoder.h
new file mode 100644
index 0000000000..490b92f8e7
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/NvEncoder.h
@@ -0,0 +1,72 @@
+#pragma once
+#define _WINSOCKAPI_ //Stops windows.h including winsock.h
+#include <d3d11.h>
+#include "nvEncodeAPI.h"
+#include <vector>
+#include <atomic>
+
+namespace WebRTC
+{
+ using OutputFrame = NV_ENC_OUTPUT_PTR;
+ class NvEncoder
+ {
+ private:
+ struct InputFrame
+ {
+ NV_ENC_REGISTERED_PTR registeredResource;
+ NV_ENC_INPUT_PTR mappedResource;
+ NV_ENC_BUFFER_FORMAT bufferFormat;
+ };
+
+
+ struct Frame
+ {
+ InputFrame inputFrame;
+ OutputFrame outputFrame;
+ std::vector<uint8> encodedFrame;
+ bool isIdrFrame = false;
+ std::atomic<bool> isEncoding{ false };
+ };
+
+ public:
+ NvEncoder(int width, int height);
+ ~NvEncoder();
+
+ void SetRate(uint32 rate);
+ void UpdateSettings();
+ void EncodeFrame();
+ bool IsSupported() const { return isNvEncoderSupported; }
+ void SetIdrFrame() { isIdrFrame = true; }
+ uint64 GetCurrentFrameCount() { return frameCount; }
+ sigslot::signal1&> CaptureFrame;
+ void InitEncoderResources();
+
+ private:
+ void LoadNvEncApi();
+ void ReleaseFrameInputBuffer(Frame& frame);
+ void ReleaseEncoderResources();
+ void ProcessEncodedFrame(Frame& frame);
+ ID3D11Texture2D* AllocateInputBuffers();
+ NV_ENC_REGISTERED_PTR RegisterResource(void *pBuffer);
+ void MapResources(InputFrame& inputFrame);
+ NV_ENC_OUTPUT_PTR InitializeBitstreamBuffer();
+ NV_ENC_INITIALIZE_PARAMS nvEncInitializeParams = {};
+ NV_ENC_CONFIG nvEncConfig = {};
+ _NVENCSTATUS errorCode;
+ Frame bufferedFrames[bufferedFrameNum];
+ uint64 frameCount = 0;
+ void* pEncoderInterface = nullptr;
+ bool isNvEncoderSupported = false;
+ bool isIdrFrame = false;
+ int width = 1920;
+ int height = 1080;
+ //10Mbps
+ int bitRate = 10000000;
+ //100Mbps
+ int lastBitRate = 100000000;
+ //5Mbps
+ const int minBitRate = 5000000;
+ int frameRate = 45;
+ };
+
+}
diff --git a/Plugin~/WebRTCPlugin/NvVideoCapturer.cpp b/Plugin~/WebRTCPlugin/NvVideoCapturer.cpp
new file mode 100644
index 0000000000..9e9a430125
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/NvVideoCapturer.cpp
@@ -0,0 +1,47 @@
+#include "pch.h"
+#include "NvVideoCapturer.h"
+
+namespace WebRTC
+{
+ NvVideoCapturer::NvVideoCapturer()
+ {
+ set_enable_video_adapter(false);
+ SetSupportedFormats(std::vector<cricket::VideoFormat>(1, cricket::VideoFormat(width, height, cricket::VideoFormat::FpsToInterval(framerate), cricket::FOURCC_H264)));
+ }
+ void NvVideoCapturer::EncodeVideoData()
+ {
+ if (captureStarted && !captureStopped)
+ {
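+ //copy the Unity render texture into the encoder input texture for the current ring-buffer slot, then submit it to NVENC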
+ uint32 curFrameNum = static_cast<uint32>(nvEncoder->GetCurrentFrameCount() % bufferedFrameNum);
+ context->CopyResource(renderTextures[curFrameNum], unityRT);
+ nvEncoder->EncodeFrame();
+ }
+ }
+ void NvVideoCapturer::CaptureFrame(std::vector<uint8>& data)
+ {
+ rtc::scoped_refptr<FrameBuffer> buffer = new rtc::RefCountedObject<FrameBuffer>(width, height, data);
+ int64 timestamp = rtc::TimeMillis();
+ webrtc::VideoFrame videoFrame{buffer, webrtc::VideoRotation::kVideoRotation_0, timestamp};
+ videoFrame.set_ntp_time_ms(timestamp);
+ OnFrame(videoFrame, width, height);
+ }
+ void NvVideoCapturer::StartEncoder()
+ {
+ captureStarted = true;
+ SetKeyFrame();
+ }
+ void NvVideoCapturer::SetKeyFrame()
+ {
+ nvEncoder->SetIdrFrame();
+ }
+ void NvVideoCapturer::SetRate(uint32 rate)
+ {
+ nvEncoder->SetRate(rate);
+ }
+
+ void NvVideoCapturer::InitializeEncoder(int32 width, int32 height)
+ {
+ nvEncoder = std::make_unique<NvEncoder>(width, height);
+ nvEncoder->CaptureFrame.connect(this, &NvVideoCapturer::CaptureFrame);
+ }
+}
diff --git a/Plugin~/WebRTCPlugin/NvVideoCapturer.h b/Plugin~/WebRTCPlugin/NvVideoCapturer.h
new file mode 100644
index 0000000000..5686f22160
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/NvVideoCapturer.h
@@ -0,0 +1,98 @@
+#pragma once
+#include "NvEncoder.h"
+
+namespace WebRTC
+{
+ class NvVideoCapturer : public cricket::VideoCapturer
+ {
+ public:
+ NvVideoCapturer();
+ void EncodeVideoData();
+ // Start the video capturer with the specified capture format.
+ virtual cricket::CaptureState Start(const cricket::VideoFormat& Format) override
+ {
+ return cricket::CS_RUNNING;
+ }
+ // Stop the video capturer.
+ virtual void Stop() override
+ {
+ captureStopped = true;
+ nvEncoder.reset();
+ }
+ // Check if the video capturer is running.
+ virtual bool IsRunning() override
+ {
+ return true;
+ }
+ // Returns true if the capturer is screencasting. This can be used to
+ // implement screencast specific behavior.
+ virtual bool IsScreencast() const override
+ {
+ return false;
+ }
+ void StartEncoder();
+ void InitializeEncoder(int32 width, int32 height);
+ void SetKeyFrame();
+ void SetRate(uint32 rate);
+ void CaptureFrame(std::vector<uint8>& data);
+ bool CaptureStarted() { return captureStarted; }
+ public:
+ UnityFrameBuffer* unityRT = nullptr;
+ private:
+ // subclasses override this virtual method to provide a vector of fourccs, in
+ // order of preference, that are expected by the media engine.
+ bool GetPreferredFourccs(std::vector<uint32>* fourccs) override
+ {
+ fourccs->push_back(cricket::FOURCC_H264);
+ return true;
+ }
+ std::unique_ptr nvEncoder;
+
+ //just fake info
+ const int32 width = 1280;
+ const int32 height = 720;
+ const int32 framerate = 60;
+
+ bool captureStarted = false;
+ bool captureStopped = false;
+
+ };
+
+ class FrameBuffer : public webrtc::VideoFrameBuffer
+ {
+ public:
+ std::vector& buffer;
+
+ FrameBuffer(int width, int height, std::vector<uint8>& data) : frameWidth(width), frameHeight(height), buffer(data) {}
+
+ //webrtc::VideoFrameBuffer pure virtual functions
+ // This function specifies in what pixel format the data is stored in.
+ virtual Type type() const override
+ {
+ //fake I420 to avoid ToI420() being called
+ return Type::kI420;
+ }
+ // The resolution of the frame in pixels. For formats where some planes are
+ // subsampled, this is the highest-resolution plane.
+ virtual int width() const override
+ {
+ return frameWidth;
+ }
+ virtual int height() const override
+ {
+ return frameHeight;
+ }
+ // Returns a memory-backed frame buffer in I420 format. If the pixel data is
+ // in another format, a conversion will take place. All implementations must
+ // provide a fallback to I420 for compatibility with e.g. the internal WebRTC
+ // software encoders.
+ virtual rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override
+ {
+ return nullptr;
+ }
+
+ private:
+ int frameWidth;
+ int frameHeight;
+ };
+}
diff --git a/Plugin~/WebRTCPlugin/PeerConnectionObject.cpp b/Plugin~/WebRTCPlugin/PeerConnectionObject.cpp
new file mode 100644
index 0000000000..3456740c84
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/PeerConnectionObject.cpp
@@ -0,0 +1,320 @@
+#include "pch.h"
+#include "Context.h"
+#include "PeerConnectionObject.h"
+
+namespace WebRTC
+{
+ PeerConnectionObject::PeerConnectionObject(int id) : id(id) {}
+
+ PeerConnectionObject::~PeerConnectionObject()
+ {
+ if (connection == nullptr)
+ {
+ return;
+ }
+ auto senders = connection->GetSenders();
+ for (auto sender : senders)
+ {
+ connection->RemoveTrack(sender);
+ }
+
+ auto state = connection->peer_connection_state();
+ if (state != webrtc::PeerConnectionInterface::PeerConnectionState::kClosed)
+ {
+ connection->Close();
+ }
+ connection = nullptr;
+ }
+
+ PeerConnectionObject* Context::CreatePeerConnection(int id)
+ {
+ rtc::scoped_refptr<PeerConnectionObject> obj = new rtc::RefCountedObject<PeerConnectionObject>(id);
+ webrtc::PeerConnectionInterface::RTCConfiguration _config;
+ _config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+ obj->connection = peerConnectionFactory->CreatePeerConnection(_config, nullptr, nullptr, obj);
+
+ if (obj->connection == nullptr)
+ {
+ return nullptr;
+ }
+
+ clients[id] = std::move(obj);
+ return clients[id].get();
+ }
+
+ PeerConnectionObject* Context::CreatePeerConnection(int id, const std::string& conf)
+ {
+ rtc::scoped_refptr<PeerConnectionObject> obj = new rtc::RefCountedObject<PeerConnectionObject>(id);
+ webrtc::PeerConnectionInterface::RTCConfiguration _config;
+ Convert(conf, _config);
+ obj->connection = peerConnectionFactory->CreatePeerConnection(_config, nullptr, nullptr, obj);
+ if (obj->connection == nullptr)
+ {
+ return nullptr;
+ }
+ clients[id] = std::move(obj);
+ return clients[id].get();
+ }
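+
+ // Illustrative example of the JSON shape Convert() is expected to parse,
+ // mirroring what GetConfiguration() below produces (field names assumed from it):
+ // {"iceServers":[{"urls":["stun:stun.example.com:19302"],
+ // "username":"","credential":"","credentialType":0}]}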
+
+ void PeerConnectionObject::OnSuccess(webrtc::SessionDescriptionInterface* desc)
+ {
+ std::string out;
+ desc->ToString(&out);
+ auto type = ConvertSdpType(desc->GetType());
+ if (onCreateSDSuccess != nullptr)
+ {
+ onCreateSDSuccess(type, out.c_str());
+ }
+ }
+
+ void PeerConnectionObject::OnFailure(webrtc::RTCError error)
+ {
+ //TODO: convert webrtc::RTCError into an RTCError for the managed side,
+ //e.g. RTCError _error = { RTCErrorDetailType::IdpTimeout };
+ if (onCreateSDFailure != nullptr)
+ {
+ onCreateSDFailure();
+ }
+ }
+
+ void PeerConnectionObject::OnDataChannel(rtc::scoped_refptr<webrtc::DataChannelInterface> remoteDataChannel)
+ {
+ auto remoteDataChannelObj = new DataChannelObject(remoteDataChannel, *this);
+ int id = remoteDataChannelObj->GetID();
+ remoteDataChannels[id] = remoteDataChannelObj;
+ if (onDataChannel != nullptr)
+ {
+ onDataChannel(remoteDataChannels[id]);
+ }
+ }
+ void PeerConnectionObject::OnIceCandidate(const webrtc::IceCandidateInterface* candidate)
+ {
+ std::string out;
+
+ if (!candidate->ToString(&out))
+ {
+ DebugError("Can't make string form of sdp.");
+ }
+ if (onIceCandidate != nullptr)
+ {
+ onIceCandidate(out.c_str(), candidate->sdp_mid().c_str(), candidate->sdp_mline_index());
+ }
+ }
+
+ void PeerConnectionObject::OnRenegotiationNeeded()
+ {
+ if (onRenegotiationNeeded != nullptr)
+ {
+ onRenegotiationNeeded();
+ }
+ }
+
+ void PeerConnectionObject::OnTrack(rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver)
+ {
+ if (onTrack != nullptr)
+ {
+ onTrack(transceiver.get());
+ }
+ }
+ // Called any time the IceConnectionState changes.
+ void PeerConnectionObject::OnIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state)
+ {
+ if (onIceConnectionChange != nullptr)
+ {
+ onIceConnectionChange(new_state);
+ }
+ }
+ // Called any time the IceGatheringState changes.
+ void PeerConnectionObject::OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGatheringState new_state)
+ {
+ DebugLog("OnIceGatheringChange");
+ }
+
+ void PeerConnectionObject::OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state)
+ {
+ DebugLog("OnSignalingChange %d", new_state);
+ }
+
+ void PeerConnectionObject::OnAddStream(rtc::scoped_refptr<webrtc::MediaStreamInterface> stream)
+ {
+ DebugLog("OnAddStream");
+ }
+
+ void PeerConnectionObject::OnRemoveStream(rtc::scoped_refptr<webrtc::MediaStreamInterface> stream)
+ {
+ DebugLog("OnRemoveStream");
+ }
+
+ void PeerConnectionObject::Close()
+ {
+ if (connection != nullptr)
+ {
+ connection->Close();
+ }
+ }
+
+ void PeerConnectionObject::SetLocalDescription(const RTCSessionDescription& desc)
+ {
+ webrtc::SdpParseError error;
+ auto _desc = webrtc::CreateSessionDescription(ConvertSdpType(desc.type), desc.sdp, &error);
+ if (!_desc.get())
+ {
+ DebugLog("Can't parse received session description message.");
+ DebugLog("SdpParseError:\n%s", error.description);
+ return;
+ }
+ auto observer = PeerSDPObserver::Create(this->onSetSDSuccess, this->onSetSDFailure);
+ connection->SetLocalDescription(observer, _desc.release());
+ }
+
+ void PeerConnectionObject::SetRemoteDescription(const RTCSessionDescription& desc)
+ {
+ webrtc::SdpParseError error;
+ auto _desc = webrtc::CreateSessionDescription(ConvertSdpType(desc.type), desc.sdp, &error);
+ if (!_desc.get())
+ {
+ DebugLog("Can't parse received session description message.");
+ DebugLog("SdpParseError:\n%s", error.description);
+ return;
+ }
+ auto observer = PeerSDPObserver::Create(this->onSetSDSuccess, this->onSetSDFailure);
+ connection->SetRemoteDescription(observer, _desc.release());
+ }
+
+ webrtc::RTCErrorType PeerConnectionObject::SetConfiguration(const std::string& config)
+ {
+ webrtc::PeerConnectionInterface::RTCConfiguration _config;
+ Convert(config, _config);
+
+ webrtc::RTCError error;
+ if (!connection->SetConfiguration(_config, &error))
+ {
+ LogPrint(error.message());
+ }
+ return error.type();
+ }
+
+ void PeerConnectionObject::GetConfiguration(std::string& config) const
+ {
+ auto _config = connection->GetConfiguration();
+
+ Json::Value root;
+ root["iceServers"] = Json::Value(Json::arrayValue);
+ for (auto iceServer : _config.servers)
+ {
+ Json::Value jsonIceServer = Json::Value(Json::objectValue);
+ jsonIceServer["username"] = iceServer.username;
+ jsonIceServer["credential"] = iceServer.password;
+ jsonIceServer["credentialType"] = (int)RTCIceCredentialType::Password;
+ jsonIceServer["urls"] = Json::Value(Json::arrayValue);
+ for (auto url : iceServer.urls)
+ {
+ jsonIceServer["urls"].append(url);
+ }
+ root["iceServers"].append(jsonIceServer);
+ }
+ Json::FastWriter writer;
+ config = writer.write(root);
+ }
+
+ void PeerConnectionObject::CreateOffer(const RTCOfferOptions & options)
+ {
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions _options;
+ _options.ice_restart = options.iceRestart;
+ _options.offer_to_receive_audio = options.offerToReceiveAudio;
+ _options.offer_to_receive_video = options.offerToReceiveVideo;
+ connection->CreateOffer(this, _options);
+ }
+
+ void PeerConnectionObject::CreateAnswer(const RTCAnswerOptions& options)
+ {
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions _options;
+ _options.ice_restart = options.iceRestart;
+ connection->CreateAnswer(this, _options);
+ }
+
+ void PeerConnectionObject::AddIceCandidate(const RTCIceCandidate& candidate)
+ {
+ webrtc::SdpParseError error;
+ std::unique_ptr<webrtc::IceCandidateInterface> _candidate(
+ webrtc::CreateIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.candidate, &error));
+ connection->AddIceCandidate(_candidate.get());
+ }
+
+ void PeerConnectionObject::GetLocalDescription(RTCSessionDescription& desc) const
+ {
+ std::string out;
+ auto current = connection->current_local_description();
+ current->ToString(&out);
+
+ desc.type = ConvertSdpType(current->GetType());
+ //TODO: Linux compatibility
+ desc.sdp = (char*)CoTaskMemAlloc(out.size() + 1);
+ out.copy(desc.sdp, out.size());
+ desc.sdp[out.size()] = '\0';
+ }
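+
+ // Note: desc.sdp is allocated with CoTaskMemAlloc, so the managed caller
+ // (or the interop marshaler) is expected to free it; on non-Windows
+ // platforms a different allocator will be needed, as the TODO above notes.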
+
+ DataChannelObject* PeerConnectionObject::CreateDataChannel(const char* label, const RTCDataChannelInit& options)
+ {
+ webrtc::DataChannelInit config;
+ config.reliable = options.reliable;
+ config.ordered = options.ordered;
+ config.maxRetransmitTime = options.maxRetransmitTime;
+ config.maxRetransmits = options.maxRetransmits;
+ config.protocol = options.protocol;
+ config.negotiated = options.negotiated;
+
+ auto channel = connection->CreateDataChannel(label, &config);
+ auto dataChannelObj = new DataChannelObject(channel, *this);
+ int id = dataChannelObj->GetID();
+ localDataChannels[id] = dataChannelObj;
+ return localDataChannels[id];
+ }
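+
+ // Usage sketch (illustrative): the exported C API creates a channel and
+ // then wires its callbacks, e.g.
+ // auto channel = obj->CreateDataChannel("data", options);
+ // channel->RegisterOnMessage(onMessage);
+ // channel->RegisterOnOpen(onOpen);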
+#pragma warning(push)
+#pragma warning(disable: 4715)
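+ // C4715 ("not all control paths return a value") is disabled because the
+ // switches below cover every enum value, which the compiler cannot prove.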
+ RTCIceConnectionState PeerConnectionObject::GetIceCandidateState()
+ {
+ auto state = connection->ice_connection_state();
+ switch (state)
+ {
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionNew:
+ return RTCIceConnectionState::New;
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionChecking:
+ return RTCIceConnectionState::Checking;
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionConnected:
+ return RTCIceConnectionState::Connected;
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionCompleted:
+ return RTCIceConnectionState::Completed;
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionFailed:
+ return RTCIceConnectionState::Failed;
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionDisconnected:
+ return RTCIceConnectionState::Disconnected;
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionClosed:
+ return RTCIceConnectionState::Closed;
+ case webrtc::PeerConnectionInterface::IceConnectionState::kIceConnectionMax:
+ return RTCIceConnectionState::Max;
+ }
+ }
+
+ RTCPeerConnectionState PeerConnectionObject::GetConnectionState()
+ {
+ auto state = connection->peer_connection_state();
+ switch (state)
+ {
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kClosed:
+ return RTCPeerConnectionState::Closed;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kConnected:
+ return RTCPeerConnectionState::Connected;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting:
+ return RTCPeerConnectionState::Connecting;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected:
+ return RTCPeerConnectionState::Disconnected;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kFailed:
+ return RTCPeerConnectionState::Failed;
+ case webrtc::PeerConnectionInterface::PeerConnectionState::kNew:
+ return RTCPeerConnectionState::New;
+ }
+ }
+#pragma warning(pop)
+}
+
diff --git a/Plugin~/WebRTCPlugin/PeerConnectionObject.h b/Plugin~/WebRTCPlugin/PeerConnectionObject.h
new file mode 100644
index 0000000000..022f6f6f6b
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/PeerConnectionObject.h
@@ -0,0 +1,117 @@
+#pragma once
+#include "WebRTCPlugin.h"
+#include "DataChannelObject.h"
+
+
+namespace WebRTC
+{
+ using DelegateCreateSDSuccess = void(*)(RTCSdpType, const char*);
+ using DelegateCreateSDFailure = void(*)();
+ using DelegateSetSDSuccess = void(*)();
+ using DelegateSetSDFailure = void(*)();
+ using DelegateLocalSdpReady = void(*)(const char*, const char*);
+ using DelegateIceCandidate = void(*)(const char*, const char*, const int);
+ using DelegateOnIceConnectionChange = void(*)(webrtc::PeerConnectionInterface::IceConnectionState);
+ using DelegateOnDataChannel = void(*)(DataChannelObject*);
+ using DelegateOnRenegotiationNeeded = void(*)();
+ using DelegateOnTrack = void(*)(webrtc::RtpTransceiverInterface*);
+
+ class PeerConnectionObject
+ : public webrtc::CreateSessionDescriptionObserver
+ , public webrtc::PeerConnectionObserver
+ {
+ public:
+ PeerConnectionObject(int id);
+ ~PeerConnectionObject();
+
+ void Close();
+ void SetLocalDescription(const RTCSessionDescription& desc);
+ void GetLocalDescription(RTCSessionDescription& desc) const;
+ void SetRemoteDescription(const RTCSessionDescription& desc);
+ webrtc::RTCErrorType SetConfiguration(const std::string& config);
+ void GetConfiguration(std::string& config) const;
+ void CreateOffer(const RTCOfferOptions& options);
+ void CreateAnswer(const RTCAnswerOptions& options);
+ void AddIceCandidate(const RTCIceCandidate& candidate);
+ DataChannelObject* CreateDataChannel(const char* label, const RTCDataChannelInit& options);
+
+ void RegisterCallbackSetSD(DelegateSetSDSuccess onSuccess, DelegateSetSDFailure onFailure)
+ {
+ onSetSDSuccess = onSuccess;
+ onSetSDFailure = onFailure;
+ }
+ void RegisterCallbackCreateSD(DelegateCreateSDSuccess onSuccess, DelegateCreateSDFailure onFailure)
+ {
+ onCreateSDSuccess = onSuccess;
+ onCreateSDFailure = onFailure;
+ }
+ void RegisterLocalSdpReady(DelegateLocalSdpReady callback) { onLocalSdpReady = callback; }
+ void RegisterIceCandidate(DelegateIceCandidate callback) { onIceCandidate = callback; }
+ void RegisterIceConnectionChange(DelegateOnIceConnectionChange callback) { onIceConnectionChange = callback; };
+ void RegisterOnDataChannel(DelegateOnDataChannel callback) { onDataChannel = callback; }
+ void RegisterOnRenegotiationNeeded(DelegateOnRenegotiationNeeded callback) { onRenegotiationNeeded = callback; }
+ void RegisterOnTrack(DelegateOnTrack callback) { onTrack = callback; }
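+
+ // Typical wiring from the exported C API (illustrative): register the
+ // delegates before kicking off negotiation, e.g.
+ // obj->RegisterCallbackCreateSD(onCreateSuccess, onCreateFailure);
+ // obj->RegisterIceCandidate(onIceCandidate);
+ // obj->CreateOffer(options); // callbacks typically fire on WebRTC's signaling thread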
+
+ RTCPeerConnectionState GetConnectionState();
+ RTCIceConnectionState GetIceCandidateState();
+
+ //webrtc::CreateSessionDescriptionObserver
+ // This callback transfers the ownership of the |desc|.
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ // The OnFailure callback takes an RTCError, which consists of an
+ // error code and a string.
+ void OnFailure(webrtc::RTCError error) override;
+ // webrtc::PeerConnectionObserver
+ // Triggered when the SignalingState changed.
+ void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) override;
+ // Triggered when media is received on a new stream from remote peer.
+ void OnAddStream(rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;
+ // Triggered when a remote peer closes a stream.
+ void OnRemoveStream(rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;
+ // Triggered when a remote peer opens a data channel.
+ void OnDataChannel(rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) override;
+ // Triggered when renegotiation is needed. For example, an ICE restart
+ // has begun.
+ void OnRenegotiationNeeded() override;
+ // Called any time the IceConnectionState changes.
+ void OnIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state) override;
+ // Called any time the IceGatheringState changes.
+ void OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGatheringState new_state) override;
+ // A new ICE candidate has been gathered.
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+ // Ice candidates have been removed.
+ void OnIceCandidatesRemoved(const std::vector<cricket::Candidate>& candidates) override {}
+ // Called when the ICE connection receiving status changes.
+ void OnIceConnectionReceivingChange(bool receiving) override {}
+ // This is called when signaling indicates a transceiver will be receiving
+ // media from the remote endpoint. This is fired during a call to
+ // SetRemoteDescription. The receiving track can be accessed by:
+ // |transceiver->receiver()->track()| and its associated streams by
+ // |transceiver->receiver()->streams()|.
+ // Note: This will only be called if Unified Plan semantics are specified.
+ // This behavior is specified in section 2.2.8.2.5 of the "Set the
+ // RTCSessionDescription" algorithm:
+ // https://w3c.github.io/webrtc-pc/#set-description
+ void OnTrack(
+ rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) override;
+
+ friend class DataChannelObject;
+
+ public:
+ DelegateCreateSDSuccess onCreateSDSuccess;
+ DelegateCreateSDFailure onCreateSDFailure;
+ DelegateSetSDSuccess onSetSDSuccess;
+ DelegateSetSDFailure onSetSDFailure;
+ DelegateLocalSdpReady onLocalSdpReady;
+ DelegateIceCandidate onIceCandidate;
+ DelegateOnIceConnectionChange onIceConnectionChange;
+ DelegateOnDataChannel onDataChannel;
+ DelegateOnRenegotiationNeeded onRenegotiationNeeded;
+ DelegateOnTrack onTrack;
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> connection;
+ private:
+ std::map<int, DataChannelObject*> localDataChannels;
+ std::map<int, DataChannelObject*> remoteDataChannels;
+ int32 id;
+ };
+}
diff --git a/Plugin~/WebRTCPlugin/Utils.cpp b/Plugin~/WebRTCPlugin/Utils.cpp
new file mode 100644
index 0000000000..bb399c1139
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/Utils.cpp
@@ -0,0 +1,81 @@
+#include "pch.h"
+#include "Utils.h"
+
+namespace WebRTC
+{
+ ProcessKeyEventDownFuncType processKeyDownEventFunc = nullptr;
+ ProcessKeyEventUpFuncType processKeyUpEventFunc = nullptr;
+ ProcessMouseButtonDownFuncType processMouseButtonDownFunc = nullptr;
+ ProcessMouseButtonUpFuncType processMouseButtonUpFunc = nullptr;
+ ProcessMouseMoveFuncType processMouseMoveFunc = nullptr;
+ ProcessMouseWheelFuncType processMouseWheelFunc = nullptr;
+
+ void ProcessKeyDown(uint8 keyCode)
+ {
+ if (processKeyDownEventFunc != nullptr)
+ {
+ processKeyDownEventFunc(keyCode);
+ }
+ }
+ void ProcessKeyUp(uint8 keyCode)
+ {
+ if (processKeyUpEventFunc != nullptr)
+ {
+ processKeyUpEventFunc(keyCode);
+ }
+ }
+ void ProcessMouseButtonDown(uint8 buttonType)
+ {
+ if (processMouseButtonDownFunc != nullptr)
+ {
+ processMouseButtonDownFunc(buttonType);
+ }
+ }
+ void ProcessMouseButtonUp(uint8 buttonType)
+ {
+ if (processMouseButtonUpFunc != nullptr)
+ {
+ processMouseButtonUpFunc(buttonType);
+ }
+ }
+ void ProcessMouseMove(int16 deltaX, int16 deltaY)
+ {
+ if (processMouseMoveFunc != nullptr)
+ {
+ processMouseMoveFunc(deltaX, deltaY);
+ }
+ }
+ void ProcessMouseWheel(int16 amount)
+ {
+ if (processMouseWheelFunc != nullptr)
+ {
+ processMouseWheelFunc(amount);
+ }
+ }
+}
+
+namespace NvCodec
+{
+ //unity log function to be called on plugin side
+ DebugLogFuncType debugLogFunc = nullptr;
+ SetResolutionFuncType setResFunc = nullptr;
+
+ void checkf(bool result, const char* msg)
+ {
+ if (!result)
+ {
+ LogPrint(msg);
+ }
+ }
+ void SetResolution(int32* width, int32* height)
+ {
+ if (setResFunc != nullptr)
+ {
+ setResFunc(width, height);
+ }
+ }
+}
+
+
+
+
diff --git a/Plugin~/WebRTCPlugin/Utils.h b/Plugin~/WebRTCPlugin/Utils.h
new file mode 100644
index 0000000000..d4547c9679
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/Utils.h
@@ -0,0 +1,98 @@
+#pragma once
+#include <string>
+#include <memory>
+#include <vector>
+#include <map>
+#include "d3d11.h"
+
+using uint8 = unsigned char;
+using uint16 = unsigned short int;
+using uint32 = unsigned int;
+using uint64 = unsigned long long;
+using int8 = signed char;
+using int16 = signed short int;
+using int32 = signed int;
+using int64 = signed long long;
+
+namespace WebRTC
+{
+ enum class ProxyToSignalServerMsg : uint8
+ {
+ answer,
+ iceCandiate,
+ disconnect
+ };
+
+ enum class SignalServerToProxyMsg : uint8
+ {
+ offer,
+ iceCandidate,
+ clientDisconnected,
+ config,
+ };
+
+ enum class InputEvent : uint8
+ {
+ KeyDown,
+ KeyUp,
+ MouseDown,
+ MouseUp,
+ MouseMove,
+ MouseWheel,
+ };
+
+ using ProcessKeyEventDownFuncType = void(*)(uint8);
+ using ProcessKeyEventUpFuncType = void(*)(uint8);
+ using ProcessMouseButtonDownFuncType = void(*)(uint8);
+ using ProcessMouseButtonUpFuncType = void(*)(uint8);
+ using ProcessMouseMoveFuncType = void(*)(int16, int16);
+ using ProcessMouseWheelFuncType = void(*)(int16);
+
+ extern ProcessKeyEventDownFuncType processKeyDownEventFunc;
+ extern ProcessKeyEventUpFuncType processKeyUpEventFunc;
+ extern ProcessMouseButtonDownFuncType processMouseButtonDownFunc;
+ extern ProcessMouseButtonUpFuncType processMouseButtonUpFunc;
+ extern ProcessMouseMoveFuncType processMouseMoveFunc;
+ extern ProcessMouseWheelFuncType processMouseWheelFunc;
+
+ void ProcessKeyDown(uint8 keyCode);
+ void ProcessKeyUp(uint8 keyCode);
+ void ProcessMouseButtonDown(uint8 buttonType);
+ void ProcessMouseButtonUp(uint8 buttonType);
+ void ProcessMouseMove(int16 deltaX, int16 deltaY);
+ void ProcessMouseWheel(int16 amount);
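+
+ // Illustrative assumption (not defined in this header): a remote input
+ // message is a one-byte InputEvent tag followed by its payload, so a
+ // MouseMove message would be decoded as
+ // { InputEvent::MouseMove, int16 deltaX, int16 deltaY }
+ // and dispatched via ProcessMouseMove(deltaX, deltaY).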
+
+ const std::string SignalingServerIP = "127.0.0.1";
+ const uint16 UnityPort = 8888;
+ class WebRTCUnityClient;
+
+ // Names used for a IceCandidate JSON object.
+ const char candidateSdpMidName[] = "sdpMid";
+ const char candidateSdpMlineIndexName[] = "sdpMLineIndex";
+ const char candidateSdpName[] = "candidate";
+ // Names used for a SessionDescription JSON object.
+ const char sessionDescriptionTypeName[] = "type";
+ const char sessionDescriptionSdpName[] = "sdp";
+
+ extern std::unique_ptr<WebRTCUnityClient> unityClient;
+}
+
+
+namespace NvCodec
+{
+ using FrameBuffer = ID3D11Texture2D;
+ using DebugLogFuncType = void(*)(const char*);
+ using SetResolutionFuncType = void(*)(int32*, int32*);
+ const uint32 bufferedFrameNum = 3;
+
+ void checkf(bool result, const char* msg);
+ void SetResolution(int32* width, int32* height);
+
+ extern FrameBuffer* renderTextures[bufferedFrameNum];
+ extern ID3D11DeviceContext* context;
+ extern FrameBuffer* unityRT;
+ extern ID3D11Device* g_D3D11Device;
+ extern DebugLogFuncType debugLogFunc;
+ extern SetResolutionFuncType setResFunc;
+}
diff --git a/Plugin~/WebRTCPlugin/WebRTCPlugin.cpp b/Plugin~/WebRTCPlugin/WebRTCPlugin.cpp
new file mode 100644
index 0000000000..b525243ed7
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/WebRTCPlugin.cpp
@@ -0,0 +1,361 @@
+#include "pch.h"
+#include "WebRTCPlugin.h"
+#include "PeerConnectionObject.h"
+#include "Context.h"
+
+using namespace WebRTC;
+namespace WebRTC
+{
+ DelegateDebugLog delegateDebugLog = nullptr;
+ DelegateSetResolution delegateSetResolution = nullptr;
+
+ void debugLog(const char* buf)
+ {
+ if (delegateDebugLog != nullptr)
+ {
+ delegateDebugLog(buf);
+ }
+ }
+
+ void SetResolution(int32* width, int32* height)
+ {
+ if (delegateSetResolution != nullptr)
+ {
+ delegateSetResolution(width, height);
+ }
+ }
+}
+
+
+extern "C"
+{
+ UNITY_INTERFACE_EXPORT CodecInitializationResult GetCodecInitializationResult()
+ {
+ return ContextManager::GetInstance()->GetCodecInitializationResult();
+ }
+
+ UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CaptureVideoStream(Context* context, UnityFrameBuffer* rt, int32 width, int32 height)
+ {
+ context->InitializeEncoder(width, height);
+ return context->CreateVideoStream(rt);
+ }
+ //TODO: Multi-track support
+ UNITY_INTERFACE_EXPORT void StopMediaStreamTrack(Context* context, webrtc::MediaStreamTrackInterface* track)
+ {
+ context->StopCapturer();
+ }
+
+ UNITY_INTERFACE_EXPORT webrtc::MediaStreamInterface* CaptureAudioStream(Context* context)
+ {
+ return context->CreateAudioStream();
+ }
+
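+ // MediaStreamAddTrack/RemoveTrack downcast based on kind(); "audio" and
+ // "video" are the only track kinds WebRTC defines.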
+ UNITY_INTERFACE_EXPORT void MediaStreamAddTrack(webrtc::MediaStreamInterface* stream, webrtc::MediaStreamTrackInterface* track)
+ {
+ if (track->kind() == "audio")
+ {
+ stream->AddTrack((webrtc::AudioTrackInterface*)track);
+ }
+ else
+ {
+ stream->AddTrack((webrtc::VideoTrackInterface*)track);
+ }
+ }
+ UNITY_INTERFACE_EXPORT void MediaStreamRemoveTrack(webrtc::MediaStreamInterface* stream, webrtc::MediaStreamTrackInterface* track)
+ {
+ if (track->kind() == "audio")
+ {
+ stream->RemoveTrack((webrtc::AudioTrackInterface*)track);
+ }
+ else
+ {
+ stream->RemoveTrack((webrtc::VideoTrackInterface*)track);
+ }
+ }
+
+ UNITY_INTERFACE_EXPORT char* MediaStreamGetID(webrtc::MediaStreamInterface* stream)
+ {
+ auto idStr = stream->id();
+ //TODO: Linux compatibility
+ char* id = (char*)CoTaskMemAlloc(idStr.size() + sizeof(char));
+ idStr.copy(id, idStr.size());
+ id[idStr.size()] = '\0';
+ return id;
+ }
+
+
+ UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface** MediaStreamGetVideoTracks(webrtc::MediaStreamInterface* stream, int* length)
+ {
+ auto tracksVector = stream->GetVideoTracks();
+#pragma warning(suppress: 4267)
+ *length = tracksVector.size();
+ //TODO: Linux compatibility
+ auto tracks = (webrtc::MediaStreamTrackInterface**)CoTaskMemAlloc(sizeof(webrtc::MediaStreamTrackInterface*) * tracksVector.size());
+ for (int i = 0; i < tracksVector.size(); i++)
+ {
+ tracks[i] = tracksVector[i].get();
+ }
+ return tracks;
+ }
+
+ UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface** MediaStreamGetAudioTracks(webrtc::MediaStreamInterface* stream, int* length)
+ {
+ auto tracksVector = stream->GetAudioTracks();
+#pragma warning(suppress: 4267)
+ *length = tracksVector.size();
+ //TODO: Linux compatibility
+ auto tracks = (webrtc::MediaStreamTrackInterface**)CoTaskMemAlloc(sizeof(webrtc::MediaStreamTrackInterface*) * tracksVector.size());
+ for (int i = 0; i < tracksVector.size(); i++)
+ {
+ tracks[i] = tracksVector[i].get();
+ }
+ return tracks;
+ }
+
+ UNITY_INTERFACE_EXPORT TrackKind MediaStreamTrackGetKind(webrtc::MediaStreamTrackInterface* track)
+ {
+ auto kindStr = track->kind();
+ if (kindStr == "audio")
+ {
+ return TrackKind::Audio;
+ }
+ else
+ {
+ return TrackKind::Video;
+ }
+ }
+
+ UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface::TrackState MediaStreamTrackGetReadyState(webrtc::MediaStreamTrackInterface* track)
+ {
+ return track->state();
+ }
+
+ UNITY_INTERFACE_EXPORT char* MediaStreamTrackGetID(webrtc::MediaStreamTrackInterface* track)
+ {
+ auto idStr = track->id();
+ //TODO: Linux compatibility
+ char* id = (char*)CoTaskMemAlloc(idStr.size() + sizeof(char));
+ idStr.copy(id, idStr.size());
+ id[idStr.size()] = '\0';
+ return id;
+ }
+
+ UNITY_INTERFACE_EXPORT bool MediaStreamTrackGetEnabled(webrtc::MediaStreamTrackInterface* track)
+ {
+ return track->enabled();
+ }
+
+ UNITY_INTERFACE_EXPORT void MediaStreamTrackSetEnabled(webrtc::MediaStreamTrackInterface* track, bool enabled)
+ {
+ track->set_enabled(enabled);
+ }
+
+ UNITY_INTERFACE_EXPORT void RegisterDebugLog(DelegateDebugLog func)
+ {
+ delegateDebugLog = func;
+ }
+
+ UNITY_INTERFACE_EXPORT void RegisterSetResolution(DelegateSetResolution func)
+ {
+ delegateSetResolution = func;
+ }
+
+ UNITY_INTERFACE_EXPORT Context* ContextCreate(int uid)
+ {
+ return ContextManager::GetInstance()->GetContext(uid);
+ }
+
+ UNITY_INTERFACE_EXPORT void ContextDestroy(int uid)
+ {
+ ContextManager::GetInstance()->DestroyContext(uid);
+ }
+
+ UNITY_INTERFACE_EXPORT PeerConnectionObject* ContextCreatePeerConnection(Context* ctx, int id)
+ {
+ return ctx->CreatePeerConnection(id);
+ }
+
+ UNITY_INTERFACE_EXPORT PeerConnectionObject* ContextCreatePeerConnectionWithConfig(Context* ctx, int id, const char* conf)
+ {
+ return ctx->CreatePeerConnection(id, conf);
+ }
+ UNITY_INTERFACE_EXPORT void ContextDeletePeerConnection(Context* ctx, int id)
+ {
+ ctx->DeletePeerConnection(id);
+ }
+ UNITY_INTERFACE_EXPORT void PeerConnectionClose(PeerConnectionObject* obj)
+ {
+ obj->Close();
+ }
+ UNITY_INTERFACE_EXPORT webrtc::RtpSenderInterface* PeerConnectionAddTrack(PeerConnectionObject* obj, webrtc::MediaStreamTrackInterface* track)
+ {
+ return obj->connection->AddTrack(rtc::scoped_refptr (track), { "unity" }).value().get();
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRemoveTrack(PeerConnectionObject* obj, webrtc::RtpSenderInterface* sender)
+ {
+ obj->connection->RemoveTrack(sender);
+ }
+
+ UNITY_INTERFACE_EXPORT webrtc::RTCErrorType PeerConnectionSetConfiguration(PeerConnectionObject* obj, const char* conf)
+ {
+ return obj->SetConfiguration(std::string(conf));
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionGetConfiguration(PeerConnectionObject* obj, char** conf, int* len)
+ {
+ std::string _conf;
+ obj->GetConfiguration(_conf);
+#pragma warning(suppress: 4267)
+ *len = _conf.size();
+ //TODO: Linux compatibility
+ *conf = (char*)::CoTaskMemAlloc(_conf.size() + sizeof(char));
+ _conf.copy(*conf, _conf.size());
+ (*conf)[_conf.size()] = '\0';
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionSetRemoteDescription(PeerConnectionObject* obj, const RTCSessionDescription* desc)
+ {
+ obj->SetRemoteDescription(*desc);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionSetLocalDescription(PeerConnectionObject* obj, const RTCSessionDescription* desc)
+ {
+ obj->SetLocalDescription(*desc);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionGetLocalDescription(PeerConnectionObject* obj, RTCSessionDescription* desc)
+ {
+ obj->GetLocalDescription(*desc);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionCreateOffer(PeerConnectionObject* obj, const RTCOfferOptions* options)
+ {
+ obj->CreateOffer(*options);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionCreateAnswer(PeerConnectionObject* obj, const RTCAnswerOptions* options)
+ {
+ obj->CreateAnswer(*options);
+ }
+
+ UNITY_INTERFACE_EXPORT DataChannelObject* PeerConnectionCreateDataChannel(PeerConnectionObject* obj, const char* label, const RTCDataChannelInit* options)
+ {
+ return obj->CreateDataChannel(label, *options);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRegisterIceConnectionChange(PeerConnectionObject* obj, DelegateOnIceConnectionChange callback)
+ {
+ obj->RegisterIceConnectionChange(callback);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRegisterOnIceCandidate(PeerConnectionObject*obj, DelegateIceCandidate callback)
+ {
+ obj->RegisterIceCandidate(callback);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRegisterCallbackCreateSD(PeerConnectionObject* obj, DelegateCreateSDSuccess onSuccess, DelegateCreateSDFailure onFailure)
+ {
+ obj->RegisterCallbackCreateSD(onSuccess, onFailure);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRegisterCallbackSetSD(PeerConnectionObject* obj, DelegateSetSDSuccess onSuccess, DelegateSetSDFailure onFailure)
+ {
+ obj->RegisterCallbackSetSD(onSuccess, onFailure);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionAddIceCandidate(PeerConnectionObject* obj, const RTCIceCandidate* candidate)
+ {
+ return obj->AddIceCandidate(*candidate);
+ }
+
+ UNITY_INTERFACE_EXPORT RTCPeerConnectionState PeerConnectionState(PeerConnectionObject* obj)
+ {
+ return obj->GetConnectionState();
+ }
+
+ UNITY_INTERFACE_EXPORT RTCIceConnectionState PeerConnectionIceConditionState(PeerConnectionObject* obj)
+ {
+ return obj->GetIceCandidateState();
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRegisterOnDataChannel(PeerConnectionObject* obj, DelegateOnDataChannel callback)
+ {
+ obj->RegisterOnDataChannel(callback);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRegisterOnRenegotiationNeeded(PeerConnectionObject* obj, DelegateOnRenegotiationNeeded callback)
+ {
+ obj->RegisterOnRenegotiationNeeded(callback);
+ }
+
+ UNITY_INTERFACE_EXPORT void PeerConnectionRegisterOnTrack(PeerConnectionObject* obj, DelegateOnTrack callback)
+ {
+ obj->RegisterOnTrack(callback);
+ }
+ UNITY_INTERFACE_EXPORT webrtc::MediaStreamTrackInterface* RtpTransceiverInterfaceGetTrack(webrtc::RtpTransceiverInterface* obj)
+ {
+ return obj->receiver()->track().get();
+ }
+
+ UNITY_INTERFACE_EXPORT int DataChannelGetID(DataChannelObject* dataChannelObj)
+ {
+ return dataChannelObj->GetID();
+ }
+
+ UNITY_INTERFACE_EXPORT char* DataChannelGetLabel(DataChannelObject* dataChannelObj)
+ {
+ std::string tmp = dataChannelObj->GetLabel();
+ //TODO: Linux compatibility
+ char* label = (char*)CoTaskMemAlloc(tmp.size() + sizeof(char));
+ tmp.copy(label, tmp.size());
+ label[tmp.size()] = '\0';
+ return label;
+ }
+
+ UNITY_INTERFACE_EXPORT void DataChannelSend(DataChannelObject* dataChannelObj, const char* msg)
+ {
+ dataChannelObj->Send(msg);
+ }
+
+ UNITY_INTERFACE_EXPORT void DataChannelSendBinary(DataChannelObject* dataChannelObj, const byte* msg, int len)
+ {
+ dataChannelObj->Send(msg, len);
+ }
+
+ UNITY_INTERFACE_EXPORT void DataChannelClose(DataChannelObject* dataChannelObj)
+ {
+ dataChannelObj->Close();
+ }
+
+ UNITY_INTERFACE_EXPORT void DataChannelRegisterOnMessage(DataChannelObject* dataChannelObj, DelegateOnMessage callback)
+ {
+ dataChannelObj->RegisterOnMessage(callback);
+ }
+
+ UNITY_INTERFACE_EXPORT void DataChannelRegisterOnOpen(DataChannelObject* dataChannelObj, DelegateOnOpen callback)
+ {
+ dataChannelObj->RegisterOnOpen(callback);
+ }
+
+ UNITY_INTERFACE_EXPORT void DataChannelRegisterOnClose(DataChannelObject* dataChannelObj, DelegateOnClose callback)
+ {
+ dataChannelObj->RegisterOnClose(callback);
+ }
+
+ UNITY_INTERFACE_EXPORT void SetCurrentContext(Context* context)
+ {
+ ContextManager::GetInstance()->curContext = context;
+ }
+
+ UNITY_INTERFACE_EXPORT void ProcessAudio(float* data, int32 size)
+ {
+ if (ContextManager::GetInstance()->curContext)
+ {
+ ContextManager::GetInstance()->curContext->ProcessAudioData(data, size);
+ }
+ }
+}
+
+
+
diff --git a/Plugin~/WebRTCPlugin/WebRTCPlugin.h b/Plugin~/WebRTCPlugin/WebRTCPlugin.h
new file mode 100644
index 0000000000..a42dc7fbef
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/WebRTCPlugin.h
@@ -0,0 +1,167 @@
+#pragma once
+#include "IUnityInterface.h"
+
+namespace WebRTC
+{
+ class Context;
+ class PeerConnectionObject;
+ enum class RTCSdpType;
+ enum class RTCPeerConnectionEventType;
+ struct RTCError;
+ struct MediaStreamEvent;
+
+ using DelegateDebugLog = void(*)(const char*);
+ using DelegateSetResolution = void(*)(int32*, int32*);
+ using DelegateRTCPeerConnectionOnTrack = void(*)();
+ using DelegateRTCPeerConnectionOnConnectionStateChange = void(*)();
+
+ void debugLog(const char* buf);
+ void SetResolution(int32* width, int32* height);
+ extern DelegateDebugLog delegateDebugLog;
+
+ enum class CodecInitializationResult
+ {
+ NotInitialized,
+ Success,
+ DriverNotInstalled,
+ DriverVersionDoesNotSupportAPI,
+ APINotFound,
+ EncoderInitializationFailed
+ };
+
+ enum class RTCPeerConnectionState
+ {
+ New,
+ Connecting,
+ Connected,
+ Disconnected,
+ Failed,
+ Closed
+ };
+
+ enum class RTCIceConnectionState
+ {
+ New,
+ Checking,
+ Connected,
+ Completed,
+ Failed,
+ Disconnected,
+ Closed,
+ Max
+ };
+
+ enum class RTCPeerConnectionEventType
+ {
+ ConnectionStateChange,
+ DataChannel,
+ IceCandidate,
+ IceConnectionStateChange,
+ Track
+ };
+
+ enum class RTCSdpType
+ {
+ Offer,
+ PrAnswer,
+ Answer,
+ };
+
+ enum class SdpSemanticsType
+ {
+ UnifiedPlan
+ };
+
+ enum class RTCErrorDetailType
+ {
+ DataChannelFailure,
+ DtlsFailure,
+ FingerprintFailure,
+ IdpBadScriptFailure,
+ IdpExecutionFailure,
+ IdpLoadFailure,
+ IdpNeedLogin,
+ IdpTimeout,
+ IdpTlsFailure,
+ IdpTokenExpired,
+ IdpTokenInvalid,
+ SctpFailure,
+ SdpSyntaxError,
+ HardwareEncoderNotAvailable,
+ HardwareEncoderError
+ };
+
+ enum class RTCIceCredentialType
+ {
+ Password,
+ OAuth
+ };
+
+ enum class TrackKind
+ {
+ Audio,
+ Video
+ };
+
+ struct RTCError
+ {
+ RTCErrorDetailType errorDetail;
+ long sdpLineNumber;
+ long httpRequestStatusCode;
+ long sctpCauseCode;
+ unsigned long receivedAlert;
+ unsigned long sentAlert;
+ };
+
+ struct RTCSessionDescription
+ {
+ RTCSdpType type;
+ char* sdp;
+ };
+
+ struct RTCIceServer
+ {
+ char* credential;
+ char* credentialType;
+ char** urls;
+ int urlsLength;
+ char* username;
+ };
+
+ struct RTCConfiguration
+ {
+ RTCIceServer* iceServers;
+ int iceServersLength;
+ char* iceServerPolicy;
+ };
+
+ struct RTCIceCandidate
+ {
+ char* candidate;
+ char* sdpMid;
+ int sdpMLineIndex;
+ };
+
+ struct RTCOfferOptions
+ {
+ bool iceRestart;
+ bool offerToReceiveAudio;
+ bool offerToReceiveVideo;
+ };
+
+ struct RTCDataChannelInit
+ {
+ bool reliable = false;
+ bool ordered = true;
+ int maxRetransmitTime = -1;
+ int maxRetransmits = -1;
+ char* protocol;
+ bool negotiated = false;
+ int id = -1;
+ };
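+
+ // Note: as with webrtc::DataChannelInit, maxRetransmitTime and
+ // maxRetransmits are mutually exclusive; leave the unused one at -1.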
+
+ struct RTCAnswerOptions
+ {
+ bool iceRestart;
+ };
+}
diff --git a/Plugin~/WebRTCPlugin/WebRTCPlugin.vcxproj b/Plugin~/WebRTCPlugin/WebRTCPlugin.vcxproj
new file mode 100644
index 0000000000..6f22abd39b
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/WebRTCPlugin.vcxproj
@@ -0,0 +1,208 @@
+
+
+
+
+ Debug
+ Win32
+
+
+ Release
+ Win32
+
+
+ Debug
+ x64
+
+
+ Release
+ x64
+
+
+
+ 15.0
+ {94D2206F-BF45-45BD-8E13-214A3ED917AE}
+ Win32Proj
+ WebRTCPlugin
+ 10.0.17763.0
+
+
+
+ DynamicLibrary
+ true
+ v141
+ Unicode
+
+
+ DynamicLibrary
+ false
+ v141
+ true
+ Unicode
+
+
+ DynamicLibrary
+ true
+ v141
+ Unicode
+
+
+ DynamicLibrary
+ false
+ v141
+ true
+ Unicode
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ false
+ webrtc
+
+
+ true
+
+
+ true
+ webrtc
+
+
+ false
+
+
+
+ Use
+ Level3
+ MaxSpeed
+ true
+ true
+ true
+ NDEBUG;WEBRTCPLUGIN_EXPORTS;WEBRTC_WIN;_WINDOWS;_USRDLL;%(PreprocessorDefinitions);NOMINMAX
+ true
+ $(ProjectDir)..\webrtc\include\third_party\jsoncpp\source\include;$(ProjectDir)..\webrtc\include\third_party\abseil-cpp;$(ProjectDir)..\webrtc\include;$(ProjectDir)..\unity\include;$(ProjectDir)
+ pch.h
+ MultiThreaded
+
+
+ Windows
+ true
+ true
+ true
+ jsoncpp.lib;webrtc.lib;webrtc_opus.lib;audio_decoder_opus.lib;Winmm.lib;Msdmo.lib;Dmoguids.lib;wmcodecdspuuid.lib;Secur32.lib;iphlpapi.lib;%(AdditionalDependencies)
+ $(ProjectDir)..\webrtc\lib
+
+
+ ..\..\Packages\com.unity.webrtc\Runtime\Plugins\x86_64\$(TargetName)$(TargetExt)
+ UseLinkTimeCodeGeneration
+
+
+
+
+ Use
+ Level3
+ Disabled
+ true
+ WIN32;_DEBUG;WEBRTCPLUGIN_EXPORTS;_WINDOWS;_USRDLL;%(PreprocessorDefinitions)
+ true
+
+
+ Windows
+ true
+
+
+
+
+
+
+ Use
+ Level3
+ Disabled
+ true
+ _DEBUG;WEBRTCPLUGIN_EXPORTS;WEBRTC_WIN;_WINDOWS;_USRDLL;%(PreprocessorDefinitions);NOMINMAX
+ true
+ $(ProjectDir)..\webrtc\include\third_party\jsoncpp\source\include;$(ProjectDir)..\webrtc\include\third_party\abseil-cpp;$(ProjectDir)..\webrtc\include;$(ProjectDir)..\unity\include;$(ProjectDir)
+ pch.h
+ MultiThreadedDebug
+
+
+ Windows
+ true
+ jsoncpp.lib;webrtc.lib;webrtc_opus.lib;audio_decoder_opus.lib;Winmm.lib;Msdmo.lib;Dmoguids.lib;wmcodecdspuuid.lib;Secur32.lib;iphlpapi.lib;%(AdditionalDependencies)
+ $(ProjectDir)..\webrtc\lib
+
+
+ ..\..\Packages\com.unity.webrtc\Runtime\Plugins\x86_64\$(TargetName)$(TargetExt)
+
+
+
+
+ Use
+ Level3
+ MaxSpeed
+ true
+ true
+ true
+ WIN32;NDEBUG;WEBRTCPLUGIN_EXPORTS;_WINDOWS;_USRDLL;%(PreprocessorDefinitions)
+ true
+
+
+ Windows
+ true
+ true
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Create
+ Create
+ pch.h
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Plugin~/WebRTCPlugin/WebRTCPlugin.vcxproj.filters b/Plugin~/WebRTCPlugin/WebRTCPlugin.vcxproj.filters
new file mode 100644
index 0000000000..0a0d2a3a4f
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/WebRTCPlugin.vcxproj.filters
@@ -0,0 +1,99 @@
+
+
+
+
+ {4FC737F1-C7A5-4376-A066-2A32D752A2FF}
+ cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx
+
+
+ {93995380-89BD-4b04-88EB-625FBE52EBFB}
+ h;hh;hpp;hxx;hm;inl;inc;ipp;xsd
+
+
+ {67DA6AB6-F800-4c08-8B7A-83BB121AAD01}
+ rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms
+
+
+ {16932972-c865-4546-9602-539504fea736}
+
+
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files
+
+
+ Header Files\Unity
+
+
+ Header Files\Unity
+
+
+ Header Files\Unity
+
+
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+ Source Files
+
+
+
\ No newline at end of file
diff --git a/Plugin~/WebRTCPlugin/nvEncodeAPI.h b/Plugin~/WebRTCPlugin/nvEncodeAPI.h
new file mode 100644
index 0000000000..a8ca54aca9
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/nvEncodeAPI.h
@@ -0,0 +1,3484 @@
+/*
+ * This copyright notice applies to this header file only:
+ *
+ * Copyright (c) 2010-2019 NVIDIA Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation
+ * files (the "Software"), to deal in the Software without
+ * restriction, including without limitation the rights to use,
+ * copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the software, and to permit persons to whom the
+ * software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/**
+ * \file nvEncodeAPI.h
+ * NVIDIA GPUs - beginning with the Kepler generation - contain a hardware-based encoder
+ * (referred to as NVENC) which provides fully-accelerated hardware-based video encoding.
+ * NvEncodeAPI provides the interface for NVIDIA video encoder (NVENC).
+ * \date 2011-2018
+ * This file contains the interface constants, structure definitions and function prototypes.
+ */
+
+#ifndef _NV_ENCODEAPI_H_
+#define _NV_ENCODEAPI_H_
+
+#include <stdlib.h>
+
+#ifdef _WIN32
+#include <windows.h>
+#endif
+
+#ifdef _MSC_VER
+#ifndef _STDINT
+typedef __int32 int32_t;
+typedef unsigned __int32 uint32_t;
+typedef __int64 int64_t;
+typedef unsigned __int64 uint64_t;
+typedef signed char int8_t;
+typedef unsigned char uint8_t;
+typedef short int16_t;
+typedef unsigned short uint16_t;
+#endif
+#else
+#include <stdint.h>
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * \addtogroup ENCODER_STRUCTURE NvEncodeAPI Data structures
+ * @{
+ */
+
+#ifdef _WIN32
+#define NVENCAPI __stdcall
+typedef RECT NVENC_RECT;
+#else
+#define NVENCAPI
+// =========================================================================================
+#ifndef GUID
+/*!
+ * \struct GUID
+ * Abstracts the GUID structure for non-windows platforms.
+ */
+// =========================================================================================
+typedef struct
+{
+ uint32_t Data1; /**< [in]: Specifies the first 8 hexadecimal digits of the GUID. */
+ uint16_t Data2; /**< [in]: Specifies the first group of 4 hexadecimal digits. */
+ uint16_t Data3; /**< [in]: Specifies the second group of 4 hexadecimal digits. */
+ uint8_t Data4[8]; /**< [in]: Array of 8 bytes. The first 2 bytes contain the third group of 4 hexadecimal digits.
+ The remaining 6 bytes contain the final 12 hexadecimal digits. */
+} GUID;
+#endif // GUID
+
+/**
+ * \struct _NVENC_RECT
+ * Defines a Rectangle. Used in ::NV_ENC_PREPROCESS_FRAME.
+ */
+typedef struct _NVENC_RECT
+{
+ uint32_t left; /**< [in]: X coordinate of the upper left corner of rectangular area to be specified. */
+ uint32_t top; /**< [in]: Y coordinate of the upper left corner of the rectangular area to be specified. */
+ uint32_t right; /**< [in]: X coordinate of the bottom right corner of the rectangular area to be specified. */
+ uint32_t bottom; /**< [in]: Y coordinate of the bottom right corner of the rectangular area to be specified. */
+} NVENC_RECT;
+
+#endif // _WIN32
+
+/** @} */ /* End of GUID and NVENC_RECT structure grouping*/
+
+typedef void* NV_ENC_INPUT_PTR; /**< NVENCODE API input buffer */
+typedef void* NV_ENC_OUTPUT_PTR; /**< NVENCODE API output buffer*/
+typedef void* NV_ENC_REGISTERED_PTR; /**< A Resource that has been registered with NVENCODE API*/
+
+#define NVENCAPI_MAJOR_VERSION 9
+#define NVENCAPI_MINOR_VERSION 0
+
+#define NVENCAPI_VERSION (NVENCAPI_MAJOR_VERSION | (NVENCAPI_MINOR_VERSION << 24))
+
+/**
+ * Macro to generate per-structure version for use with API.
+ */
+#define NVENCAPI_STRUCT_VERSION(ver) ((uint32_t)NVENCAPI_VERSION | ((ver)<<16) | (0x7 << 28))
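+
+/**
+ * Example: with API version 9.0, NVENCAPI_STRUCT_VERSION(1) evaluates to
+ * 0x70010009 - the API version in the low bits, the structure revision in
+ * bits 16..27, and the constant signature 0x7 in the top nibble.
+ */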
+
+
+#define NVENC_INFINITE_GOPLENGTH 0xffffffff
+
+#define NV_MAX_SEQ_HDR_LEN (512)
+
+// =========================================================================================
+// Encode Codec GUIDS supported by the NvEncodeAPI interface.
+// =========================================================================================
+
+// {6BC82762-4E63-4ca4-AA85-1E50F321F6BF}
+static const GUID NV_ENC_CODEC_H264_GUID =
+{ 0x6bc82762, 0x4e63, 0x4ca4, { 0xaa, 0x85, 0x1e, 0x50, 0xf3, 0x21, 0xf6, 0xbf } };
+
+// {790CDC88-4522-4d7b-9425-BDA9975F7603}
+static const GUID NV_ENC_CODEC_HEVC_GUID =
+{ 0x790cdc88, 0x4522, 0x4d7b, { 0x94, 0x25, 0xbd, 0xa9, 0x97, 0x5f, 0x76, 0x3 } };
+
+
+
+// =========================================================================================
+// * Encode Profile GUIDS supported by the NvEncodeAPI interface.
+// =========================================================================================
+
+// {BFD6F8E7-233C-4341-8B3E-4818523803F4}
+static const GUID NV_ENC_CODEC_PROFILE_AUTOSELECT_GUID =
+{ 0xbfd6f8e7, 0x233c, 0x4341, { 0x8b, 0x3e, 0x48, 0x18, 0x52, 0x38, 0x3, 0xf4 } };
+
+// {0727BCAA-78C4-4c83-8C2F-EF3DFF267C6A}
+static const GUID NV_ENC_H264_PROFILE_BASELINE_GUID =
+{ 0x727bcaa, 0x78c4, 0x4c83, { 0x8c, 0x2f, 0xef, 0x3d, 0xff, 0x26, 0x7c, 0x6a } };
+
+// {60B5C1D4-67FE-4790-94D5-C4726D7B6E6D}
+static const GUID NV_ENC_H264_PROFILE_MAIN_GUID =
+{ 0x60b5c1d4, 0x67fe, 0x4790, { 0x94, 0xd5, 0xc4, 0x72, 0x6d, 0x7b, 0x6e, 0x6d } };
+
+// {E7CBC309-4F7A-4b89-AF2A-D537C92BE310}
+static const GUID NV_ENC_H264_PROFILE_HIGH_GUID =
+{ 0xe7cbc309, 0x4f7a, 0x4b89, { 0xaf, 0x2a, 0xd5, 0x37, 0xc9, 0x2b, 0xe3, 0x10 } };
+
+// {7AC663CB-A598-4960-B844-339B261A7D52}
+static const GUID NV_ENC_H264_PROFILE_HIGH_444_GUID =
+{ 0x7ac663cb, 0xa598, 0x4960, { 0xb8, 0x44, 0x33, 0x9b, 0x26, 0x1a, 0x7d, 0x52 } };
+
+// {40847BF5-33F7-4601-9084-E8FE3C1DB8B7}
+static const GUID NV_ENC_H264_PROFILE_STEREO_GUID =
+{ 0x40847bf5, 0x33f7, 0x4601, { 0x90, 0x84, 0xe8, 0xfe, 0x3c, 0x1d, 0xb8, 0xb7 } };
+
+// {CE788D20-AAA9-4318-92BB-AC7E858C8D36}
+static const GUID NV_ENC_H264_PROFILE_SVC_TEMPORAL_SCALABILTY =
+{ 0xce788d20, 0xaaa9, 0x4318, { 0x92, 0xbb, 0xac, 0x7e, 0x85, 0x8c, 0x8d, 0x36 } };
+
+// {B405AFAC-F32B-417B-89C4-9ABEED3E5978}
+static const GUID NV_ENC_H264_PROFILE_PROGRESSIVE_HIGH_GUID =
+{ 0xb405afac, 0xf32b, 0x417b, { 0x89, 0xc4, 0x9a, 0xbe, 0xed, 0x3e, 0x59, 0x78 } };
+
+// {AEC1BD87-E85B-48f2-84C3-98BCA6285072}
+static const GUID NV_ENC_H264_PROFILE_CONSTRAINED_HIGH_GUID =
+{ 0xaec1bd87, 0xe85b, 0x48f2, { 0x84, 0xc3, 0x98, 0xbc, 0xa6, 0x28, 0x50, 0x72 } };
+
+// {B514C39A-B55B-40fa-878F-F1253B4DFDEC}
+static const GUID NV_ENC_HEVC_PROFILE_MAIN_GUID =
+{ 0xb514c39a, 0xb55b, 0x40fa, { 0x87, 0x8f, 0xf1, 0x25, 0x3b, 0x4d, 0xfd, 0xec } };
+
+// {fa4d2b6c-3a5b-411a-8018-0a3f5e3c9be5}
+static const GUID NV_ENC_HEVC_PROFILE_MAIN10_GUID =
+{ 0xfa4d2b6c, 0x3a5b, 0x411a, { 0x80, 0x18, 0x0a, 0x3f, 0x5e, 0x3c, 0x9b, 0xe5 } };
+
+// For HEVC Main 444 8 bit and HEVC Main 444 10 bit profiles only
+// {51ec32b5-1b4c-453c-9cbd-b616bd621341}
+static const GUID NV_ENC_HEVC_PROFILE_FREXT_GUID =
+{ 0x51ec32b5, 0x1b4c, 0x453c, { 0x9c, 0xbd, 0xb6, 0x16, 0xbd, 0x62, 0x13, 0x41 } };
+
+// =========================================================================================
+// * Preset GUIDS supported by the NvEncodeAPI interface.
+// =========================================================================================
+// {B2DFB705-4EBD-4C49-9B5F-24A777D3E587}
+static const GUID NV_ENC_PRESET_DEFAULT_GUID =
+{ 0xb2dfb705, 0x4ebd, 0x4c49, { 0x9b, 0x5f, 0x24, 0xa7, 0x77, 0xd3, 0xe5, 0x87 } };
+
+// {60E4C59F-E846-4484-A56D-CD45BE9FDDF6}
+static const GUID NV_ENC_PRESET_HP_GUID =
+{ 0x60e4c59f, 0xe846, 0x4484, { 0xa5, 0x6d, 0xcd, 0x45, 0xbe, 0x9f, 0xdd, 0xf6 } };
+
+// {34DBA71D-A77B-4B8F-9C3E-B6D5DA24C012}
+static const GUID NV_ENC_PRESET_HQ_GUID =
+{ 0x34dba71d, 0xa77b, 0x4b8f, { 0x9c, 0x3e, 0xb6, 0xd5, 0xda, 0x24, 0xc0, 0x12 } };
+
+// {82E3E450-BDBB-4e40-989C-82A90DF9EF32}
+static const GUID NV_ENC_PRESET_BD_GUID =
+{ 0x82e3e450, 0xbdbb, 0x4e40, { 0x98, 0x9c, 0x82, 0xa9, 0xd, 0xf9, 0xef, 0x32 } };
+
+// {49DF21C5-6DFA-4feb-9787-6ACC9EFFB726}
+static const GUID NV_ENC_PRESET_LOW_LATENCY_DEFAULT_GUID =
+{ 0x49df21c5, 0x6dfa, 0x4feb, { 0x97, 0x87, 0x6a, 0xcc, 0x9e, 0xff, 0xb7, 0x26 } };
+
+// {C5F733B9-EA97-4cf9-BEC2-BF78A74FD105}
+static const GUID NV_ENC_PRESET_LOW_LATENCY_HQ_GUID =
+{ 0xc5f733b9, 0xea97, 0x4cf9, { 0xbe, 0xc2, 0xbf, 0x78, 0xa7, 0x4f, 0xd1, 0x5 } };
+
+// {67082A44-4BAD-48FA-98EA-93056D150A58}
+static const GUID NV_ENC_PRESET_LOW_LATENCY_HP_GUID =
+{ 0x67082a44, 0x4bad, 0x48fa, { 0x98, 0xea, 0x93, 0x5, 0x6d, 0x15, 0xa, 0x58 } };
+
+// {D5BFB716-C604-44e7-9BB8-DEA5510FC3AC}
+static const GUID NV_ENC_PRESET_LOSSLESS_DEFAULT_GUID =
+{ 0xd5bfb716, 0xc604, 0x44e7, { 0x9b, 0xb8, 0xde, 0xa5, 0x51, 0xf, 0xc3, 0xac } };
+
+// {149998E7-2364-411d-82EF-179888093409}
+static const GUID NV_ENC_PRESET_LOSSLESS_HP_GUID =
+{ 0x149998e7, 0x2364, 0x411d, { 0x82, 0xef, 0x17, 0x98, 0x88, 0x9, 0x34, 0x9 } };
+
+/**
+ * \addtogroup ENCODER_STRUCTURE NvEncodeAPI Data structures
+ * @{
+ */
+
+/**
+ * Input frame encode modes
+ */
+typedef enum _NV_ENC_PARAMS_FRAME_FIELD_MODE
+{
+ NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME = 0x01, /**< Frame mode */
+ NV_ENC_PARAMS_FRAME_FIELD_MODE_FIELD = 0x02, /**< Field mode */
+ NV_ENC_PARAMS_FRAME_FIELD_MODE_MBAFF = 0x03 /**< MB adaptive frame/field */
+} NV_ENC_PARAMS_FRAME_FIELD_MODE;
+
+/**
+ * Rate Control Modes
+ */
+typedef enum _NV_ENC_PARAMS_RC_MODE
+{
+ NV_ENC_PARAMS_RC_CONSTQP = 0x0, /**< Constant QP mode */
+ NV_ENC_PARAMS_RC_VBR = 0x1, /**< Variable bitrate mode */
+ NV_ENC_PARAMS_RC_CBR = 0x2, /**< Constant bitrate mode */
+ NV_ENC_PARAMS_RC_CBR_LOWDELAY_HQ = 0x8, /**< low-delay CBR, high quality */
+ NV_ENC_PARAMS_RC_CBR_HQ = 0x10, /**< CBR, high quality (slower) */
+ NV_ENC_PARAMS_RC_VBR_HQ = 0x20 /**< VBR, high quality (slower) */
+} NV_ENC_PARAMS_RC_MODE;
+
+/**
+ * Emphasis Levels
+ */
+typedef enum _NV_ENC_EMPHASIS_MAP_LEVEL
+{
+ NV_ENC_EMPHASIS_MAP_LEVEL_0 = 0x0, /**< Emphasis Map Level 0, for zero Delta QP value */
+ NV_ENC_EMPHASIS_MAP_LEVEL_1 = 0x1, /**< Emphasis Map Level 1, for very low Delta QP value */
+ NV_ENC_EMPHASIS_MAP_LEVEL_2 = 0x2, /**< Emphasis Map Level 2, for low Delta QP value */
+ NV_ENC_EMPHASIS_MAP_LEVEL_3 = 0x3, /**< Emphasis Map Level 3, for medium Delta QP value */
+ NV_ENC_EMPHASIS_MAP_LEVEL_4 = 0x4, /**< Emphasis Map Level 4, for high Delta QP value */
+ NV_ENC_EMPHASIS_MAP_LEVEL_5 = 0x5 /**< Emphasis Map Level 5, for very high Delta QP value */
+} NV_ENC_EMPHASIS_MAP_LEVEL;
+
+/**
+ * QP MAP MODE
+ */
+typedef enum _NV_ENC_QP_MAP_MODE
+{
+ NV_ENC_QP_MAP_DISABLED = 0x0, /**< Value in NV_ENC_PIC_PARAMS::qpDeltaMap has no effect. */
+ NV_ENC_QP_MAP_EMPHASIS = 0x1, /**< Value in NV_ENC_PIC_PARAMS::qpDeltaMap will be treated as Emphasis level. Currently this is only supported for H264 */
+ NV_ENC_QP_MAP_DELTA = 0x2, /**< Value in NV_ENC_PIC_PARAMS::qpDeltaMap will be treated as QP delta map. */
+ NV_ENC_QP_MAP = 0x3, /**< Currently this is not supported. Value in NV_ENC_PIC_PARAMS::qpDeltaMap will be treated as QP value. */
+} NV_ENC_QP_MAP_MODE;
+
+#define NV_ENC_PARAMS_RC_VBR_MINQP (NV_ENC_PARAMS_RC_MODE)0x4 /**< Deprecated */
+#define NV_ENC_PARAMS_RC_2_PASS_QUALITY NV_ENC_PARAMS_RC_CBR_LOWDELAY_HQ /**< Deprecated */
+#define NV_ENC_PARAMS_RC_2_PASS_FRAMESIZE_CAP NV_ENC_PARAMS_RC_CBR_HQ /**< Deprecated */
+#define NV_ENC_PARAMS_RC_2_PASS_VBR NV_ENC_PARAMS_RC_VBR_HQ /**< Deprecated */
+#define NV_ENC_PARAMS_RC_CBR2 NV_ENC_PARAMS_RC_CBR /**< Deprecated */
+
+/**
+ * Input picture structure
+ */
+typedef enum _NV_ENC_PIC_STRUCT
+{
+ NV_ENC_PIC_STRUCT_FRAME = 0x01, /**< Progressive frame */
+ NV_ENC_PIC_STRUCT_FIELD_TOP_BOTTOM = 0x02, /**< Field encoding top field first */
+ NV_ENC_PIC_STRUCT_FIELD_BOTTOM_TOP = 0x03 /**< Field encoding bottom field first */
+} NV_ENC_PIC_STRUCT;
+
+/**
+ * Input picture type
+ */
+typedef enum _NV_ENC_PIC_TYPE
+{
+ NV_ENC_PIC_TYPE_P = 0x0, /**< Forward predicted */
+ NV_ENC_PIC_TYPE_B = 0x01, /**< Bi-directionally predicted picture */
+ NV_ENC_PIC_TYPE_I = 0x02, /**< Intra predicted picture */
+ NV_ENC_PIC_TYPE_IDR = 0x03, /**< IDR picture */
+ NV_ENC_PIC_TYPE_BI = 0x04, /**< Bi-directionally predicted with only Intra MBs */
+ NV_ENC_PIC_TYPE_SKIPPED = 0x05, /**< Picture is skipped */
+ NV_ENC_PIC_TYPE_INTRA_REFRESH = 0x06, /**< First picture in intra refresh cycle */
+ NV_ENC_PIC_TYPE_NONREF_P = 0x07, /**< Non reference P picture */
+ NV_ENC_PIC_TYPE_UNKNOWN = 0xFF /**< Picture type unknown */
+} NV_ENC_PIC_TYPE;
+
+/**
+ * Motion vector precisions
+ */
+typedef enum _NV_ENC_MV_PRECISION
+{
+ NV_ENC_MV_PRECISION_DEFAULT = 0x0, /**< Driver selects Quarter-Pel motion vector precision by default */
+ NV_ENC_MV_PRECISION_FULL_PEL = 0x01, /**< Full-Pel motion vector precision */
+ NV_ENC_MV_PRECISION_HALF_PEL = 0x02, /**< Half-Pel motion vector precision */
+ NV_ENC_MV_PRECISION_QUARTER_PEL = 0x03 /**< Quarter-Pel motion vector precision */
+} NV_ENC_MV_PRECISION;
+
+/**
+ * QP value for frames
+ */
+typedef struct _NV_ENC_QP
+{
+ uint32_t qpInterP; /**< [in]: Specifies QP value for P-frame. */
+ uint32_t qpInterB; /**< [in]: Specifies QP value for B-frame. */
+ uint32_t qpIntra; /**< [in]: Specifies QP value for Intra frame. */
+} NV_ENC_QP;
+
+/**
+ * Rate Control Configuration Parameters
+ */
+typedef struct _NV_ENC_RC_PARAMS
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_RC_PARAMS_VER. */
+ NV_ENC_PARAMS_RC_MODE rateControlMode; /**< [in]: Specifies the rate control mode. Check support for various rate control modes using ::NV_ENC_CAPS_SUPPORTED_RATECONTROL_MODES caps. */
+ NV_ENC_QP constQP; /**< [in]: Specifies the initial QP to be used for encoding; these values are used for all frames in Constant QP mode. */
+ uint32_t averageBitRate; /**< [in]: Specifies the average bitrate (in bits/sec) used for encoding. */
+ uint32_t maxBitRate; /**< [in]: Specifies the maximum bitrate for the encoded output. This is used for VBR and ignored for CBR mode. */
+ uint32_t vbvBufferSize; /**< [in]: Specifies the VBV(HRD) buffer size, in bits. Set 0 to use the default VBV buffer size. */
+ uint32_t vbvInitialDelay; /**< [in]: Specifies the VBV(HRD) initial delay in bits. Set 0 to use the default VBV initial delay. */
+ uint32_t enableMinQP :1; /**< [in]: Set this to 1 if minimum QP is used for rate control. */
+ uint32_t enableMaxQP :1; /**< [in]: Set this to 1 if maximum QP is used for rate control. */
+ uint32_t enableInitialRCQP :1; /**< [in]: Set this to 1 if user-supplied initial QP is used for rate control. */
+ uint32_t enableAQ :1; /**< [in]: Set this to 1 to enable adaptive quantization (Spatial). */
+ uint32_t reservedBitField1 :1; /**< [in]: Reserved bitfield and must be set to 0. */
+ uint32_t enableLookahead :1; /**< [in]: Set this to 1 to enable lookahead with depth <lookaheadDepth> (if lookahead is enabled, input frames must remain available to the encoder until encode completion) */
+ uint32_t disableIadapt :1; /**< [in]: Set this to 1 to disable adaptive I-frame insertion at scene cuts (only has an effect when lookahead is enabled) */
+ uint32_t disableBadapt :1; /**< [in]: Set this to 1 to disable adaptive B-frame decision (only has an effect when lookahead is enabled) */
+ uint32_t enableTemporalAQ :1; /**< [in]: Set this to 1 to enable temporal AQ for H.264 */
+ uint32_t zeroReorderDelay :1; /**< [in]: Set this to 1 to indicate zero latency operation (no reordering delay, num_reorder_frames=0) */
+ uint32_t enableNonRefP :1; /**< [in]: Set this to 1 to enable automatic insertion of non-reference P-frames (no effect if enablePTD=0) */
+ uint32_t strictGOPTarget :1; /**< [in]: Set this to 1 to minimize GOP-to-GOP rate fluctuations */
+ uint32_t aqStrength :4; /**< [in]: When AQ (Spatial) is enabled (i.e. NV_ENC_RC_PARAMS::enableAQ is set), this field is used to specify AQ strength. AQ strength scale is from 1 (low) - 15 (aggressive). If not set, strength is autoselected by driver. */
+ uint32_t reservedBitFields :16; /**< [in]: Reserved bitfields and must be set to 0 */
+ NV_ENC_QP minQP; /**< [in]: Specifies the minimum QP used for rate control. Client must set NV_ENC_CONFIG::enableMinQP to 1. */
+ NV_ENC_QP maxQP; /**< [in]: Specifies the maximum QP used for rate control. Client must set NV_ENC_CONFIG::enableMaxQP to 1. */
+ NV_ENC_QP initialRCQP; /**< [in]: Specifies the initial QP used for rate control. Client must set NV_ENC_CONFIG::enableInitialRCQP to 1. */
+ uint32_t temporallayerIdxMask; /**< [in]: Specifies the temporal layers (as a bitmask) whose QPs have changed. Valid max bitmask is [2^NV_ENC_CAPS_NUM_MAX_TEMPORAL_LAYERS - 1] */
+ uint8_t temporalLayerQP[8]; /**< [in]: Specifies the temporal layer QPs used for rate control. Temporal layer index is used as as the array index */
+ uint8_t targetQuality; /**< [in]: Target CQ (Constant Quality) level for VBR mode (range 0-51 with 0-automatic) */
+ uint8_t targetQualityLSB; /**< [in]: Fractional part of target quality (as 8.8 fixed point format) */
+ uint16_t lookaheadDepth; /**< [in]: Maximum depth of lookahead with range 0-32 (only used if enableLookahead=1) */
+ uint32_t reserved1;
+ NV_ENC_QP_MAP_MODE qpMapMode; /**< [in]: This flag is used to interpret values in array pecified by NV_ENC_PIC_PARAMS::qpDeltaMap.
+ Set this to NV_ENC_QP_MAP_EMPHASIS to treat values specified by NV_ENC_PIC_PARAMS::qpDeltaMap as Emphasis level Map.
+ Emphasis Level can be assigned any value specified in enum NV_ENC_EMPHASIS_MAP_LEVEL.
+ Emphasis Level Map is used to specify regions to be encoded at varying levels of quality.
+ The hardware encoder adjusts the quantization within the image as per the provided emphasis map,
+ by adjusting the quantization parameter (QP) assigned to each macroblock. This adjustment is commonly called “Delta QP”.
+ The adjustment depends on the absolute QP decided by the rate control algorithm, and is applied after the rate control has decided each macroblock’s QP.
+ Since the Delta QP overrides rate control, enabling emphasis level map may violate bitrate and VBV buffersize constraints.
+ Emphasis level map is useful in situations when client has a priori knowledge of the image complexity (e.g. via use of NVFBC's Classification feature) and encoding those high-complexity areas at higher quality (lower QP) is important, even at the possible cost of violating bitrate/VBV buffersize constraints
+ This feature is not supported when AQ( Spatial/Temporal) is enabled.
+ This feature is only supported for H264 codec currently.
+
+ Set this to NV_ENC_QP_MAP_DELTA to treat values specified by NV_ENC_PIC_PARAMS::qpDeltaMap as QPDelta. This specify QP modifier to be applied on top of the QP chosen by rate control
+
+ Set this to NV_ENC_QP_MAP_DISABLED to ignore NV_ENC_PIC_PARAMS::qpDeltaMap values. In this case, qpDeltaMap should be set to NULL.
+
+ Other values are reserved for future use.*/
+ uint32_t reserved[7];
+ } NV_ENC_RC_PARAMS;
+
+/** macro for constructing the version field of ::_NV_ENC_RC_PARAMS */
+#define NV_ENC_RC_PARAMS_VER NVENCAPI_STRUCT_VERSION(1)
+
+
+
+/**
+ * \struct _NV_ENC_CONFIG_H264_VUI_PARAMETERS
+ * H264 Video Usability Info parameters
+ */
+typedef struct _NV_ENC_CONFIG_H264_VUI_PARAMETERS
+{
+ uint32_t overscanInfoPresentFlag; /**< [in]: if set to 1 , it specifies that the overscanInfo is present */
+ uint32_t overscanInfo; /**< [in]: Specifies the overscan info(as defined in Annex E of the ITU-T Specification). */
+ uint32_t videoSignalTypePresentFlag; /**< [in]: If set to 1, it specifies that the videoFormat, videoFullRangeFlag and colourDescriptionPresentFlag are present. */
+ uint32_t videoFormat; /**< [in]: Specifies the source video format(as defined in Annex E of the ITU-T Specification).*/
+ uint32_t videoFullRangeFlag; /**< [in]: Specifies the output range of the luma and chroma samples(as defined in Annex E of the ITU-T Specification). */
+ uint32_t colourDescriptionPresentFlag; /**< [in]: If set to 1, it specifies that the colourPrimaries, transferCharacteristics and colourMatrix are present. */
+ uint32_t colourPrimaries; /**< [in]: Specifies color primaries for converting to RGB(as defined in Annex E of the ITU-T Specification) */
+ uint32_t transferCharacteristics; /**< [in]: Specifies the opto-electronic transfer characteristics to use (as defined in Annex E of the ITU-T Specification) */
+ uint32_t colourMatrix; /**< [in]: Specifies the matrix coefficients used in deriving the luma and chroma from the RGB primaries (as defined in Annex E of the ITU-T Specification). */
+ uint32_t chromaSampleLocationFlag; /**< [in]: if set to 1 , it specifies that the chromaSampleLocationTop and chromaSampleLocationBot are present.*/
+ uint32_t chromaSampleLocationTop; /**< [in]: Specifies the chroma sample location for top field(as defined in Annex E of the ITU-T Specification) */
+ uint32_t chromaSampleLocationBot; /**< [in]: Specifies the chroma sample location for bottom field(as defined in Annex E of the ITU-T Specification) */
+ uint32_t bitstreamRestrictionFlag; /**< [in]: if set to 1, it specifies the bitstream restriction parameters are present in the bitstream.*/
+ uint32_t reserved[15];
+}NV_ENC_CONFIG_H264_VUI_PARAMETERS;
+
+typedef NV_ENC_CONFIG_H264_VUI_PARAMETERS NV_ENC_CONFIG_HEVC_VUI_PARAMETERS;
+
+/**
+ * \struct _NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE
+ * External motion vector hint counts per block type.
+ * H264 supports multiple hints, while HEVC supports one hint for each valid candidate.
+ */
+typedef struct _NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE
+{
+ uint32_t numCandsPerBlk16x16 : 4; /**< [in]: Supported for H264 and HEVC. Specifies the number of candidates per 16x16 block. */
+ uint32_t numCandsPerBlk16x8 : 4; /**< [in]: Supported for H264 only. Specifies the number of candidates per 16x8 block. */
+ uint32_t numCandsPerBlk8x16 : 4; /**< [in]: Supported for H264 only. Specifies the number of candidates per 8x16 block. */
+ uint32_t numCandsPerBlk8x8 : 4; /**< [in]: Supported for H264 and HEVC. Specifies the number of candidates per 8x8 block. */
+ uint32_t reserved : 16; /**< [in]: Reserved for padding. */
+ uint32_t reserved1[3]; /**< [in]: Reserved for future use. */
+} NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE;
+
+
+/**
+ * \struct _NVENC_EXTERNAL_ME_HINT
+ * External Motion Vector hint structure.
+ */
+typedef struct _NVENC_EXTERNAL_ME_HINT
+{
+ int32_t mvx : 12; /**< [in]: Specifies the x component of integer pixel MV (relative to current MB) S12.0. */
+ int32_t mvy : 10; /**< [in]: Specifies the y component of integer pixel MV (relative to current MB) S10.0 .*/
+ int32_t refidx : 5; /**< [in]: Specifies the reference index (31=invalid). Currently only 1 reference frame per direction is supported for external hints, so \p refidx must be 0. */
+ int32_t dir : 1; /**< [in]: Specifies the direction of motion estimation. 0=L0, 1=L1. */
+ int32_t partType : 2; /**< [in]: Specifies the block partition type. 0=16x16, 1=16x8, 2=8x16, 3=8x8 (blocks in partition must be consecutive). */
+ int32_t lastofPart : 1; /**< [in]: Set to 1 for the last MV of (sub) partition */
+ int32_t lastOfMB : 1; /**< [in]: Set to 1 for the last MV of macroblock. */
+} NVENC_EXTERNAL_ME_HINT;
+
+
+/**
+ * \struct _NV_ENC_CONFIG_H264
+ * H264 encoder configuration parameters
+ */
+typedef struct _NV_ENC_CONFIG_H264
+{
+ uint32_t enableTemporalSVC :1; /**< [in]: Set to 1 to enable SVC temporal*/
+ uint32_t enableStereoMVC :1; /**< [in]: Set to 1 to enable stereo MVC*/
+ uint32_t hierarchicalPFrames :1; /**< [in]: Set to 1 to enable hierarchical PFrames */
+ uint32_t hierarchicalBFrames :1; /**< [in]: Set to 1 to enable hierarchical BFrames */
+ uint32_t outputBufferingPeriodSEI :1; /**< [in]: Set to 1 to write SEI buffering period syntax in the bitstream */
+ uint32_t outputPictureTimingSEI :1; /**< [in]: Set to 1 to write SEI picture timing syntax in the bitstream. When set for the following rateControlMode values: NV_ENC_PARAMS_RC_CBR, NV_ENC_PARAMS_RC_CBR_LOWDELAY_HQ,
+ NV_ENC_PARAMS_RC_CBR_HQ, filler data is inserted if needed to achieve hrd bitrate */
+ uint32_t outputAUD :1; /**< [in]: Set to 1 to write access unit delimiter syntax in bitstream */
+ uint32_t disableSPSPPS :1; /**< [in]: Set to 1 to disable writing of Sequence and Picture parameter info in bitstream */
+ uint32_t outputFramePackingSEI :1; /**< [in]: Set to 1 to enable writing of frame packing arrangement SEI messages to bitstream */
+ uint32_t outputRecoveryPointSEI :1; /**< [in]: Set to 1 to enable writing of recovery point SEI message */
+ uint32_t enableIntraRefresh :1; /**< [in]: Set to 1 to enable gradual decoder refresh or intra refresh. If the GOP structure uses B frames this will be ignored */
+ uint32_t enableConstrainedEncoding :1; /**< [in]: Set this to 1 to enable constrainedFrame encoding where each slice in the constrained picture is independent of other slices.
+ Check support for constrained encoding using ::NV_ENC_CAPS_SUPPORT_CONSTRAINED_ENCODING caps. */
+ uint32_t repeatSPSPPS :1; /**< [in]: Set to 1 to enable writing of Sequence and Picture parameter for every IDR frame */
+ uint32_t enableVFR :1; /**< [in]: Set to 1 to enable variable frame rate. */
+ uint32_t enableLTR :1; /**< [in]: Set to 1 to enable LTR (Long Term Reference) frame support. LTR can be used in two modes: "LTR Trust" mode and "LTR Per Picture" mode.
+ LTR Trust mode: In this mode, ltrNumFrames pictures after IDR are automatically marked as LTR. This mode is enabled by setting ltrTrustMode = 1.
+ Use of LTR Trust mode is strongly discouraged as this mode may be deprecated in future.
+ LTR Per Picture mode: In this mode, client can control whether the current picture should be marked as LTR. Enable this mode by setting
+ ltrTrustMode = 0 and ltrMarkFrame = 1 for the picture to be marked as LTR. This is the preferred mode
+ for using LTR.
+ Note that LTRs are not supported if encoding session is configured with B-frames */
+ uint32_t qpPrimeYZeroTransformBypassFlag :1; /**< [in]: To enable lossless encode set this to 1, set QP to 0 and RC_mode to NV_ENC_PARAMS_RC_CONSTQP and profile to HIGH_444_PREDICTIVE_PROFILE.
+ Check support for lossless encoding using ::NV_ENC_CAPS_SUPPORT_LOSSLESS_ENCODE caps. */
+ uint32_t useConstrainedIntraPred :1; /**< [in]: Set 1 to enable constrained intra prediction. */
+ uint32_t reservedBitFields :15; /**< [in]: Reserved bitfields and must be set to 0 */
+ uint32_t level; /**< [in]: Specifies the encoding level. Client is recommended to set this to NV_ENC_LEVEL_AUTOSELECT in order to enable the NvEncodeAPI interface to select the correct level. */
+ uint32_t idrPeriod; /**< [in]: Specifies the IDR interval. If not set, this is made equal to gopLength in NV_ENC_CONFIG. Low latency application clients can set the IDR interval to NVENC_INFINITE_GOPLENGTH so that IDR frames are not inserted automatically. */
+ uint32_t separateColourPlaneFlag; /**< [in]: Set to 1 to enable 4:4:4 separate colour planes */
+ uint32_t disableDeblockingFilterIDC; /**< [in]: Specifies the deblocking filter mode. Permissible value range: [0,2] */
+ uint32_t numTemporalLayers; /**< [in]: Specifies max temporal layers to be used for hierarchical coding. Valid value range is [1,::NV_ENC_CAPS_NUM_MAX_TEMPORAL_LAYERS] */
+ uint32_t spsId; /**< [in]: Specifies the SPS id of the sequence header */
+ uint32_t ppsId; /**< [in]: Specifies the PPS id of the picture header */
+ NV_ENC_H264_ADAPTIVE_TRANSFORM_MODE adaptiveTransformMode; /**< [in]: Specifies the AdaptiveTransform Mode. Check support for AdaptiveTransform mode using ::NV_ENC_CAPS_SUPPORT_ADAPTIVE_TRANSFORM caps. */
+ NV_ENC_H264_FMO_MODE fmoMode; /**< [in]: Specifies the FMO Mode. Check support for FMO using ::NV_ENC_CAPS_SUPPORT_FMO caps. */
+ NV_ENC_H264_BDIRECT_MODE bdirectMode; /**< [in]: Specifies the BDirect mode. Check support for BDirect mode using ::NV_ENC_CAPS_SUPPORT_BDIRECT_MODE caps.*/
+ NV_ENC_H264_ENTROPY_CODING_MODE entropyCodingMode; /**< [in]: Specifies the entropy coding mode. Check support for CABAC mode using ::NV_ENC_CAPS_SUPPORT_CABAC caps. */
+ NV_ENC_STEREO_PACKING_MODE stereoMode; /**< [in]: Specifies the stereo frame packing mode which is to be signalled in frame packing arrangement SEI */
+ uint32_t intraRefreshPeriod; /**< [in]: Specifies the interval, in frames, between successive intra refresh cycles. Requires enableIntraRefresh to be set.
+ Will be disabled if NV_ENC_CONFIG::gopLength is not set to NVENC_INFINITE_GOPLENGTH. */
+ uint32_t intraRefreshCnt; /**< [in]: Specifies the length of intra refresh in number of frames for periodic intra refresh. This value should be smaller than intraRefreshPeriod */
+ uint32_t maxNumRefFrames; /**< [in]: Specifies the DPB size used for encoding. Setting it to 0 will let driver use the default dpb size.
+ The low latency application which wants to invalidate reference frame as an error resilience tool
+ is recommended to use a large DPB size so that the encoder can keep old reference frames which can be used if recent
+ frames are invalidated. */
+ uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices
+ sliceMode = 0 MB based slices, sliceMode = 1 Byte based slices, sliceMode = 2 MB row based slices, sliceMode = 3, numSlices in Picture.
+ When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting
+ When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */
+ uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For:
+ sliceMode = 0, sliceModeData specifies # of MBs in each slice (except last slice)
+ sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice)
+ sliceMode = 2, sliceModeData specifies # of MB rows in each slice (except last slice)
+ sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */
+ NV_ENC_CONFIG_H264_VUI_PARAMETERS h264VUIParameters; /**< [in]: Specifies the H264 video usability info parameters */
+ uint32_t ltrNumFrames; /**< [in]: Specifies the number of LTR frames. This parameter has different meaning in two LTR modes.
+ In "LTR Trust" mode (ltrTrustMode = 1), encoder will mark the first ltrNumFrames base layer reference frames within each IDR interval as LTR.
+ In "LTR Per Picture" mode (ltrTrustMode = 0 and ltrMarkFrame = 1), ltrNumFrames specifies maximum number of LTR frames in DPB. */
+ uint32_t ltrTrustMode; /**< [in]: Specifies the LTR operating mode. See comments near NV_ENC_CONFIG_H264::enableLTR for description of the two modes.
+ Set to 1 to use "LTR Trust" mode of LTR operation. Clients are discouraged to use "LTR Trust" mode as this mode may
+ be deprecated in future releases.
+ Set to 0 when using "LTR Per Picture" mode of LTR operation. */
+ uint32_t chromaFormatIDC; /**< [in]: Specifies the chroma format. Should be set to 1 for yuv420 input, 3 for yuv444 input.
+ Check support for YUV444 encoding using ::NV_ENC_CAPS_SUPPORT_YUV444_ENCODE caps.*/
+ uint32_t maxTemporalLayers; /**< [in]: Specifies the max temporal layer used for hierarchical coding. */
+ NV_ENC_BFRAME_REF_MODE useBFramesAsRef; /**< [in]: Specifies the B-Frame as reference mode. Check support for useBFramesAsRef mode using ::NV_ENC_CAPS_SUPPORT_BFRAME_REF_MODE caps.*/
+ uint32_t reserved1[269]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_CONFIG_H264;
+
+/**
+ * \struct _NV_ENC_CONFIG_HEVC
+ * HEVC encoder configuration parameters to be set during initialization.
+ */
+typedef struct _NV_ENC_CONFIG_HEVC
+{
+ uint32_t level; /**< [in]: Specifies the level of the encoded bitstream.*/
+ uint32_t tier; /**< [in]: Specifies the level tier of the encoded bitstream.*/
+ NV_ENC_HEVC_CUSIZE minCUSize; /**< [in]: Specifies the minimum size of luma coding unit.*/
+ NV_ENC_HEVC_CUSIZE maxCUSize; /**< [in]: Specifies the maximum size of luma coding unit. Currently NVENC SDK only supports maxCUSize equal to NV_ENC_HEVC_CUSIZE_32x32.*/
+ uint32_t useConstrainedIntraPred :1; /**< [in]: Set 1 to enable constrained intra prediction. */
+ uint32_t disableDeblockAcrossSliceBoundary :1; /**< [in]: Set 1 to disable in loop filtering across slice boundary.*/
+ uint32_t outputBufferingPeriodSEI :1; /**< [in]: Set 1 to write SEI buffering period syntax in the bitstream */
+ uint32_t outputPictureTimingSEI :1; /**< [in]: Set 1 to write SEI picture timing syntax in the bitstream */
+ uint32_t outputAUD :1; /**< [in]: Set 1 to write Access Unit Delimiter syntax. */
+ uint32_t enableLTR :1; /**< [in]: Set to 1 to enable LTR (Long Term Reference) frame support. LTR can be used in two modes: "LTR Trust" mode and "LTR Per Picture" mode.
+ LTR Trust mode: In this mode, ltrNumFrames pictures after IDR are automatically marked as LTR. This mode is enabled by setting ltrTrustMode = 1.
+ Use of LTR Trust mode is strongly discouraged as this mode may be deprecated in future releases.
+ LTR Per Picture mode: In this mode, client can control whether the current picture should be marked as LTR. Enable this mode by setting
+ ltrTrustMode = 0 and ltrMarkFrame = 1 for the picture to be marked as LTR. This is the preferred mode
+ for using LTR.
+ Note that LTRs are not supported if encoding session is configured with B-frames */
+ uint32_t disableSPSPPS :1; /**< [in]: Set 1 to disable VPS,SPS and PPS signalling in the bitstream. */
+ uint32_t repeatSPSPPS :1; /**< [in]: Set 1 to output VPS,SPS and PPS for every IDR frame.*/
+ uint32_t enableIntraRefresh :1; /**< [in]: Set 1 to enable gradual decoder refresh or intra refresh. If the GOP structure uses B frames this will be ignored */
+ uint32_t chromaFormatIDC :2; /**< [in]: Specifies the chroma format. Should be set to 1 for yuv420 input, 3 for yuv444 input.*/
+ uint32_t pixelBitDepthMinus8 :3; /**< [in]: Specifies pixel bit depth minus 8. Should be set to 0 for 8 bit input, 2 for 10 bit input.*/
+ uint32_t reserved :18; /**< [in]: Reserved bitfields.*/
+ uint32_t idrPeriod; /**< [in]: Specifies the IDR interval. If not set, this is made equal to gopLength in NV_ENC_CONFIG. Low latency application clients can set the IDR interval to NVENC_INFINITE_GOPLENGTH so that IDR frames are not inserted automatically. */
+ uint32_t intraRefreshPeriod; /**< [in]: Specifies the interval, in frames, between successive intra refresh cycles. Requires enableIntraRefresh to be set.
+ Will be disabled if NV_ENC_CONFIG::gopLength is not set to NVENC_INFINITE_GOPLENGTH. */
+ uint32_t intraRefreshCnt; /**< [in]: Specifies the length of intra refresh in number of frames for periodic intra refresh. This value should be smaller than intraRefreshPeriod */
+ uint32_t maxNumRefFramesInDPB; /**< [in]: Specifies the maximum number of references frames in the DPB.*/
+ uint32_t ltrNumFrames; /**< [in]: This parameter has different meaning in two LTR modes.
+ In "LTR Trust" mode (ltrTrustMode = 1), encoder will mark the first ltrNumFrames base layer reference frames within each IDR interval as LTR.
+ In "LTR Per Picture" mode (ltrTrustMode = 0 and ltrMarkFrame = 1), ltrNumFrames specifies maximum number of LTR frames in DPB. */
+ uint32_t vpsId; /**< [in]: Specifies the VPS id of the video parameter set */
+ uint32_t spsId; /**< [in]: Specifies the SPS id of the sequence header */
+ uint32_t ppsId; /**< [in]: Specifies the PPS id of the picture header */
+ uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices
+ sliceMode = 0 CTU based slices, sliceMode = 1 Byte based slices, sliceMode = 2 CTU row based slices, sliceMode = 3, numSlices in Picture
+ When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */
+ uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For:
+ sliceMode = 0, sliceModeData specifies # of CTUs in each slice (except last slice)
+ sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice)
+ sliceMode = 2, sliceModeData specifies # of CTU rows in each slice (except last slice)
+ sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */
+ uint32_t maxTemporalLayersMinus1; /**< [in]: Specifies the maximum number of temporal layers, minus 1, used for hierarchical coding. */
+ NV_ENC_CONFIG_HEVC_VUI_PARAMETERS hevcVUIParameters; /**< [in]: Specifies the HEVC video usability info parameters */
+ uint32_t ltrTrustMode; /**< [in]: Specifies the LTR operating mode. See comments near NV_ENC_CONFIG_HEVC::enableLTR for description of the two modes.
+ Set to 1 to use "LTR Trust" mode of LTR operation. Clients are discouraged to use "LTR Trust" mode as this mode may
+ be deprecated in future releases.
+ Set to 0 when using "LTR Per Picture" mode of LTR operation. */
+ NV_ENC_BFRAME_REF_MODE useBFramesAsRef; /**< [in]: Specifies the B-Frame as reference mode. Check support for useBFramesAsRef mode using ::NV_ENC_CAPS_SUPPORT_BFRAME_REF_MODE caps.*/
+ uint32_t reserved1[216]; /**< [in]: Reserved and must be set to 0.*/
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_CONFIG_HEVC;
+
+/**
+ * \struct _NV_ENC_CONFIG_H264_MEONLY
+ * H264 encoder configuration parameters for ME only Mode
+ *
+ */
+typedef struct _NV_ENC_CONFIG_H264_MEONLY
+{
+ uint32_t disablePartition16x16 :1; /**< [in]: Disable MotionEstimation on 16x16 blocks*/
+ uint32_t disablePartition8x16 :1; /**< [in]: Disable MotionEstimation on 8x16 blocks*/
+ uint32_t disablePartition16x8 :1; /**< [in]: Disable MotionEstimation on 16x8 blocks*/
+ uint32_t disablePartition8x8 :1; /**< [in]: Disable MotionEstimation on 8x8 blocks*/
+ uint32_t disableIntraSearch :1; /**< [in]: Disable Intra search during MotionEstimation*/
+ uint32_t bStereoEnable :1; /**< [in]: Enable Stereo Mode for Motion Estimation where each view is independently executed*/
+ uint32_t reserved :26; /**< [in]: Reserved and must be set to 0 */
+ uint32_t reserved1 [255]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_CONFIG_H264_MEONLY;
+
+
+/**
+ * \struct _NV_ENC_CONFIG_HEVC_MEONLY
+ * HEVC encoder configuration parameters for ME only Mode
+ *
+ */
+typedef struct _NV_ENC_CONFIG_HEVC_MEONLY
+{
+ uint32_t reserved [256]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved1[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_CONFIG_HEVC_MEONLY;
+
+/**
+ * \struct _NV_ENC_CODEC_CONFIG
+ * Codec-specific encoder configuration parameters to be set during initialization.
+ */
+typedef union _NV_ENC_CODEC_CONFIG
+{
+ NV_ENC_CONFIG_H264 h264Config; /**< [in]: Specifies the H.264-specific encoder configuration. */
+ NV_ENC_CONFIG_HEVC hevcConfig; /**< [in]: Specifies the HEVC-specific encoder configuration. */
+ NV_ENC_CONFIG_H264_MEONLY h264MeOnlyConfig; /**< [in]: Specifies the H.264-specific ME only encoder configuration. */
+ NV_ENC_CONFIG_HEVC_MEONLY hevcMeOnlyConfig; /**< [in]: Specifies the HEVC-specific ME only encoder configuration. */
+ uint32_t reserved[320]; /**< [in]: Reserved and must be set to 0 */
+} NV_ENC_CODEC_CONFIG;
+
+
+/**
+ * \struct _NV_ENC_CONFIG
+ * Encoder configuration parameters to be set during initialization.
+ */
+typedef struct _NV_ENC_CONFIG
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_CONFIG_VER. */
+ GUID profileGUID; /**< [in]: Specifies the codec profile guid. If client specifies \p NV_ENC_CODEC_PROFILE_AUTOSELECT_GUID the NvEncodeAPI interface will select the appropriate codec profile. */
+ uint32_t gopLength; /**< [in]: Specifies the number of pictures in one GOP. Low latency application clients can set gopLength to NVENC_INFINITE_GOPLENGTH so that keyframes are not inserted automatically. */
+ int32_t frameIntervalP; /**< [in]: Specifies the GOP pattern as follows: \p frameIntervalP = 0: I, 1: IPP, 2: IBP, 3: IBBP. If gopLength is set to NVENC_INFINITE_GOPLENGTH, \p frameIntervalP should be set to 1. */
+ uint32_t monoChromeEncoding; /**< [in]: Set this to 1 to enable monochrome encoding for this session. */
+ NV_ENC_PARAMS_FRAME_FIELD_MODE frameFieldMode; /**< [in]: Specifies the frame/field mode.
+ Check support for field encoding using ::NV_ENC_CAPS_SUPPORT_FIELD_ENCODING caps.
+ Using a frameFieldMode other than NV_ENC_PARAMS_FRAME_FIELD_MODE_FRAME for RGB input is not supported. */
+ NV_ENC_MV_PRECISION mvPrecision; /**< [in]: Specifies the desired motion vector prediction precision. */
+ NV_ENC_RC_PARAMS rcParams; /**< [in]: Specifies the rate control parameters for the current encoding session. */
+ NV_ENC_CODEC_CONFIG encodeCodecConfig; /**< [in]: Specifies the codec specific config parameters through this union. */
+ uint32_t reserved [278]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_CONFIG;
+
+/** macro for constructing the version field of ::_NV_ENC_CONFIG */
+#define NV_ENC_CONFIG_VER (NVENCAPI_STRUCT_VERSION(7) | ( 1<<31 ))
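+
+/*
+ * Illustrative sketch (not part of the official header): building an encoder
+ * configuration with periodic intra refresh for a low-latency H264 session.
+ * All numeric values are example choices, not recommendations.
+ *
+ * \code
+ * NV_ENC_CONFIG config = { 0 };
+ * config.version = NV_ENC_CONFIG_VER;
+ * config.gopLength = NVENC_INFINITE_GOPLENGTH;       // required for intra refresh
+ * config.frameIntervalP = 1;                         // IPP..., no B frames
+ *
+ * NV_ENC_CONFIG_H264* h264 = &config.encodeCodecConfig.h264Config;
+ * h264->idrPeriod = NVENC_INFINITE_GOPLENGTH;        // no automatic IDR insertion
+ * h264->enableIntraRefresh = 1;
+ * h264->intraRefreshPeriod = 300;                    // refresh wave every 300 frames
+ * h264->intraRefreshCnt = 30;                        // each wave spans 30 frames
+ * \endcode
+ */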
+
+
+/**
+ * \struct _NV_ENC_INITIALIZE_PARAMS
+ * Encode Session Initialization parameters.
+ */
+typedef struct _NV_ENC_INITIALIZE_PARAMS
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_INITIALIZE_PARAMS_VER. */
+ GUID encodeGUID; /**< [in]: Specifies the Encode GUID for which the encoder is being created. ::NvEncInitializeEncoder() API will fail if this is not set, or set to unsupported value. */
+ GUID presetGUID; /**< [in]: Specifies the preset for encoding. If the preset GUID is set, the preset configuration will be applied before any other parameter. */
+ uint32_t encodeWidth; /**< [in]: Specifies the encode width. If not set ::NvEncInitializeEncoder() API will fail. */
+ uint32_t encodeHeight; /**< [in]: Specifies the encode height. If not set ::NvEncInitializeEncoder() API will fail. */
+ uint32_t darWidth; /**< [in]: Specifies the display aspect ratio Width. */
+ uint32_t darHeight; /**< [in]: Specifies the display aspect ratio height. */
+ uint32_t frameRateNum; /**< [in]: Specifies the numerator for frame rate used for encoding in frames per second ( Frame rate = frameRateNum / frameRateDen ). */
+ uint32_t frameRateDen; /**< [in]: Specifies the denominator for frame rate used for encoding in frames per second ( Frame rate = frameRateNum / frameRateDen ). */
+ uint32_t enableEncodeAsync; /**< [in]: Set this to 1 to enable asynchronous mode; the client is expected to use events to get picture completion notification. */
+ uint32_t enablePTD; /**< [in]: Set this to 1 to enable the Picture Type Decision to be taken by the NvEncodeAPI interface. */
+ uint32_t reportSliceOffsets :1; /**< [in]: Set this to 1 to enable reporting slice offsets in ::_NV_ENC_LOCK_BITSTREAM. NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync must be set to 0 to use this feature. Client must set this to 0 if NV_ENC_CONFIG_H264::sliceMode is 1 on Kepler GPUs */
+ uint32_t enableSubFrameWrite :1; /**< [in]: Set this to 1 to write out available bitstream to memory at subframe intervals */
+ uint32_t enableExternalMEHints :1; /**< [in]: Set to 1 to enable external ME hints for the current frame. For NV_ENC_INITIALIZE_PARAMS::enablePTD=1 with B frames, programming L1 hints is optional for B frames since Client doesn't know internal GOP structure.
+ NV_ENC_PIC_PARAMS::meHintRefPicDist should preferably be set with enablePTD=1. */
+ uint32_t enableMEOnlyMode :1; /**< [in]: Set to 1 to enable ME Only Mode .*/
+ uint32_t enableWeightedPrediction :1; /**< [in]: Set this to 1 to enable weighted prediction. Not supported if encode session is configured for B-Frames( 'frameIntervalP' in NV_ENC_CONFIG is greater than 1).*/
+ uint32_t enableOutputInVidmem :1; /**< [in]: Set this to 1 to enable output of NVENC in video memory buffer created by application. This feature is not supported for HEVC ME only mode. */
+ uint32_t reservedBitFields :26; /**< [in]: Reserved bitfields and must be set to 0 */
+ uint32_t privDataSize; /**< [in]: Reserved private data buffer size and must be set to 0 */
+ void* privData; /**< [in]: Reserved private data buffer and must be set to NULL */
+ NV_ENC_CONFIG* encodeConfig; /**< [in]: Specifies the advanced codec specific structure. If client has sent a valid codec config structure, it will override parameters set by the NV_ENC_INITIALIZE_PARAMS::presetGUID parameter. If set to NULL the NvEncodeAPI interface will use the NV_ENC_INITIALIZE_PARAMS::presetGUID to set the codec specific parameters.
+ Client can also optionally query the NvEncodeAPI interface to get codec specific parameters for a presetGUID using ::NvEncGetEncodePresetConfig() API. It can then modify (if required) some of the codec config parameters and send down a custom config structure as part of ::_NV_ENC_INITIALIZE_PARAMS.
+ Even in this case client is recommended to pass the same preset guid it has used in ::NvEncGetEncodePresetConfig() API to query the config structure; as NV_ENC_INITIALIZE_PARAMS::presetGUID. This will not override the custom config structure but will be used to determine other Encoder HW specific parameters not exposed in the API. */
+ uint32_t maxEncodeWidth; /**< [in]: Maximum encode width to be used for current Encode session.
+ Client should allocate output buffers according to this dimension for dynamic resolution change. If set to 0, Encoder will not allow dynamic resolution change. */
+ uint32_t maxEncodeHeight; /**< [in]: Maximum encode height to be allowed for current Encode session.
+ Client should allocate output buffers according to this dimension for dynamic resolution change. If set to 0, the Encoder will not allow dynamic resolution change. */
+ NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE maxMEHintCountsPerBlock[2]; /**< [in]: If Client wants to pass external motion vectors in NV_ENC_PIC_PARAMS::meExternalHints buffer it must specify the maximum number of hint candidates per block per direction for the encode session.
+ The NV_ENC_INITIALIZE_PARAMS::maxMEHintCountsPerBlock[0] is for L0 predictors and NV_ENC_INITIALIZE_PARAMS::maxMEHintCountsPerBlock[1] is for L1 predictors.
+ The client must also set NV_ENC_INITIALIZE_PARAMS::enableExternalMEHints to 1. */
+ uint32_t reserved [289]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_INITIALIZE_PARAMS;
+
+/** macro for constructing the version field of ::_NV_ENC_INITIALIZE_PARAMS */
+#define NV_ENC_INITIALIZE_PARAMS_VER (NVENCAPI_STRUCT_VERSION(5) | ( 1<<31 ))
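+
+/*
+ * Illustrative sketch (not part of the official header): initializing a 1080p
+ * H264 session starting from a preset configuration, following the flow
+ * described for NV_ENC_INITIALIZE_PARAMS::encodeConfig above. `encoder` is a
+ * session handle from NvEncOpenEncodeSessionEx; error handling is omitted, and
+ * production code usually resolves entry points through
+ * NV_ENCODE_API_FUNCTION_LIST rather than direct prototypes.
+ *
+ * \code
+ * NV_ENC_PRESET_CONFIG presetCfg = { 0 };
+ * presetCfg.version = NV_ENC_PRESET_CONFIG_VER;
+ * presetCfg.presetCfg.version = NV_ENC_CONFIG_VER;
+ * NvEncGetEncodePresetConfig(encoder, NV_ENC_CODEC_H264_GUID,
+ *                            NV_ENC_PRESET_DEFAULT_GUID, &presetCfg);
+ *
+ * NV_ENC_INITIALIZE_PARAMS init = { 0 };
+ * init.version = NV_ENC_INITIALIZE_PARAMS_VER;
+ * init.encodeGUID = NV_ENC_CODEC_H264_GUID;
+ * init.presetGUID = NV_ENC_PRESET_DEFAULT_GUID;   // same preset used for the query
+ * init.encodeWidth  = 1920;
+ * init.encodeHeight = 1080;
+ * init.darWidth  = 1920;
+ * init.darHeight = 1080;
+ * init.frameRateNum = 30;
+ * init.frameRateDen = 1;
+ * init.enablePTD = 1;                             // let the encoder pick picture types
+ * init.encodeConfig = &presetCfg.presetCfg;       // optionally tweak fields first
+ * NvEncInitializeEncoder(encoder, &init);
+ * \endcode
+ */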
+
+
+/**
+ * \struct _NV_ENC_RECONFIGURE_PARAMS
+ * Encode Session Reconfigured parameters.
+ */
+typedef struct _NV_ENC_RECONFIGURE_PARAMS
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_RECONFIGURE_PARAMS_VER. */
+ NV_ENC_INITIALIZE_PARAMS reInitEncodeParams; /**< [in]: Encoder session re-initialization parameters.
+ If reInitEncodeParams.encodeConfig is NULL and
+ reInitEncodeParams.presetGUID is the same as the preset
+ GUID specified on the call to NvEncInitializeEncoder(),
+ EncodeAPI will continue to use the existing encode
+ configuration.
+ If reInitEncodeParams.encodeConfig is NULL and
+ reInitEncodeParams.presetGUID is different from the preset
+ GUID specified on the call to NvEncInitializeEncoder(),
+ EncodeAPI will try to use the default configuration for
+ the preset specified by reInitEncodeParams.presetGUID.
+ In this case, reconfiguration may fail if the new
+ configuration is incompatible with the existing
+ configuration (e.g. the new configuration results in
+ a change in the GOP structure). */
+ uint32_t resetEncoder :1; /**< [in]: This resets the rate control states and other internal encoder states. This should be used only with an IDR frame.
+ If NV_ENC_INITIALIZE_PARAMS::enablePTD is set to 1, encoder will force the frame type to IDR */
+ uint32_t forceIDR :1; /**< [in]: Encode the current picture as an IDR picture. This flag is only valid when Picture type decision is taken by the Encoder
+ [_NV_ENC_INITIALIZE_PARAMS::enablePTD == 1]. */
+ uint32_t reserved :30;
+
+}NV_ENC_RECONFIGURE_PARAMS;
+
+/** macro for constructing the version field of ::_NV_ENC_RECONFIGURE_PARAMS */
+#define NV_ENC_RECONFIGURE_PARAMS_VER (NVENCAPI_STRUCT_VERSION(1) | ( 1<<31 ))
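+
+/*
+ * Illustrative sketch (not part of the official header): changing the target
+ * bitrate mid-stream. Assumes the client kept `encoder` and the
+ * NV_ENC_INITIALIZE_PARAMS `init` used at session creation (with a valid,
+ * bitrate-based encodeConfig); error handling is omitted.
+ *
+ * \code
+ * NV_ENC_RECONFIGURE_PARAMS reconf = { 0 };
+ * reconf.version = NV_ENC_RECONFIGURE_PARAMS_VER;
+ * reconf.reInitEncodeParams = init;               // start from the current settings
+ * reconf.reInitEncodeParams.encodeConfig->rcParams.averageBitRate = 2000000;
+ * reconf.resetEncoder = 1;                        // reset RC state at the new rate
+ * reconf.forceIDR = 1;                            // start clean with an IDR frame
+ * NvEncReconfigureEncoder(encoder, &reconf);
+ * \endcode
+ */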
+
+/**
+ * \struct _NV_ENC_PRESET_CONFIG
+ * Encoder preset config
+ */
+typedef struct _NV_ENC_PRESET_CONFIG
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_PRESET_CONFIG_VER. */
+ NV_ENC_CONFIG presetCfg; /**< [out]: preset config returned by the Nvidia Video Encoder interface. */
+ uint32_t reserved1[255]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+}NV_ENC_PRESET_CONFIG;
+
+/** macro for constructing the version field of ::_NV_ENC_PRESET_CONFIG */
+#define NV_ENC_PRESET_CONFIG_VER (NVENCAPI_STRUCT_VERSION(4) | ( 1<<31 ))
+
+
+/**
+ * \struct _NV_ENC_SEI_PAYLOAD
+ * User SEI message
+ */
+typedef struct _NV_ENC_SEI_PAYLOAD
+{
+ uint32_t payloadSize; /**< [in] SEI payload size in bytes. SEI payload must be byte aligned, as described in Annex D */
+ uint32_t payloadType; /**< [in] SEI payload types and syntax can be found in Annex D of the H.264 Specification. */
+ uint8_t *payload; /**< [in] pointer to user data */
+} NV_ENC_SEI_PAYLOAD;
+
+#define NV_ENC_H264_SEI_PAYLOAD NV_ENC_SEI_PAYLOAD
+
+/**
+ * \struct _NV_ENC_PIC_PARAMS_H264
+ * H264 specific enc pic params. sent on a per frame basis.
+ */
+typedef struct _NV_ENC_PIC_PARAMS_H264
+{
+ uint32_t displayPOCSyntax; /**< [in]: Specifies the display POC syntax. This is required to be set if the client is handling the picture type decision. */
+ uint32_t reserved3; /**< [in]: Reserved and must be set to 0 */
+ uint32_t refPicFlag; /**< [in]: Set to 1 for a reference picture. This is ignored if NV_ENC_INITIALIZE_PARAMS::enablePTD is set to 1. */
+ uint32_t colourPlaneId; /**< [in]: Specifies the colour plane ID associated with the current input. */
+ uint32_t forceIntraRefreshWithFrameCnt; /**< [in]: Forces an intra refresh with duration equal to intraRefreshFrameCnt.
+ When outputRecoveryPointSEI is set, this value is used for recovery_frame_cnt in the recovery point SEI message.
+ forceIntraRefreshWithFrameCnt cannot be used if B frames are used in the GOP structure specified */
+ uint32_t constrainedFrame :1; /**< [in]: Set to 1 if client wants to encode this frame with each slice completely independent of other slices in the frame.
+ NV_ENC_INITIALIZE_PARAMS::enableConstrainedEncoding should be set to 1 */
+ uint32_t sliceModeDataUpdate :1; /**< [in]: Set to 1 if client wants to change the sliceModeData field to specify new sliceSize Parameter
+ When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting */
+ uint32_t ltrMarkFrame :1; /**< [in]: Set to 1 if client wants to mark this frame as LTR */
+ uint32_t ltrUseFrames :1; /**< [in]: Set to 1 if client allows encoding this frame using the LTR frames specified in ltrFrameBitmap */
+ uint32_t reservedBitFields :28; /**< [in]: Reserved bit fields and must be set to 0 */
+ uint8_t* sliceTypeData; /**< [in]: Deprecated. */
+ uint32_t sliceTypeArrayCnt; /**< [in]: Deprecated. */
+ uint32_t seiPayloadArrayCnt; /**< [in]: Specifies the number of elements allocated in seiPayloadArray array. */
+ NV_ENC_SEI_PAYLOAD* seiPayloadArray; /**< [in]: Array of SEI payloads which will be inserted for this frame. */
+ uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices
+ sliceMode = 0 MB based slices, sliceMode = 1 Byte based slices, sliceMode = 2 MB row based slices, sliceMode = 3, numSlices in Picture
+ When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting
+ When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */
+ uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For:
+ sliceMode = 0, sliceModeData specifies # of MBs in each slice (except last slice)
+ sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice)
+ sliceMode = 2, sliceModeData specifies # of MB rows in each slice (except last slice)
+ sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */
+ uint32_t ltrMarkFrameIdx; /**< [in]: Specifies the long term reference frame index to use for marking this frame as LTR.*/
+ uint32_t ltrUseFrameBitmap; /**< [in]: Specifies the associated bitmap of LTR frame indices to use when encoding this frame. */
+ uint32_t ltrUsageMode; /**< [in]: Not supported. Reserved for future use and must be set to 0. */
+ uint32_t forceIntraSliceCount; /**< [in]: Specifies the number of slices to be forced to Intra in the current picture.
+ This option along with forceIntraSliceIdx[] array needs to be used with sliceMode = 3 only */
+ uint32_t *forceIntraSliceIdx; /**< [in]: Slice indices to be forced to intra in the current picture. Each slice index should be <= num_slices_in_picture -1. Index starts from 0 for first slice.
+ The number of entries in this array should be equal to forceIntraSliceCount */
+ uint32_t reserved [242]; /**< [in]: Reserved and must be set to 0. */
+ void* reserved2[61]; /**< [in]: Reserved and must be set to NULL. */
+} NV_ENC_PIC_PARAMS_H264;
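+
+/*
+ * Illustrative sketch (not part of the official header): attaching one user
+ * SEI message to a frame. `picParams` is the frame's NV_ENC_PIC_PARAMS
+ * (declared later in this header), `userData` is a hypothetical byte buffer,
+ * and payloadType 5 is the H.264 "user data unregistered" SEI type from
+ * Annex D.
+ *
+ * \code
+ * static uint8_t userData[16] = { 0 };
+ * NV_ENC_SEI_PAYLOAD sei = { 0 };
+ * sei.payloadSize = sizeof(userData);
+ * sei.payloadType = 5;                      // user_data_unregistered (Annex D)
+ * sei.payload = userData;
+ *
+ * NV_ENC_PIC_PARAMS_H264* h264 = &picParams.codecPicParams.h264PicParams;
+ * h264->seiPayloadArray = &sei;
+ * h264->seiPayloadArrayCnt = 1;
+ * \endcode
+ */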
+
+/**
+ * \struct _NV_ENC_PIC_PARAMS_HEVC
+ * HEVC specific enc pic params. sent on a per frame basis.
+ */
+typedef struct _NV_ENC_PIC_PARAMS_HEVC
+{
+ uint32_t displayPOCSyntax; /**< [in]: Specifies the display POC syntax. This is required to be set if the client is handling the picture type decision. */
+ uint32_t refPicFlag; /**< [in]: Set to 1 for a reference picture. This is ignored if NV_ENC_INITIALIZE_PARAMS::enablePTD is set to 1. */
+ uint32_t temporalId; /**< [in]: Specifies the temporal id of the picture */
+ uint32_t forceIntraRefreshWithFrameCnt; /**< [in]: Forces an intra refresh with duration equal to intraRefreshFrameCnt.
+ When outputRecoveryPointSEI is set, this value is used for recovery_frame_cnt in the recovery point SEI message.
+ forceIntraRefreshWithFrameCnt cannot be used if B frames are used in the GOP structure specified */
+ uint32_t constrainedFrame :1; /**< [in]: Set to 1 if client wants to encode this frame with each slice completely independent of other slices in the frame.
+ NV_ENC_INITIALIZE_PARAMS::enableConstrainedEncoding should be set to 1 */
+ uint32_t sliceModeDataUpdate :1; /**< [in]: Set to 1 if client wants to change the sliceModeData field to specify new sliceSize Parameter
+ When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting */
+ uint32_t ltrMarkFrame :1; /**< [in]: Set to 1 if client wants to mark this frame as LTR */
+ uint32_t ltrUseFrames :1; /**< [in]: Set to 1 if client allows encoding this frame using the LTR frames specified in ltrFrameBitmap */
+ uint32_t reservedBitFields :28; /**< [in]: Reserved bit fields and must be set to 0 */
+ uint8_t* sliceTypeData; /**< [in]: Array which specifies the slice type used to force intra slice for a particular slice. Currently supported only for NV_ENC_CONFIG_H264::sliceMode == 3.
+ Client should allocate an array of size sliceModeData, where sliceModeData is specified in the corresponding field of ::_NV_ENC_CONFIG_H264
+ Array element with index n corresponds to nth slice. To force a particular slice to intra client should set corresponding array element to NV_ENC_SLICE_TYPE_I
+ all other array elements should be set to NV_ENC_SLICE_TYPE_DEFAULT */
+ uint32_t sliceTypeArrayCnt; /**< [in]: Client should set this to the number of elements allocated in sliceTypeData array. If sliceTypeData is NULL then this should be set to 0 */
+ uint32_t sliceMode; /**< [in]: This parameter in conjunction with sliceModeData specifies the way in which the picture is divided into slices
+ sliceMode = 0 CTU based slices, sliceMode = 1 Byte based slices, sliceMode = 2 CTU row based slices, sliceMode = 3, numSlices in Picture
+ When forceIntraRefreshWithFrameCnt is set it will have priority over sliceMode setting
+ When sliceMode == 0 and sliceModeData == 0 whole picture will be coded with one slice */
+ uint32_t sliceModeData; /**< [in]: Specifies the parameter needed for sliceMode. For:
+ sliceMode = 0, sliceModeData specifies # of CTUs in each slice (except last slice)
+ sliceMode = 1, sliceModeData specifies maximum # of bytes in each slice (except last slice)
+ sliceMode = 2, sliceModeData specifies # of CTU rows in each slice (except last slice)
+ sliceMode = 3, sliceModeData specifies number of slices in the picture. Driver will divide picture into slices optimally */
+ uint32_t ltrMarkFrameIdx; /**< [in]: Specifies the long term reference frame index to use for marking this frame as LTR.*/
+ uint32_t ltrUseFrameBitmap; /**< [in]: Specifies the associated bitmap of LTR frame indices to use when encoding this frame. */
+ uint32_t ltrUsageMode; /**< [in]: Not supported. Reserved for future use and must be set to 0. */
+ uint32_t seiPayloadArrayCnt; /**< [in]: Specifies the number of elements allocated in seiPayloadArray array. */
+ uint32_t reserved; /**< [in]: Reserved and must be set to 0. */
+ NV_ENC_SEI_PAYLOAD* seiPayloadArray; /**< [in]: Array of SEI payloads which will be inserted for this frame. */
+ uint32_t reserved2 [244]; /**< [in]: Reserved and must be set to 0. */
+ void* reserved3[61]; /**< [in]: Reserved and must be set to NULL. */
+} NV_ENC_PIC_PARAMS_HEVC;
+
+/**
+ * Codec specific per-picture encoding parameters.
+ */
+typedef union _NV_ENC_CODEC_PIC_PARAMS
+{
+ NV_ENC_PIC_PARAMS_H264 h264PicParams; /**< [in]: H264 encode picture params. */
+ NV_ENC_PIC_PARAMS_HEVC hevcPicParams; /**< [in]: HEVC encode picture params. */
+ uint32_t reserved[256]; /**< [in]: Reserved and must be set to 0. */
+} NV_ENC_CODEC_PIC_PARAMS;
+
+/**
+ * \struct _NV_ENC_PIC_PARAMS
+ * Encoding parameters that need to be sent on a per frame basis.
+ */
+typedef struct _NV_ENC_PIC_PARAMS
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_PIC_PARAMS_VER. */
+ uint32_t inputWidth; /**< [in]: Specifies the input buffer width */
+ uint32_t inputHeight; /**< [in]: Specifies the input buffer height */
+ uint32_t inputPitch; /**< [in]: Specifies the input buffer pitch. If pitch value is not known, set this to inputWidth. */
+ uint32_t encodePicFlags; /**< [in]: Specifies bit-wise OR`ed encode pic flags. See ::NV_ENC_PIC_FLAGS enum. */
+ uint32_t frameIdx; /**< [in]: Specifies the frame index associated with the input frame [optional]. */
+ uint64_t inputTimeStamp; /**< [in]: Specifies presentation timestamp associated with the input picture. */
+ uint64_t inputDuration; /**< [in]: Specifies duration of the input picture */
+ NV_ENC_INPUT_PTR inputBuffer; /**< [in]: Specifies the input buffer pointer. Client must use a pointer obtained from ::NvEncCreateInputBuffer() or ::NvEncMapInputResource() APIs.*/
+ NV_ENC_OUTPUT_PTR outputBitstream; /**< [in]: Specifies the output buffer pointer.
+ If NV_ENC_INITIALIZE_PARAMS::enableOutputInVidmem is set to 0, specifies the pointer to output buffer. Client should use a pointer obtained from ::NvEncCreateBitstreamBuffer() API.
+ If NV_ENC_INITIALIZE_PARAMS::enableOutputInVidmem is set to 1, client should allocate buffer in video memory for NV_ENC_ENCODE_OUT_PARAMS struct and encoded bitstream data. Client
+ should use a pointer obtained from ::NvEncMapInputResource() API, when mapping this output buffer and assign it to NV_ENC_PIC_PARAMS::outputBitstream.
+ First 256 bytes of this buffer should be interpreted as NV_ENC_ENCODE_OUT_PARAMS struct followed by encoded bitstream data. Recommended size for output buffer is sum of size of
+ NV_ENC_ENCODE_OUT_PARAMS struct and twice the input frame size for lower resolutions (e.g. CIF) and 1.5 times the input frame size for higher resolutions. If the encoded bitstream size is
+ greater than the allocated buffer size for encoded bitstream, then the output buffer will have encoded bitstream data equal to buffer size. All CUDA operations on this buffer must use
+ the default stream. */
+ void* completionEvent; /**< [in]: Specifies an event to be signalled on completion of encoding of this Frame [only if operating in Asynchronous mode]. Each output buffer should be associated with a distinct event pointer. */
+ NV_ENC_BUFFER_FORMAT bufferFmt; /**< [in]: Specifies the input buffer format. */
+ NV_ENC_PIC_STRUCT pictureStruct; /**< [in]: Specifies structure of the input picture. */
+ NV_ENC_PIC_TYPE pictureType; /**< [in]: Specifies the input picture type. Required to be set explicitly by the client if the client has not set NV_ENC_INITIALIZE_PARAMS::enablePTD to 1 while calling NvEncInitializeEncoder. */
+ NV_ENC_CODEC_PIC_PARAMS codecPicParams; /**< [in]: Specifies the codec specific per-picture encoding parameters. */
+ NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE meHintCountsPerBlock[2]; /**< [in]: Specifies the number of hint candidates per block per direction for the current frame. meHintCountsPerBlock[0] is for L0 predictors and meHintCountsPerBlock[1] is for L1 predictors.
+ The candidate count in NV_ENC_PIC_PARAMS::meHintCountsPerBlock[lx] must never exceed NV_ENC_INITIALIZE_PARAMS::maxMEHintCountsPerBlock[lx] provided during encoder initialization. */
+ NVENC_EXTERNAL_ME_HINT *meExternalHints; /**< [in]: Specifies the pointer to ME external hints for the current frame. The size of ME hint buffer should be equal to number of macroblocks * the total number of candidates per macroblock.
+ The total number of candidates per MB per direction = 1*meHintCountsPerBlock[Lx].numCandsPerBlk16x16 + 2*meHintCountsPerBlock[Lx].numCandsPerBlk16x8 + 2*meHintCountsPerBlock[Lx].numCandsPerBlk8x16
+ + 4*meHintCountsPerBlock[Lx].numCandsPerBlk8x8. For frames using bidirectional ME, the total number of candidates for a single macroblock is the sum of the total number of candidates per MB for each direction (L0 and L1). */
+ uint32_t reserved1[6]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[2]; /**< [in]: Reserved and must be set to NULL */
+ int8_t *qpDeltaMap; /**< [in]: Specifies the pointer to a signed byte array containing a value per MB in raster scan order for the current picture, which will be interpreted depending on NV_ENC_RC_PARAMS::qpMapMode.
+ If NV_ENC_RC_PARAMS::qpMapMode is NV_ENC_QP_MAP_DELTA, this specifies the QP modifier to be applied on top of the QP chosen by rate control.
+ If NV_ENC_RC_PARAMS::qpMapMode is NV_ENC_QP_MAP_EMPHASIS, it specifies the emphasis level map per MB. This level value, along with the QP chosen by rate control, is used to compute the QP modifier,
+ which in turn is applied on top of the QP chosen by rate control.
+ If NV_ENC_RC_PARAMS::qpMapMode is NV_ENC_QP_MAP_DISABLED, the values in qpDeltaMap will be ignored.*/
+ uint32_t qpDeltaMapSize; /**< [in]: Specifies the size in bytes of the qpDeltaMap surface allocated by the client and pointed to by NV_ENC_PIC_PARAMS::qpDeltaMap. The surface (array) should contain picWidthInMbs * picHeightInMbs entries. */
+ uint32_t reservedBitFields; /**< [in]: Reserved bitfields and must be set to 0 */
+ uint16_t meHintRefPicDist[2]; /**< [in]: Specifies the temporal distance for the reference picture (NVENC_EXTERNAL_ME_HINT::refidx = 0) used during external ME with NV_ENC_INITIALIZE_PARAMS::enablePTD = 1. meHintRefPicDist[0] is for L0 hints and meHintRefPicDist[1] is for L1 hints.
+ If not set, a distance of 1 will be inferred internally. Ignored for NV_ENC_INITIALIZE_PARAMS::enablePTD = 0 */
+ uint32_t reserved3[286]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved4[60]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_PIC_PARAMS;
+
+/** Macro for constructing the version field of ::_NV_ENC_PIC_PARAMS */
+#define NV_ENC_PIC_PARAMS_VER (NVENCAPI_STRUCT_VERSION(4) | ( 1<<31 ))
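+
+/*
+ * Illustrative sketch (not part of the official header): submitting one frame
+ * in synchronous mode (enableEncodeAsync = 0). `encoder`, `inputBuffer` and
+ * `bitstreamBuffer` are assumed to have been created earlier in the session;
+ * error handling is omitted.
+ *
+ * \code
+ * NV_ENC_PIC_PARAMS pic = { 0 };
+ * pic.version = NV_ENC_PIC_PARAMS_VER;
+ * pic.inputWidth = 1920;
+ * pic.inputHeight = 1080;
+ * pic.inputPitch = 1920;                       // set to inputWidth if pitch is unknown
+ * pic.inputBuffer = inputBuffer;               // from NvEncCreateInputBuffer/NvEncMapInputResource
+ * pic.outputBitstream = bitstreamBuffer;       // from NvEncCreateBitstreamBuffer
+ * pic.bufferFmt = NV_ENC_BUFFER_FORMAT_NV12;
+ * pic.pictureStruct = NV_ENC_PIC_STRUCT_FRAME;
+ * NvEncEncodePicture(encoder, &pic);
+ * \endcode
+ */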
+
+
+/**
+ * \struct _NV_ENC_MEONLY_PARAMS
+ * MEOnly parameters that need to be sent on a per motion estimation basis.
+ * NV_ENC_MEONLY_PARAMS::meExternalHints is supported for H264 only.
+ */
+typedef struct _NV_ENC_MEONLY_PARAMS
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to NV_ENC_MEONLY_PARAMS_VER.*/
+ uint32_t inputWidth; /**< [in]: Specifies the input buffer width */
+ uint32_t inputHeight; /**< [in]: Specifies the input buffer height */
+ NV_ENC_INPUT_PTR inputBuffer; /**< [in]: Specifies the input buffer pointer. Client must use a pointer obtained from NvEncCreateInputBuffer() or NvEncMapInputResource() APIs. */
+ NV_ENC_INPUT_PTR referenceFrame; /**< [in]: Specifies the reference frame pointer */
+ NV_ENC_OUTPUT_PTR mvBuffer; /**< [in]: Specifies the output buffer pointer.
+ If NV_ENC_INITIALIZE_PARAMS::enableOutputInVidmem is set to 0, specifies the pointer to motion vector data buffer allocated by NvEncCreateMVBuffer.
+ Client must lock mvBuffer using ::NvEncLockBitstream() API to get the motion vector data.
+ If NV_ENC_INITIALIZE_PARAMS::enableOutputInVidmem is set to 1, client should allocate buffer in video memory for storing the motion vector data. The size of this buffer must
+ be equal to total number of macroblocks multiplied by size of NV_ENC_H264_MV_DATA struct. Client should use a pointer obtained from ::NvEncMapInputResource() API, when mapping this
+ output buffer and assign it to NV_ENC_MEONLY_PARAMS::mvBuffer. All CUDA operations on this buffer must use the default stream. */
+ NV_ENC_BUFFER_FORMAT bufferFmt; /**< [in]: Specifies the input buffer format. */
+ void* completionEvent; /**< [in]: Specifies an event to be signalled on completion of motion estimation
+ of this Frame [only if operating in Asynchronous mode].
+ Each output buffer should be associated with a distinct event pointer. */
+ uint32_t viewID; /**< [in]: Specifies the left or right viewID if NV_ENC_CONFIG_H264_MEONLY::bStereoEnable is set.
+ viewID can be 0,1 if bStereoEnable is set, 0 otherwise. */
+ NVENC_EXTERNAL_ME_HINT_COUNTS_PER_BLOCKTYPE
+ meHintCountsPerBlock[2]; /**< [in]: Specifies the number of hint candidates per block for the current frame. meHintCountsPerBlock[0] is for L0 predictors.
+ The candidate count in NV_ENC_PIC_PARAMS::meHintCountsPerBlock[lx] must never exceed NV_ENC_INITIALIZE_PARAMS::maxMEHintCountsPerBlock[lx] provided during encoder initialization. */
+ NVENC_EXTERNAL_ME_HINT *meExternalHints; /**< [in]: Specifies the pointer to ME external hints for the current frame. The size of ME hint buffer should be equal to number of macroblocks * the total number of candidates per macroblock.
+ The total number of candidates per MB per direction = 1*meHintCountsPerBlock[Lx].numCandsPerBlk16x16 + 2*meHintCountsPerBlock[Lx].numCandsPerBlk16x8 + 2*meHintCountsPerBlock[Lx].numCandsPerBlk8x16
+ + 4*meHintCountsPerBlock[Lx].numCandsPerBlk8x8. For frames using bidirectional ME, the total number of candidates for a single macroblock is the sum of the total number of candidates per MB for each direction (L0 and L1). */
+ uint32_t reserved1[243]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[59]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_MEONLY_PARAMS;
+
+/** NV_ENC_MEONLY_PARAMS struct version*/
+#define NV_ENC_MEONLY_PARAMS_VER NVENCAPI_STRUCT_VERSION(3)
+
+
+/**
+ * \struct _NV_ENC_LOCK_BITSTREAM
+ * Bitstream buffer lock parameters.
+ */
+typedef struct _NV_ENC_LOCK_BITSTREAM
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_LOCK_BITSTREAM_VER. */
+ uint32_t doNotWait :1; /**< [in]: If this flag is set, the NvEncodeAPI interface will return buffer pointer even if operation is not completed. If not set, the call will block until operation completes. */
+ uint32_t ltrFrame :1; /**< [out]: Flag indicating this frame is marked as LTR frame */
+ uint32_t getRCStats :1; /**< [in]: If this flag is set then lockBitstream call will add additional intra-inter MB count and average MVX, MVY */
+ uint32_t reservedBitFields :29; /**< [in]: Reserved bit fields and must be set to 0 */
+ void* outputBitstream; /**< [in]: Pointer to the bitstream buffer being locked. */
+ uint32_t* sliceOffsets; /**< [in,out]: Array which receives the slice offsets. This is not supported if NV_ENC_CONFIG_H264::sliceMode is 1 on Kepler GPUs. Array size must be equal to size of frame in MBs. */
+ uint32_t frameIdx; /**< [out]: Frame no. for which the bitstream is being retrieved. */
+ uint32_t hwEncodeStatus; /**< [out]: The NvEncodeAPI interface status for the locked picture. */
+ uint32_t numSlices; /**< [out]: Number of slices in the encoded picture. Will be reported only if NV_ENC_INITIALIZE_PARAMS::reportSliceOffsets set to 1. */
+ uint32_t bitstreamSizeInBytes; /**< [out]: Actual number of bytes generated and copied to the memory pointed by bitstreamBufferPtr. */
+ uint64_t outputTimeStamp; /**< [out]: Presentation timestamp associated with the encoded output. */
+ uint64_t outputDuration; /**< [out]: Presentation duration associated with the encoded output. */
+ void* bitstreamBufferPtr; /**< [out]: Pointer to the generated output bitstream.
+ For MEOnly mode _NV_ENC_LOCK_BITSTREAM::bitstreamBufferPtr should be typecast to
+ NV_ENC_H264_MV_DATA/NV_ENC_HEVC_MV_DATA pointer respectively for H264/HEVC */
+ NV_ENC_PIC_TYPE pictureType; /**< [out]: Picture type of the encoded picture. */
+ NV_ENC_PIC_STRUCT pictureStruct; /**< [out]: Structure of the generated output picture. */
+ uint32_t frameAvgQP; /**< [out]: Average QP of the frame. */
+ uint32_t frameSatd; /**< [out]: Total SATD cost for whole frame. */
+ uint32_t ltrFrameIdx; /**< [out]: Frame index associated with this LTR frame. */
+ uint32_t ltrFrameBitmap; /**< [out]: Bitmap of LTR frames indices which were used for encoding this frame. Value of 0 if no LTR frames were used. */
+ uint32_t reserved[13]; /**< [in]: Reserved and must be set to 0 */
+ uint32_t intraMBCount; /**< [out]: For H264, Number of Intra MBs in the encoded frame. For HEVC, Number of Intra CTBs in the encoded frame. Supported only if _NV_ENC_LOCK_BITSTREAM::getRCStats set to 1. */
+ uint32_t interMBCount; /**< [out]: For H264, Number of Inter MBs in the encoded frame, includes skip MBs. For HEVC, Number of Inter CTBs in the encoded frame. Supported only if _NV_ENC_LOCK_BITSTREAM::getRCStats set to 1. */
+ int32_t averageMVX; /**< [out]: Average Motion Vector in X direction for the encoded frame. Supported only if _NV_ENC_LOCK_BITSTREAM::getRCStats set to 1. */
+ int32_t averageMVY; /**< [out]: Average Motion Vector in y direction for the encoded frame. Supported only if _NV_ENC_LOCK_BITSTREAM::getRCStats set to 1. */
+ uint32_t reserved1[219]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_LOCK_BITSTREAM;
+
+/** Macro for constructing the version field of ::_NV_ENC_LOCK_BITSTREAM */
+#define NV_ENC_LOCK_BITSTREAM_VER NVENCAPI_STRUCT_VERSION(1)
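+
+/*
+ * Illustrative sketch (not part of the official header): retrieving encoded
+ * output in synchronous mode. `encoder` and `bitstreamBuffer` are assumed to
+ * come from the session setup; `consume` is a hypothetical sink for the data.
+ *
+ * \code
+ * NV_ENC_LOCK_BITSTREAM lock = { 0 };
+ * lock.version = NV_ENC_LOCK_BITSTREAM_VER;
+ * lock.outputBitstream = bitstreamBuffer;
+ * if (NvEncLockBitstream(encoder, &lock) == NV_ENC_SUCCESS)
+ * {
+ *     // lock.bitstreamBufferPtr points at lock.bitstreamSizeInBytes bytes of
+ *     // encoded data; copy it out before unlocking.
+ *     consume(lock.bitstreamBufferPtr, lock.bitstreamSizeInBytes);
+ *     NvEncUnlockBitstream(encoder, lock.outputBitstream);
+ * }
+ * \endcode
+ */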
+
+
+/**
+ * \struct _NV_ENC_LOCK_INPUT_BUFFER
+ * Uncompressed Input Buffer lock parameters.
+ */
+typedef struct _NV_ENC_LOCK_INPUT_BUFFER
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_LOCK_INPUT_BUFFER_VER. */
+ uint32_t doNotWait :1; /**< [in]: Set to 1 to make ::NvEncLockInputBuffer() a non-blocking call. If the encoding is not completed, the driver will return the ::NV_ENC_ERR_ENCODER_BUSY error code. */
+ uint32_t reservedBitFields :31; /**< [in]: Reserved bitfields and must be set to 0 */
+ NV_ENC_INPUT_PTR inputBuffer; /**< [in]: Pointer to the input buffer to be locked, client should pass the pointer obtained from ::NvEncCreateInputBuffer() or ::NvEncMapInputResource API. */
+ void* bufferDataPtr; /**< [out]: Points to the locked input buffer data. The client can only access the input buffer using \p bufferDataPtr. */
+ uint32_t pitch; /**< [out]: Pitch of the locked input buffer. */
+ uint32_t reserved1[251]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_LOCK_INPUT_BUFFER;
+
+/** Macro for constructing the version field of ::_NV_ENC_LOCK_INPUT_BUFFER */
+#define NV_ENC_LOCK_INPUT_BUFFER_VER NVENCAPI_STRUCT_VERSION(1)
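+
+/*
+ * Illustrative sketch (not part of the official header): filling a CPU-side
+ * input buffer. `encoder` and `inputBuffer` (from NvEncCreateInputBuffer) are
+ * assumed; `fillFrame` is a hypothetical helper that writes one frame of
+ * pixel data honoring the returned pitch.
+ *
+ * \code
+ * NV_ENC_LOCK_INPUT_BUFFER lockIn = { 0 };
+ * lockIn.version = NV_ENC_LOCK_INPUT_BUFFER_VER;
+ * lockIn.inputBuffer = inputBuffer;
+ * if (NvEncLockInputBuffer(encoder, &lockIn) == NV_ENC_SUCCESS)
+ * {
+ *     fillFrame(lockIn.bufferDataPtr, lockIn.pitch);   // respect the returned pitch
+ *     NvEncUnlockInputBuffer(encoder, inputBuffer);
+ * }
+ * \endcode
+ */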
+
+
+/**
+ * \struct _NV_ENC_MAP_INPUT_RESOURCE
+ * Map an input resource to a Nvidia Encoder Input Buffer
+ */
+typedef struct _NV_ENC_MAP_INPUT_RESOURCE
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_MAP_INPUT_RESOURCE_VER. */
+ uint32_t subResourceIndex; /**< [in]: Deprecated. Do not use. */
+ void* inputResource; /**< [in]: Deprecated. Do not use. */
+ NV_ENC_REGISTERED_PTR registeredResource; /**< [in]: The Registered resource handle obtained by calling NvEncRegisterInputResource. */
+ NV_ENC_INPUT_PTR mappedResource; /**< [out]: Mapped pointer corresponding to the registeredResource. This pointer must be used in NV_ENC_PIC_PARAMS::inputBuffer parameter in ::NvEncEncodePicture() API. */
+ NV_ENC_BUFFER_FORMAT mappedBufferFmt; /**< [out]: Buffer format of the mapped resource. This buffer format must be used in NV_ENC_PIC_PARAMS::bufferFmt if the client is using the above mapped resource pointer. */
+ uint32_t reserved1[251]; /**< [in]: Reserved and must be set to 0. */
+ void* reserved2[63]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_MAP_INPUT_RESOURCE;
+
+/** Macro for constructing the version field of ::_NV_ENC_MAP_INPUT_RESOURCE */
+#define NV_ENC_MAP_INPUT_RESOURCE_VER NVENCAPI_STRUCT_VERSION(4)
+
+/**
+ * \struct _NV_ENC_INPUT_RESOURCE_OPENGL_TEX
+ * NV_ENC_REGISTER_RESOURCE::resourceToRegister must be a pointer to a variable of this type,
+ * when NV_ENC_REGISTER_RESOURCE::resourceType is NV_ENC_INPUT_RESOURCE_TYPE_OPENGL_TEX
+ */
+typedef struct _NV_ENC_INPUT_RESOURCE_OPENGL_TEX
+{
+ uint32_t texture; /**< [in]: The name of the texture to be used. */
+ uint32_t target; /**< [in]: Accepted values are GL_TEXTURE_RECTANGLE and GL_TEXTURE_2D. */
+} NV_ENC_INPUT_RESOURCE_OPENGL_TEX;
+
+/**
+ * \struct _NV_ENC_REGISTER_RESOURCE
+ * Register a resource for future use with the Nvidia Video Encoder Interface.
+ */
+typedef struct _NV_ENC_REGISTER_RESOURCE
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_REGISTER_RESOURCE_VER. */
+ NV_ENC_INPUT_RESOURCE_TYPE resourceType; /**< [in]: Specifies the type of resource to be registered.
+ Supported values are
+ ::NV_ENC_INPUT_RESOURCE_TYPE_DIRECTX,
+ ::NV_ENC_INPUT_RESOURCE_TYPE_CUDADEVICEPTR,
+ ::NV_ENC_INPUT_RESOURCE_TYPE_OPENGL_TEX */
+ uint32_t width; /**< [in]: Input buffer Width. */
+ uint32_t height; /**< [in]: Input buffer Height. */
+ uint32_t pitch; /**< [in]: Input buffer Pitch.
+ For ::NV_ENC_INPUT_RESOURCE_TYPE_DIRECTX resources, set this to 0.
+ For ::NV_ENC_INPUT_RESOURCE_TYPE_CUDADEVICEPTR resources, set this to
+ the pitch as obtained from cuMemAllocPitch(), or to the width in
+ bytes (if this resource was created by using cuMemAlloc()). This
+ value must be a multiple of 4.
+ For ::NV_ENC_INPUT_RESOURCE_TYPE_CUDAARRAY resources, set this to the
+ width of the allocation in bytes (i.e.
+ CUDA_ARRAY3D_DESCRIPTOR::Width * CUDA_ARRAY3D_DESCRIPTOR::NumChannels).
+ For ::NV_ENC_INPUT_RESOURCE_TYPE_OPENGL_TEX resources, set this to the
+ texture width multiplied by the number of components in the texture
+ format. */
+ uint32_t subResourceIndex; /**< [in]: Subresource Index of the DirectX resource to be registered. Should be set to 0 for other interfaces. */
+ void* resourceToRegister; /**< [in]: Handle to the resource that is being registered. */
+ NV_ENC_REGISTERED_PTR registeredResource; /**< [out]: Registered resource handle. This should be used in future interactions with the Nvidia Video Encoder Interface. */
+ NV_ENC_BUFFER_FORMAT bufferFormat; /**< [in]: Buffer format of resource to be registered. */
+ NV_ENC_BUFFER_USAGE bufferUsage; /**< [in]: Usage of resource to be registered. */
+ uint32_t reserved1[247]; /**< [in]: Reserved and must be set to 0. */
+ void* reserved2[62]; /**< [in]: Reserved and must be set to NULL. */
+} NV_ENC_REGISTER_RESOURCE;
+
+/** Macro for constructing the version field of ::_NV_ENC_REGISTER_RESOURCE */
+#define NV_ENC_REGISTER_RESOURCE_VER NVENCAPI_STRUCT_VERSION(3)
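+
+/*
+ * Illustrative sketch (not part of the official header): registering a D3D11
+ * texture and mapping it as an encoder input. `encoder` and `pTexture` (an
+ * ID3D11Texture2D*) are assumed to exist; dimensions and buffer format are
+ * example values, and error handling is omitted.
+ *
+ * \code
+ * NV_ENC_REGISTER_RESOURCE reg = { 0 };
+ * reg.version = NV_ENC_REGISTER_RESOURCE_VER;
+ * reg.resourceType = NV_ENC_INPUT_RESOURCE_TYPE_DIRECTX;
+ * reg.resourceToRegister = pTexture;
+ * reg.width = 1920;
+ * reg.height = 1080;
+ * reg.pitch = 0;                                 // 0 for DirectX resources
+ * reg.bufferFormat = NV_ENC_BUFFER_FORMAT_ARGB;
+ * reg.bufferUsage = NV_ENC_INPUT_IMAGE;
+ * NvEncRegisterResource(encoder, &reg);
+ *
+ * NV_ENC_MAP_INPUT_RESOURCE map = { 0 };
+ * map.version = NV_ENC_MAP_INPUT_RESOURCE_VER;
+ * map.registeredResource = reg.registeredResource;
+ * NvEncMapInputResource(encoder, &map);
+ * // map.mappedResource can now be used as NV_ENC_PIC_PARAMS::inputBuffer,
+ * // with NV_ENC_PIC_PARAMS::bufferFmt = map.mappedBufferFmt.
+ * \endcode
+ */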
+
+/**
+ * \struct _NV_ENC_STAT
+ * Encode Stats structure.
+ */
+typedef struct _NV_ENC_STAT
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_STAT_VER. */
+ uint32_t reserved; /**< [in]: Reserved and must be set to 0 */
+ NV_ENC_OUTPUT_PTR outputBitStream; /**< [out]: Specifies the pointer to output bitstream. */
+ uint32_t bitStreamSize; /**< [out]: Size of generated bitstream in bytes. */
+ uint32_t picType; /**< [out]: Picture type of encoded picture. See ::NV_ENC_PIC_TYPE. */
+ uint32_t lastValidByteOffset; /**< [out]: Offset of last valid bytes of completed bitstream */
+ uint32_t sliceOffsets[16]; /**< [out]: Offsets of each slice */
+ uint32_t picIdx; /**< [out]: Picture number */
+ uint32_t reserved1[233]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_STAT;
+
+/** Macro for constructing the version field of ::_NV_ENC_STAT */
+#define NV_ENC_STAT_VER NVENCAPI_STRUCT_VERSION(1)
+
+
+/**
+ * \struct _NV_ENC_SEQUENCE_PARAM_PAYLOAD
+ * Sequence and picture parameters payload.
+ */
+typedef struct _NV_ENC_SEQUENCE_PARAM_PAYLOAD
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER. */
+ uint32_t inBufferSize; /**< [in]: Specifies the size of the spsppsBuffer provided by the client */
+ uint32_t spsId; /**< [in]: Specifies the SPS id to be used in sequence header. Default value is 0. */
+ uint32_t ppsId; /**< [in]: Specifies the PPS id to be used in picture header. Default value is 0. */
+ void* spsppsBuffer; /**< [in]: Specifies bitstream header pointer of size NV_ENC_SEQUENCE_PARAM_PAYLOAD::inBufferSize. It is the client's responsibility to manage this memory. */
+ uint32_t* outSPSPPSPayloadSize; /**< [out]: Size of the sequence and picture header in bytes written by the NvEncodeAPI interface to the spsppsBuffer. */
+ uint32_t reserved [250]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_SEQUENCE_PARAM_PAYLOAD;
+
+/** Macro for constructing the version field of ::_NV_ENC_SEQUENCE_PARAM_PAYLOAD */
+#define NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER NVENCAPI_STRUCT_VERSION(1)
+
+
+/**
+ * Event registration/unregistration parameters.
+ */
+typedef struct _NV_ENC_EVENT_PARAMS
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_EVENT_PARAMS_VER. */
+ uint32_t reserved; /**< [in]: Reserved and must be set to 0 */
+ void* completionEvent; /**< [in]: Handle to event to be registered/unregistered with the NvEncodeAPI interface. */
+ uint32_t reserved1[253]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_EVENT_PARAMS;
+
+/** Macro for constructing the version field of ::_NV_ENC_EVENT_PARAMS */
+#define NV_ENC_EVENT_PARAMS_VER NVENCAPI_STRUCT_VERSION(1)
+
+/**
+ * Encoder Session Creation parameters
+ */
+typedef struct _NV_ENC_OPEN_ENCODE_SESSIONEX_PARAMS
+{
+ uint32_t version; /**< [in]: Struct version. Must be set to ::NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER. */
+ NV_ENC_DEVICE_TYPE deviceType; /**< [in]: Specifies the device type. */
+ void* device; /**< [in]: Pointer to client device. */
+ void* reserved; /**< [in]: Reserved and must be set to 0. */
+ uint32_t apiVersion; /**< [in]: API version. Should be set to NVENCAPI_VERSION. */
+ uint32_t reserved1[253]; /**< [in]: Reserved and must be set to 0 */
+ void* reserved2[64]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS;
+/** Macro for constructing the version field of ::_NV_ENC_OPEN_ENCODE_SESSIONEX_PARAMS */
+#define NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER NVENCAPI_STRUCT_VERSION(1)
+
+/** @} */ /* END ENCODER_STRUCTURE */
+
+
+/**
+ * \addtogroup ENCODE_FUNC NvEncodeAPI Functions
+ * @{
+ */
+
+// NvEncOpenEncodeSession
+/**
+ * \brief Opens an encoding session.
+ *
+ * Deprecated.
+ *
+ * \return
+ * ::NV_ENC_ERR_INVALID_CALL\n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncOpenEncodeSession (void* device, uint32_t deviceType, void** encoder);
+
+// NvEncGetEncodeGuidCount
+/**
+ * \brief Retrieves the number of supported encode GUIDs.
+ *
+ * The function returns the number of codec guids supported by the NvEncodeAPI
+ * interface.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [out] encodeGUIDCount
+ * Number of supported encode GUIDs.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodeGUIDCount (void* encoder, uint32_t* encodeGUIDCount);
+
+
+// NvEncGetEncodeGUIDs
+/**
+ * \brief Retrieves an array of supported encoder codec GUIDs.
+ *
+ * The function returns an array of codec guids supported by the NvEncodeAPI interface.
+ * The client must allocate an array where the NvEncodeAPI interface can
+ * fill the supported guids and pass the pointer in \p *GUIDs parameter.
+ * The size of the array can be determined by using ::NvEncGetEncodeGUIDCount() API.
+ * The Nvidia Encoding interface returns the number of codec guids it has actually
+ * filled in the guid array in the \p GUIDCount parameter.
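+ *
+ * For illustration only, a minimal sketch of this count-then-enumerate pattern
+ * (error handling and allocation checks omitted; \p encoder is an open session):
+ *\code
+    uint32_t guidCount = 0;
+    NvEncGetEncodeGUIDCount(encoder, &guidCount);
+    GUID* guids = (GUID*)malloc(guidCount * sizeof(GUID));
+    uint32_t returned = 0;
+    NvEncGetEncodeGUIDs(encoder, guids, guidCount, &returned);
+    // guids[0..returned-1] now hold the supported codec GUIDs.
+    free(guids);
+ * \endcode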
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] guidArraySize
+ * Number of GUIDs to be retrieved. Should be set to the number retrieved using
+ * ::NvEncGetEncodeGUIDCount.
+ * \param [out] GUIDs
+ * Array of supported Encode GUIDs.
+ * \param [out] GUIDCount
+ * Number of supported Encode GUIDs.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodeGUIDs (void* encoder, GUID* GUIDs, uint32_t guidArraySize, uint32_t* GUIDCount);
+
+
+// NvEncGetEncodeProfileGuidCount
+/**
+ * \brief Retrieves the number of supported profile GUIDs.
+ *
+ * The function returns the number of profile GUIDs supported for a given codec.
+ * The client must first enumerate the codec guids supported by the NvEncodeAPI
+ * interface. After determining the codec guid, it can query the NvEncodeAPI
+ * interface to determine the number of profile guids supported for a particular
+ * codec guid.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * The codec guid for which the profile guids are being enumerated.
+ * \param [out] encodeProfileGUIDCount
+ * Number of encode profiles supported for the given encodeGUID.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodeProfileGUIDCount (void* encoder, GUID encodeGUID, uint32_t* encodeProfileGUIDCount);
+
+
+// NvEncGetEncodeProfileGUIDs
+/**
+ * \brief Retrieves an array of supported encode profile GUIDs.
+ *
+ * The function returns an array of supported profile guids for a particular
+ * codec guid. The client must allocate an array where the NvEncodeAPI interface
+ * can populate the profile guids. The client can determine the array size using
+ * ::NvEncGetEncodeProfileGUIDCount() API. The client must also validate that the
+ * NvEncodeAPI interface supports the GUID the client wants to pass as \p encodeGUID
+ * parameter.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * The encode guid whose profile guids are being enumerated.
+ * \param [in] guidArraySize
+ * Number of GUIDs to be retrieved. Should be set to the number retrieved using
+ * ::NvEncGetEncodeProfileGUIDCount.
+ * \param [out] profileGUIDs
+ * Array of supported Encode Profile GUIDs
+ * \param [out] GUIDCount
+ * Number of valid encode profile GUIDs in \p profileGUIDs array.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodeProfileGUIDs (void* encoder, GUID encodeGUID, GUID* profileGUIDs, uint32_t guidArraySize, uint32_t* GUIDCount);
+
+// NvEncGetInputFormatCount
+/**
+ * \brief Retrieve the number of supported Input formats.
+ *
+ * The function returns the number of supported input formats. The client must
+ * query the NvEncodeAPI interface to determine the supported input formats
+ * before creating the input surfaces.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * Encode GUID, corresponding to which the number of supported input formats
+ * is to be retrieved.
+ * \param [out] inputFmtCount
+ * Number of input formats supported for specified Encode GUID.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ */
+NVENCSTATUS NVENCAPI NvEncGetInputFormatCount (void* encoder, GUID encodeGUID, uint32_t* inputFmtCount);
+
+
+// NvEncGetInputFormats
+/**
+ * \brief Retrieves an array of supported Input formats
+ *
+ * Returns an array of supported input formats. The client must use one of these
+ * formats to create an input surface using the ::NvEncCreateInputBuffer() API.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * Encode GUID, corresponding to which the number of supported input formats
+ * is to be retrieved.
+ *\param [in] inputFmtArraySize
+ * Size of the input format array passed in \p inputFmts.
+ *\param [out] inputFmts
+ * Array of input formats supported for this Encode GUID.
+ *\param [out] inputFmtCount
+ * The number of valid input format types returned by the NvEncodeAPI
+ * interface in \p inputFmts array.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetInputFormats (void* encoder, GUID encodeGUID, NV_ENC_BUFFER_FORMAT* inputFmts, uint32_t inputFmtArraySize, uint32_t* inputFmtCount);
+
+
+// NvEncGetEncodeCaps
+/**
+ * \brief Retrieves the capability value for a specified encoder attribute.
+ *
+ * The function returns the capability value for a given encoder attribute. The
+ * client must validate the encodeGUID using ::NvEncGetEncodeGUIDs() API before
+ * calling this function. The encoder attributes that can be queried are enumerated
+ * in the ::NV_ENC_CAPS_PARAM enum.
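+ *
+ * For illustration, a sketch of querying the maximum number of B frames
+ * (NV_ENC_CAPS_PARAM::capsToQuery and NV_ENC_CAPS_NUM_MAX_BFRAMES are defined
+ * earlier in this header; error handling omitted):
+ *\code
+    NV_ENC_CAPS_PARAM capsParam = { 0 };
+    capsParam.version = NV_ENC_CAPS_PARAM_VER;
+    capsParam.capsToQuery = NV_ENC_CAPS_NUM_MAX_BFRAMES;
+    int maxBFrames = 0;
+    NvEncGetEncodeCaps(encoder, NV_ENC_CODEC_H264_GUID, &capsParam, &maxBFrames);
+ * \endcode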
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * Encode GUID, corresponding to which the capability attribute is to be retrieved.
+ * \param [in] capsParam
+ * Used to specify attribute being queried. Refer ::NV_ENC_CAPS_PARAM for more
+ * details.
+ * \param [out] capsVal
+ * The value corresponding to the capability attribute being queried.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodeCaps (void* encoder, GUID encodeGUID, NV_ENC_CAPS_PARAM* capsParam, int* capsVal);
+
+
+// NvEncGetEncodePresetCount
+/**
+ * \brief Retrieves the number of supported preset GUIDs.
+ *
+ * The function returns the number of preset GUIDs available for a given codec.
+ * The client must validate the codec guid using ::NvEncGetEncodeGUIDs() API
+ * before calling this function.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * Encode GUID, corresponding to which the number of supported presets is to
+ * be retrieved.
+ * \param [out] encodePresetGUIDCount
+ * Receives the number of supported preset GUIDs.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodePresetCount (void* encoder, GUID encodeGUID, uint32_t* encodePresetGUIDCount);
+
+
+// NvEncGetEncodePresetGUIDs
+/**
+ * \brief Receives an array of supported encoder preset GUIDs.
+ *
+ * The function returns an array of encode preset guids available for a given codec.
+ * The client can directly use one of the preset guids based upon the use case
+ * or target device. The preset guid chosen can be directly used in
+ * NV_ENC_INITIALIZE_PARAMS::presetGUID parameter to ::NvEncEncodePicture() API.
+ * Alternately, the client can use the preset guid to retrieve the encoding config
+ * parameters being used by NvEncodeAPI interface for that given preset, using
+ * ::NvEncGetEncodePresetConfig() API. It can then modify preset config parameters
+ * as per its use case and send it to NvEncodeAPI interface as part of
+ * NV_ENC_INITIALIZE_PARAMS::encodeConfig parameter for NvEncInitializeEncoder()
+ * API.
+ *
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * Encode GUID, corresponding to which the list of supported presets is to be
+ * retrieved.
+ * \param [in] guidArraySize
+ * Size of the array of preset guids passed in \p presetGUIDs.
+ * \param [out] presetGUIDs
+ * Array of supported Encode preset GUIDs from the NvEncodeAPI interface
+ * to client.
+ * \param [out] encodePresetGUIDCount
+ * Receives the number of preset GUIDs returned by the NvEncodeAPI
+ * interface.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodePresetGUIDs (void* encoder, GUID encodeGUID, GUID* presetGUIDs, uint32_t guidArraySize, uint32_t* encodePresetGUIDCount);
+
+
+// NvEncGetEncodePresetConfig
+/**
+ * \brief Returns a preset config structure supported for given preset GUID.
+ *
+ * The function returns a preset config structure for a given preset guid. Before
+ * using this function the client must enumerate the preset guids available for
+ * a given codec. The preset config structure can be modified by the client depending
+ * upon its use case and can be then used to initialize the encoder using
+ * ::NvEncInitializeEncoder() API. The client can use this function only if it
+ * wants to modify the NvEncodeAPI preset configuration, otherwise it can
+ * directly use the preset guid.
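+ *
+ * For illustration, a minimal sketch of fetching and tweaking a preset
+ * configuration before initialization (error handling omitted):
+ *\code
+    NV_ENC_PRESET_CONFIG presetConfig = { 0 };
+    presetConfig.version = NV_ENC_PRESET_CONFIG_VER;
+    presetConfig.presetCfg.version = NV_ENC_CONFIG_VER;
+    NvEncGetEncodePresetConfig(encoder, NV_ENC_CODEC_H264_GUID,
+                               NV_ENC_PRESET_DEFAULT_GUID, &presetConfig);
+    presetConfig.presetCfg.gopLength = 60; // example modification
+    // presetConfig.presetCfg can then be passed through
+    // NV_ENC_INITIALIZE_PARAMS::encodeConfig to ::NvEncInitializeEncoder().
+ * \endcode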
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] encodeGUID
+ * Encode GUID, corresponding to which the list of supported presets is to be
+ * retrieved.
+ * \param [in] presetGUID
+ * Preset GUID, corresponding to which the encoding configuration is to be
+ * retrieved.
+ * \param [out] presetConfig
+ * The requested Preset Encoder Attribute set. Refer ::_NV_ENC_CONFIG for
+ * more details.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodePresetConfig (void* encoder, GUID encodeGUID, GUID presetGUID, NV_ENC_PRESET_CONFIG* presetConfig);
+
+// NvEncInitializeEncoder
+/**
+ * \brief Initialize the encoder.
+ *
+ * This API must be used to initialize the encoder. The initialization parameter
+ * is passed using \p *createEncodeParams. The client must send the following
+ * fields of the _NV_ENC_INITIALIZE_PARAMS structure with a valid value.
+ * - NV_ENC_INITIALIZE_PARAMS::encodeGUID
+ * - NV_ENC_INITIALIZE_PARAMS::encodeWidth
+ * - NV_ENC_INITIALIZE_PARAMS::encodeHeight
+ *
+ * The client can pass a preset guid directly to the NvEncodeAPI interface using
+ * NV_ENC_INITIALIZE_PARAMS::presetGUID field. If the client doesn't pass
+ * NV_ENC_INITIALIZE_PARAMS::encodeConfig structure, the codec specific parameters
+ * will be selected based on the preset guid. The preset guid must have been
+ * validated by the client using ::NvEncGetEncodePresetGUIDs() API.
+ * If the client passes a custom ::_NV_ENC_CONFIG structure through
+ * NV_ENC_INITIALIZE_PARAMS::encodeConfig, it will override the codec specific parameters
+ * based on the preset guid. It is recommended that even if the client passes a custom config,
+ * it should also send a preset guid. In this case, the preset guid passed by the client
+ * will not override any of the custom config parameters programmed by the client,
+ * it is only used as a hint by the NvEncodeAPI interface to determine certain encoder parameters
+ * which are not exposed to the client.
+ *
+ * There are two modes of operation for the encoder namely:
+ * - Asynchronous mode
+ * - Synchronous mode
+ *
+ * The client can select asynchronous or synchronous mode by setting the \p
+ * enableEncodeAsync field in ::_NV_ENC_INITIALIZE_PARAMS to 1 or 0 respectively.
+ *\par Asynchronous mode of operation:
+ * The Asynchronous mode can be enabled by setting NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 1.
+ * The client operating in asynchronous mode must allocate completion event object
+ * for each output buffer and pass the completion event object in the
+ * ::NvEncEncodePicture() API. The client can create another thread and wait on
+ * the event object to be signalled by NvEncodeAPI interface on completion of the
+ * encoding process for the output frame. This should unblock the main thread from
+ * submitting work to the encoder. When the event is signalled the client can call
+ * NvEncodeAPI interfaces to copy the bitstream data using ::NvEncLockBitstream()
+ * API. This is the preferred mode of operation.
+ *
+ * NOTE: Asynchronous mode is not supported on Linux.
+ *
+ *\par Synchronous mode of operation:
+ * The client can select synchronous mode by setting NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 0.
+ * The client working in synchronous mode can work in a single-threaded or multi-
+ * threaded mode. The client need not allocate any event objects. The client can
+ * only lock the bitstream data after NvEncodeAPI interface has returned
+ * ::NV_ENC_SUCCESS from encode picture. The NvEncodeAPI interface can return
+ * ::NV_ENC_ERR_NEED_MORE_INPUT error code from ::NvEncEncodePicture() API. The
+ * client must not lock the output buffer in such case but should send the next
+ * frame for encoding. The client must keep on calling ::NvEncEncodePicture() API
+ * until it returns ::NV_ENC_SUCCESS. \n
+ * The client must always lock the bitstream data in the order in which it was submitted.
+ * This is true for both asynchronous and synchronous mode.
+ *
+ *\par Picture type decision:
+ * If the client is taking the picture type decision, it must disable the picture
+ * type decision module in NvEncodeAPI by setting NV_ENC_INITIALIZE_PARAMS::enablePTD
+ * to 0. In this case the client is required to send the pictures in encoding
+ * order to NvEncodeAPI by doing the re-ordering for B frames. \n
+ * If the client doesn't want to take the picture type decision it can enable
+ * picture type decision module in the NvEncodeAPI interface by setting
+ * NV_ENC_INITIALIZE_PARAMS::enablePTD to 1 and send the input pictures in display
+ * order.
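+ *
+ * For illustration, a minimal initialization sketch (H.264, preset guid only,
+ * synchronous mode with PTD enabled; error handling omitted):
+ *\code
+    NV_ENC_INITIALIZE_PARAMS initParams = { 0 };
+    initParams.version = NV_ENC_INITIALIZE_PARAMS_VER;
+    initParams.encodeGUID = NV_ENC_CODEC_H264_GUID;
+    initParams.presetGUID = NV_ENC_PRESET_DEFAULT_GUID;
+    initParams.encodeWidth = 1920;
+    initParams.encodeHeight = 1080;
+    initParams.frameRateNum = 30;
+    initParams.frameRateDen = 1;
+    initParams.enableEncodeAsync = 0;
+    initParams.enablePTD = 1;
+    NvEncInitializeEncoder(encoder, &initParams);
+ * \endcode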
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] createEncodeParams
+ * Refer ::_NV_ENC_INITIALIZE_PARAMS for details.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncInitializeEncoder (void* encoder, NV_ENC_INITIALIZE_PARAMS* createEncodeParams);
+
+
+// NvEncCreateInputBuffer
+/**
+ * \brief Allocates Input buffer.
+ *
+ * This function is used to allocate an input buffer. The client must enumerate
+ * the input buffer format before allocating the input buffer resources. The
+ * NV_ENC_INPUT_PTR returned by the NvEncodeAPI interface in the
+ * NV_ENC_CREATE_INPUT_BUFFER::inputBuffer field can be directly used in
+ * ::NvEncEncodePicture() API. The number of input buffers to be allocated by the
+ * client must be at least 4 more than the number of B frames being used for encoding.
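+ *
+ * For illustration, a minimal allocation sketch (NV12 input at the
+ * initialization width/height; error handling omitted):
+ *\code
+    NV_ENC_CREATE_INPUT_BUFFER createInput = { 0 };
+    createInput.version = NV_ENC_CREATE_INPUT_BUFFER_VER;
+    createInput.width = 1920;
+    createInput.height = 1080;
+    createInput.bufferFmt = NV_ENC_BUFFER_FORMAT_NV12;
+    NvEncCreateInputBuffer(encoder, &createInput);
+    NV_ENC_INPUT_PTR inputBuffer = createInput.inputBuffer;
+ * \endcode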
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] createInputBufferParams
+ * Pointer to the ::NV_ENC_CREATE_INPUT_BUFFER structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncCreateInputBuffer (void* encoder, NV_ENC_CREATE_INPUT_BUFFER* createInputBufferParams);
+
+
+// NvEncDestroyInputBuffer
+/**
+ * \brief Release an input buffer.
+ *
+ * This function is used to free an input buffer. If the client has allocated
+ * any input buffer using ::NvEncCreateInputBuffer() API, it must free those
+ * input buffers by calling this function. The client must release the input
+ * buffers before destroying the encoder using ::NvEncDestroyEncoder() API.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] inputBuffer
+ * Pointer to the input buffer to be released.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncDestroyInputBuffer (void* encoder, NV_ENC_INPUT_PTR inputBuffer);
+
+
+// NvEncCreateBitstreamBuffer
+/**
+ * \brief Allocates an output bitstream buffer
+ *
+ * This function is used to allocate an output bitstream buffer and returns a
+ * NV_ENC_OUTPUT_PTR to bitstream buffer to the client in the
+ * NV_ENC_CREATE_BITSTREAM_BUFFER::bitstreamBuffer field.
+ * The client can only call this function after the encoder session has been
+ * initialized using ::NvEncInitializeEncoder() API. The minimum number of output
+ * buffers allocated by the client must be at least 4 more than the number of
+ * B frames being used for encoding. The client can only access the output
+ * bitstream data by locking the \p bitstreamBuffer using the ::NvEncLockBitstream()
+ * function.
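+ *
+ * For illustration, a minimal allocation sketch (error handling omitted):
+ *\code
+    NV_ENC_CREATE_BITSTREAM_BUFFER createBitstream = { 0 };
+    createBitstream.version = NV_ENC_CREATE_BITSTREAM_BUFFER_VER;
+    NvEncCreateBitstreamBuffer(encoder, &createBitstream);
+    NV_ENC_OUTPUT_PTR outputBuffer = createBitstream.bitstreamBuffer;
+ * \endcode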
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] createBitstreamBufferParams
+ * Pointer to the ::NV_ENC_CREATE_BITSTREAM_BUFFER structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncCreateBitstreamBuffer (void* encoder, NV_ENC_CREATE_BITSTREAM_BUFFER* createBitstreamBufferParams);
+
+
+// NvEncDestroyBitstreamBuffer
+/**
+ * \brief Release a bitstream buffer.
+ *
+ * This function is used to release the output bitstream buffer allocated using
+ * the ::NvEncCreateBitstreamBuffer() function. The client must release the output
+ * bitstreamBuffer using this function before destroying the encoder session.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] bitstreamBuffer
+ * Pointer to the bitstream buffer being released.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncDestroyBitstreamBuffer (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer);
+
+// NvEncEncodePicture
+/**
+ * \brief Submit an input picture for encoding.
+ *
+ * This function is used to submit an input picture buffer for encoding. The
+ * encoding parameters are passed using \p *encodePicParams which is a pointer
+ * to the ::_NV_ENC_PIC_PARAMS structure.
+ *
+ * If the client has set NV_ENC_INITIALIZE_PARAMS::enablePTD to 0, then it must
+ * send a valid value for the following fields.
+ * - NV_ENC_PIC_PARAMS::pictureType
+ * - NV_ENC_PIC_PARAMS_H264::displayPOCSyntax (H264 only)
+ * - NV_ENC_PIC_PARAMS_H264::frameNumSyntax (H264 only)
+ * - NV_ENC_PIC_PARAMS_H264::refPicFlag (H264 only)
+ *
+ *
+ *\par Asynchronous Encoding
+ * If the client has enabled asynchronous mode of encoding by setting
+ * NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 1 in the ::NvEncInitializeEncoder()
+ * API, then the client must send a valid NV_ENC_PIC_PARAMS::completionEvent.
+ * In case of asynchronous mode of operation, the client can queue the ::NvEncEncodePicture()
+ * API commands from the main thread and then queue output buffers to be processed
+ * to a secondary worker thread. Before locking the output buffers in the
+ * secondary thread, the client must wait on the NV_ENC_PIC_PARAMS::completionEvent
+ * it has queued in ::NvEncEncodePicture() API call. The client must always process
+ * completion event and the output buffer in the same order in which they have been
+ * submitted for encoding. The NvEncodeAPI interface is responsible for any
+ * re-ordering required for B frames and will always ensure that encoded bitstream
+ * data is written in the same order in which output buffer is submitted.
+ *\code
+ The example below shows how asynchronous encoding works in the case of 1 B frame
+ ------------------------------------------------------------------------
+ Suppose the client allocated 4 input buffers(I1,I2..), 4 output buffers(O1,O2..)
+ and 4 completion events(E1, E2, ...). The NvEncodeAPI interface will need to
+ keep a copy of the input buffers for re-ordering and it allocates following
+ internal buffers (NvI1, NvI2...). These internal buffers are managed by NvEncodeAPI
+ and the client is not responsible for the allocating or freeing the memory of
+ the internal buffers.
+
+ a) The client main thread will queue the following encode frame calls.
+ Note that the picture type is unknown to the client; the decision is taken by the
+ NvEncodeAPI interface. The client should pass a ::_NV_ENC_PIC_PARAMS parameter
+ consisting of allocated input buffer, output buffer and output events in successive
+ ::NvEncEncodePicture() API calls along with other required encode picture params.
+ For example:
+ 1st EncodePicture parameters - (I1, O1, E1)
+ 2nd EncodePicture parameters - (I2, O2, E2)
+ 3rd EncodePicture parameters - (I3, O3, E3)
+
+ b) NvEncodeAPI SW will receive the following encode Commands from the client.
+ The left side shows input from client in the form (Input buffer, Output Buffer,
+ Output Event). The right hand side shows a possible picture type decision taken by
+ the NvEncodeAPI interface.
+ (I1, O1, E1) ---P1 Frame
+ (I2, O2, E2) ---B2 Frame
+ (I3, O3, E3) ---P3 Frame
+
+ c) NvEncodeAPI interface will make a copy of the input buffers to its internal
+ buffers for re-ordering. These copies are done as part of the ::NvEncEncodePicture()
+ function call from the client and NvEncodeAPI interface is responsible for
+ synchronization of copy operation with the actual encoding operation.
+ I1 --> NvI1
+ I2 --> NvI2
+ I3 --> NvI3
+
+ d) After returning from the ::NvEncEncodePicture() call, the client must queue the output
+ bitstream processing work to the secondary thread. The output bitstream processing
+ for asynchronous mode consists of first waiting on the completion event (E1, E2..)
+ and then locking the output bitstream buffer (O1, O2..) for reading the encoded
+ data. The work queued to the secondary thread by the client is in the following order
+ (I1, O1, E1)
+ (I2, O2, E2)
+ (I3, O3, E3)
+ Note they are in the same order in which client calls ::NvEncEncodePicture() API
+ in \p step a).
+
+ e) NvEncodeAPI interface will do the re-ordering such that Encoder HW will receive
+ the following encode commands:
+ (NvI1, O1, E1) ---P1 Frame
+ (NvI3, O2, E2) ---P3 Frame
+ (NvI2, O3, E3) ---B2 frame
+
+ f) After the encoding operations are completed, the events will be signalled
+ by NvEncodeAPI interface in the following order :
+ (O1, E1) ---P1 Frame ,output bitstream copied to O1 and event E1 signalled.
+ (O2, E2) ---P3 Frame ,output bitstream copied to O2 and event E2 signalled.
+ (O3, E3) ---B2 Frame ,output bitstream copied to O3 and event E3 signalled.
+
+ g) The client must lock the bitstream data using ::NvEncLockBitstream() API in
+ the order O1, O2, O3 to read the encoded data, after waiting for the events
+ to be signalled in the same order, i.e. E1, E2 and E3. The output processing is
+ done in the secondary thread in the following order:
+ Waits on E1, copies encoded bitstream from O1
+ Waits on E2, copies encoded bitstream from O2
+ Waits on E3, copies encoded bitstream from O3
+
+ -Note the client will receive the event signalling and output buffers in the
+ same order in which they were submitted for encoding.
+ -Note the LockBitstream result will contain a picture type field which notifies
+ the output picture type to the client.
+ -Note the input buffer, output buffer and output completion event are free to be
+ reused once the NvEncodeAPI interface has signalled the event and the client has
+ copied the data from the output buffer.
+
+ * \endcode
+ *
+ *\par Synchronous Encoding
+ * The client can enable synchronous mode of encoding by setting
+ * NV_ENC_INITIALIZE_PARAMS::enableEncodeAsync to 0 in ::NvEncInitializeEncoder() API.
+ * The NvEncodeAPI interface may return ::NV_ENC_ERR_NEED_MORE_INPUT error code for
+ * some ::NvEncEncodePicture() API calls when NV_ENC_INITIALIZE_PARAMS::enablePTD
+ * is set to 1, but the client must not treat it as a fatal error. The NvEncodeAPI
+ * interface might not be able to submit an input picture buffer for encoding
+ * immediately due to re-ordering for B frames. The NvEncodeAPI interface cannot
+ * submit the input picture which is decided to be encoded as B frame as it waits
+ * for backward reference from temporally subsequent frames. This input picture
+ * is buffered internally and waits for more input pictures to arrive. The client
+ * must not call ::NvEncLockBitstream() API on the output buffers whose
+ * ::NvEncEncodePicture() API returns ::NV_ENC_ERR_NEED_MORE_INPUT. The client must
+ * wait for the NvEncodeAPI interface to return ::NV_ENC_SUCCESS before locking the
+ * output bitstreams to read the encoded bitstream data. The following example
+ * explains the scenario with synchronous encoding with 1 B frame.
+ *\code
+ The example below shows how synchronous encoding works in the case of 1 B frame
+ -----------------------------------------------------------------------------
+ Suppose the client allocated 4 input buffers(I1,I2..), 4 output buffers(O1,O2..)
+ and 4 completion events(E1, E2, ...). The NvEncodeAPI interface will need to
+ keep a copy of the input buffers for re-ordering and it allocates following
+ internal buffers (NvI1, NvI2...). These internal buffers are managed by NvEncodeAPI
+ and the client is not responsible for the allocating or freeing the memory of
+ the internal buffers.
+
+ The client calls ::NvEncEncodePicture() API with input buffer I1 and output buffer O1.
+ The NvEncodeAPI decides to encode I1 as P frame and submits it to encoder
+ HW and returns ::NV_ENC_SUCCESS.
+ The client can now read the encoded data by locking the output O1 by calling
+ NvEncLockBitstream API.
+
+ The client calls ::NvEncEncodePicture() API with input buffer I2 and output buffer O2.
+ The NvEncodeAPI decides to encode I2 as B frame and buffers I2 by copying it
+ to internal buffer and returns ::NV_ENC_ERR_NEED_MORE_INPUT.
+ The error is not fatal and it notifies client that it cannot read the encoded
+ data by locking the output O2 by calling ::NvEncLockBitstream() API without submitting
+ more work to the NvEncodeAPI interface.
+
+ The client calls ::NvEncEncodePicture() with input buffer I3 and output buffer O3.
+ The NvEncodeAPI decides to encode I3 as P frame and it first submits I3 for
+ encoding which will be used as backward reference frame for I2.
+ The NvEncodeAPI then submits I2 for encoding and returns ::NV_ENC_SUCCESS. Both
+ the submission are part of the same ::NvEncEncodePicture() function call.
+ The client can now read the encoded data for both the frames by locking the output
+ O2 followed by O3, by calling ::NvEncLockBitstream() API.
+
+ The client must always lock the output in the same order in which it has submitted
+ to receive the encoded bitstream in correct encoding order.
+
+ * \endcode
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] encodePicParams
+ * Pointer to the ::_NV_ENC_PIC_PARAMS structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_ENCODER_BUSY \n
+ * ::NV_ENC_ERR_NEED_MORE_INPUT \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncEncodePicture (void* encoder, NV_ENC_PIC_PARAMS* encodePicParams);
+
+
+// NvEncLockBitstream
+/**
+ * \brief Lock output bitstream buffer
+ *
+ * This function is used to lock the bitstream buffer to read the encoded data.
+ * The client can only access the encoded data by calling this function.
+ * The pointer to client accessible encoded data is returned in the
+ * NV_ENC_LOCK_BITSTREAM::bitstreamBufferPtr field. The size of the encoded data
+ * in the output buffer is returned in the NV_ENC_LOCK_BITSTREAM::bitstreamSizeInBytes field.
+ * The NvEncodeAPI interface also returns the output picture type and picture structure
+ * of the encoded frame in NV_ENC_LOCK_BITSTREAM::pictureType and
+ * NV_ENC_LOCK_BITSTREAM::pictureStruct fields respectively. If the client has
+ * set NV_ENC_LOCK_BITSTREAM::doNotWait to 1, the function might return
+ * ::NV_ENC_ERR_LOCK_BUSY if the client is operating in synchronous mode. This is
+ * not a fatal failure; in this case the client can retry the function after a
+ * few milliseconds.
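+ *
+ * For illustration, a minimal lock/copy/unlock sketch in synchronous mode
+ * (error handling omitted; \p outputBuffer was submitted in a successful
+ * ::NvEncEncodePicture() call):
+ *\code
+    NV_ENC_LOCK_BITSTREAM lockParams = { 0 };
+    lockParams.version = NV_ENC_LOCK_BITSTREAM_VER;
+    lockParams.outputBitstream = outputBuffer;
+    lockParams.doNotWait = 0;
+    NvEncLockBitstream(encoder, &lockParams);
+    // lockParams.bitstreamBufferPtr points to lockParams.bitstreamSizeInBytes
+    // bytes of encoded data; copy the data out before unlocking.
+    NvEncUnlockBitstream(encoder, outputBuffer);
+ * \endcode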
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] lockBitstreamBufferParams
+ * Pointer to the ::_NV_ENC_LOCK_BITSTREAM structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_LOCK_BUSY \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncLockBitstream (void* encoder, NV_ENC_LOCK_BITSTREAM* lockBitstreamBufferParams);
+
+
+// NvEncUnlockBitstream
+/**
+ * \brief Unlock the output bitstream buffer
+ *
+ * This function is used to unlock the output bitstream buffer after the client
+ * has read the encoded data from output buffer. The client must call this function
+ * to unlock the output buffer which it has previously locked using ::NvEncLockBitstream()
+ * function. Using a locked bitstream buffer in ::NvEncEncodePicture() API will cause
+ * the function to fail.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] bitstreamBuffer
+ * bitstream buffer pointer being unlocked
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncUnlockBitstream (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer);
+
+
+// NvLockInputBuffer
+/**
+ * \brief Locks an input buffer
+ *
+ * This function is used to lock the input buffer to load the uncompressed YUV
+ * pixel data into input buffer memory. The client must pass the NV_ENC_INPUT_PTR
+ * it had previously allocated using ::NvEncCreateInputBuffer() in the
+ * NV_ENC_LOCK_INPUT_BUFFER::inputBuffer field.
+ * The NvEncodeAPI interface returns pointer to client accessible input buffer
+ * memory in NV_ENC_LOCK_INPUT_BUFFER::bufferDataPtr field.
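+ *
+ * For illustration, a minimal upload sketch (error handling omitted; the pitch
+ * returned in NV_ENC_LOCK_INPUT_BUFFER::pitch must be honoured per row):
+ *\code
+    NV_ENC_LOCK_INPUT_BUFFER lockInput = { 0 };
+    lockInput.version = NV_ENC_LOCK_INPUT_BUFFER_VER;
+    lockInput.inputBuffer = inputBuffer;
+    NvEncLockInputBuffer(encoder, &lockInput);
+    // Copy the YUV rows into lockInput.bufferDataPtr, advancing the destination
+    // by lockInput.pitch bytes per row.
+    NvEncUnlockInputBuffer(encoder, inputBuffer);
+ * \endcode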
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] lockInputBufferParams
+ * Pointer to the ::_NV_ENC_LOCK_INPUT_BUFFER structure
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_LOCK_BUSY \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncLockInputBuffer (void* encoder, NV_ENC_LOCK_INPUT_BUFFER* lockInputBufferParams);
+
+
+// NvUnlockInputBuffer
+/**
+ * \brief Unlocks the input buffer
+ *
+ * This function is used to unlock the input buffer memory previously locked for
+ * uploading YUV pixel data. The input buffer must be unlocked before being used
+ * again for encoding, otherwise NvEncodeAPI will fail the ::NvEncEncodePicture() call.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] inputBuffer
+ * Pointer to the input buffer that is being unlocked.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncUnlockInputBuffer (void* encoder, NV_ENC_INPUT_PTR inputBuffer);
+
+
+// NvEncGetEncodeStats
+/**
+ * \brief Get encoding statistics.
+ *
+ * This function is used to retrieve the encoding statistics.
+ * This API is not supported when encode device type is CUDA.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] encodeStats
+ * Pointer to the ::_NV_ENC_STAT structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetEncodeStats (void* encoder, NV_ENC_STAT* encodeStats);
+
+
+// NvEncGetSequenceParams
+/**
+ * \brief Get encoded sequence and picture header.
+ *
+ * This function can be used to retrieve the sequence and picture header out of
+ * band. The client must call this function only after the encoder has been
+ * initialized using ::NvEncInitializeEncoder() function. The client must
+ * allocate the memory where the NvEncodeAPI interface can copy the bitstream
+ * header and pass the pointer to the memory in NV_ENC_SEQUENCE_PARAM_PAYLOAD::spsppsBuffer.
+ * The size of buffer is passed in the field NV_ENC_SEQUENCE_PARAM_PAYLOAD::inBufferSize.
+ * The NvEncodeAPI interface will copy the bitstream header payload and returns
+ * the actual size of the bitstream header in the field
+ * NV_ENC_SEQUENCE_PARAM_PAYLOAD::outSPSPPSPayloadSize.
+ * The client must call ::NvEncGetSequenceParams() function from the same thread which is
+ * being used to call ::NvEncEncodePicture() function.
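+ *
+ * For illustration, a minimal sketch of retrieving the header out of band
+ * (the 256-byte buffer size is an arbitrary example; error handling omitted):
+ *\code
+    uint8_t header[256];
+    uint32_t headerSize = 0;
+    NV_ENC_SEQUENCE_PARAM_PAYLOAD payload = { 0 };
+    payload.version = NV_ENC_SEQUENCE_PARAM_PAYLOAD_VER;
+    payload.spsppsBuffer = header;
+    payload.inBufferSize = sizeof(header);
+    payload.outSPSPPSPayloadSize = &headerSize;
+    NvEncGetSequenceParams(encoder, &payload);
+    // header[0..headerSize-1] now holds the sequence/picture header.
+ * \endcode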
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] sequenceParamPayload
+ * Pointer to the ::_NV_ENC_SEQUENCE_PARAM_PAYLOAD structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncGetSequenceParams (void* encoder, NV_ENC_SEQUENCE_PARAM_PAYLOAD* sequenceParamPayload);
+
+
+// NvEncRegisterAsyncEvent
+/**
+ * \brief Register event for notification to encoding completion.
+ *
+ * This function is used to register the completion event with NvEncodeAPI
+ * interface. The event is required when the client has configured the encoder to
+ * work in asynchronous mode. In this mode the client needs to send a completion
+ * event with every output buffer. The NvEncodeAPI interface will signal the
+ * completion of the encoding process using this event. Only after the event is
+ * signalled the client can get the encoded data using ::NvEncLockBitstream() function.
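+ *
+ * For illustration, a minimal registration sketch on Windows (asynchronous
+ * mode only; the Win32 CreateEvent call is an example of how the client might
+ * allocate the event; error handling omitted):
+ *\code
+    NV_ENC_EVENT_PARAMS eventParams = { 0 };
+    eventParams.version = NV_ENC_EVENT_PARAMS_VER;
+    eventParams.completionEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    NvEncRegisterAsyncEvent(encoder, &eventParams);
+ * \endcode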
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] eventParams
+ * Pointer to the ::_NV_ENC_EVENT_PARAMS structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncRegisterAsyncEvent (void* encoder, NV_ENC_EVENT_PARAMS* eventParams);
+
+
+// NvEncUnregisterAsyncEvent
+/**
+ * \brief Unregister completion event.
+ *
+ * This function is used to unregister completion event which has been previously
+ * registered using ::NvEncRegisterAsyncEvent() function. The client must unregister
+ * all events before destroying the encoder using ::NvEncDestroyEncoder() function.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] eventParams
+ * Pointer to the ::_NV_ENC_EVENT_PARAMS structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncUnregisterAsyncEvent (void* encoder, NV_ENC_EVENT_PARAMS* eventParams);
+
+
+// NvEncMapInputResource
+/**
+ * \brief Map an externally created input resource pointer for encoding.
+ *
+ * Maps an externally allocated input resource (registered beforehand using the
+ * ::NvEncRegisterResource() API) and returns a NV_ENC_INPUT_PTR which can be used
+ * for encoding in the ::NvEncEncodePicture() function. The mapped resource is
+ * returned in the field NV_ENC_MAP_INPUT_RESOURCE::mappedResource.
+ * The NvEncodeAPI interface also returns the buffer format of the mapped resource
+ * in the field NV_ENC_MAP_INPUT_RESOURCE::mappedBufferFmt.
+ * This function provides a synchronization guarantee that any graphics work submitted
+ * on the input buffer is completed before the buffer is used for encoding. This is
+ * also true for compute (i.e. CUDA) work, provided that the previous workload using
+ * the input resource was submitted to the default stream.
+ * The client should not access any input buffer while they are mapped by the encoder.
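+ *
+ * For illustration, a minimal mapping sketch (\p registered is a
+ * NV_ENC_REGISTERED_PTR obtained earlier from ::NvEncRegisterResource();
+ * error handling omitted):
+ *\code
+    NV_ENC_MAP_INPUT_RESOURCE mapParams = { 0 };
+    mapParams.version = NV_ENC_MAP_INPUT_RESOURCE_VER;
+    mapParams.registeredResource = registered;
+    NvEncMapInputResource(encoder, &mapParams);
+    // mapParams.mappedResource can now be used as NV_ENC_PIC_PARAMS::inputBuffer.
+ * \endcode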
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] mapInputResParams
+ * Pointer to the ::_NV_ENC_MAP_INPUT_RESOURCE structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_RESOURCE_NOT_REGISTERED \n
+ * ::NV_ENC_ERR_MAP_FAILED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncMapInputResource (void* encoder, NV_ENC_MAP_INPUT_RESOURCE* mapInputResParams);
+
+
+// NvEncUnmapInputResource
+/**
+ * \brief Unmaps a NV_ENC_INPUT_PTR which was mapped for encoding
+ *
+ *
+ * Unmaps an input buffer which was previously mapped using the ::NvEncMapInputResource()
+ * API. The mapping created using ::NvEncMapInputResource() should be invalidated
+ * using this API before the external resource is destroyed by the client. The client
+ * must unmap the buffer after ::NvEncLockBitstream() API returns successfully for encode
+ * work submitted using the mapped input buffer.
+ *
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] mappedInputBuffer
+ * Pointer to the NV_ENC_INPUT_PTR
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_RESOURCE_NOT_REGISTERED \n
+ * ::NV_ENC_ERR_RESOURCE_NOT_MAPPED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncUnmapInputResource (void* encoder, NV_ENC_INPUT_PTR mappedInputBuffer);
+
+// NvEncDestroyEncoder
+/**
+ * \brief Destroy Encoding Session
+ *
+ * Destroys the encoder session previously created using ::NvEncOpenEncodeSession()
+ * function. The client must flush the encoder before freeing any resources. In order
+ * to flush the encoder the client must pass a NULL encode picture packet and either
+ * wait for the ::NvEncEncodePicture() function to return in synchronous mode or wait
+ * for the flush event to be signaled by the encoder in asynchronous mode.
+ * The client must free all the input and output resources created using the
+ * NvEncodeAPI interface before destroying the encoder. If the client is operating
+ * in asynchronous mode, it must also unregister the completion events previously
+ * registered.
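+ *
+ * For illustration, a minimal flush-and-destroy sketch in synchronous mode
+ * (NV_ENC_PIC_FLAG_EOS is defined earlier in this header; error handling
+ * omitted):
+ *\code
+    NV_ENC_PIC_PARAMS eosParams = { 0 };
+    eosParams.version = NV_ENC_PIC_PARAMS_VER;
+    eosParams.encodePicFlags = NV_ENC_PIC_FLAG_EOS;
+    NvEncEncodePicture(encoder, &eosParams);   // flush the encoder
+    // ...destroy input/output buffers and unregister events here...
+    NvEncDestroyEncoder(encoder);
+ * \endcode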
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncDestroyEncoder (void* encoder);
+
+// NvEncInvalidateRefFrames
+/**
+ * \brief Invalidate reference frames
+ *
+ * Invalidates reference frames based on the timestamp provided by the client.
+ * The encoder marks any reference frames or any frames which have been reconstructed
+ * using the corrupt frame as invalid for motion estimation and uses older reference
+ * frames for motion estimation. The encoder forces the current frame to be encoded
+ * as an intra frame if no reference frames are left after the invalidation process.
+ * This is useful for low-latency applications for error resiliency. The client
+ * is recommended to set NV_ENC_CONFIG_H264::maxNumRefFrames to a large value so
+ * that encoder can keep a backup of older reference frames in the DPB and can use them
+ * for motion estimation when the newer reference frames have been invalidated.
+ * This API can be called multiple times.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] invalidRefFrameTimeStamp
+ * Timestamp of the reference frame which needs to be invalidated.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncInvalidateRefFrames(void* encoder, uint64_t invalidRefFrameTimeStamp);
+
+// NvEncOpenEncodeSessionEx
+/**
+ * \brief Opens an encoding session.
+ *
+ * Opens an encoding session and returns a pointer to the encoder interface in
+ * the \p **encoder parameter. The client should start the encoding process by calling
+ * this API first.
+ * The client must pass a pointer to IDirect3DDevice9 device or CUDA context in the \p *device parameter.
+ * For the OpenGL interface, \p device must be NULL. An OpenGL context must be current when
+ * calling all NvEncodeAPI functions.
+ * If the creation of encoder session fails, the client must call ::NvEncDestroyEncoder API
+ * before exiting.
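+ *
+ * For illustration, a minimal session-open sketch for a DirectX device
+ * (\p d3dDevice is the client's device pointer; error handling omitted):
+ *\code
+    NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS sessionParams = { 0 };
+    sessionParams.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER;
+    sessionParams.deviceType = NV_ENC_DEVICE_TYPE_DIRECTX;
+    sessionParams.device = d3dDevice;
+    sessionParams.apiVersion = NVENCAPI_VERSION;
+    void* encoder = NULL;
+    NvEncOpenEncodeSessionEx(&sessionParams, &encoder);
+ * \endcode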
+ *
+ * \param [in] openSessionExParams
+ * Pointer to a ::NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS structure.
+ * \param [out] encoder
+ * Encode Session pointer to the NvEncodeAPI interface.
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_NO_ENCODE_DEVICE \n
+ * ::NV_ENC_ERR_UNSUPPORTED_DEVICE \n
+ * ::NV_ENC_ERR_INVALID_DEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncOpenEncodeSessionEx (NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS *openSessionExParams, void** encoder);
+
+// NvEncRegisterResource
+/**
+ * \brief Registers a resource with the Nvidia Video Encoder Interface.
+ *
+ * Registers a resource with the Nvidia Video Encoder Interface for bookkeeping.
+ * The client is expected to pass the registered resource handle as well, while calling ::NvEncMapInputResource API.
+ *
+ * \param [in] encoder
+ * Pointer to the NVEncodeAPI interface.
+ *
+ * \param [in] registerResParams
+ * Pointer to a ::_NV_ENC_REGISTER_RESOURCE structure
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_RESOURCE_REGISTER_FAILED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ * ::NV_ENC_ERR_UNIMPLEMENTED \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncRegisterResource (void* encoder, NV_ENC_REGISTER_RESOURCE* registerResParams);
+
+// NvEncUnregisterResource
+/**
+ * \brief Unregisters a resource previously registered with the Nvidia Video Encoder Interface.
+ *
+ * Unregisters a resource previously registered with the Nvidia Video Encoder Interface.
+ * The client is expected to unregister any resource that it has registered with the
+ * Nvidia Video Encoder Interface before destroying the resource.
+ *
+ * \param [in] encoder
+ * Pointer to the NVEncodeAPI interface.
+ *
+ * \param [in] registeredResource
+ * The registered resource pointer that was returned in ::NvEncRegisterResource.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_RESOURCE_NOT_REGISTERED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ * ::NV_ENC_ERR_UNIMPLEMENTED \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncUnregisterResource (void* encoder, NV_ENC_REGISTERED_PTR registeredResource);
+
+// NvEncReconfigureEncoder
+/**
+ * \brief Reconfigure an existing encoding session.
+ *
+ * Reconfigure an existing encoding session.
+ * The client should call this API to change/reconfigure the parameters passed during the
+ * NvEncInitializeEncoder API call.
+ * Currently, reconfiguration of the following is not supported:
+ * - Change in GOP structure.
+ * - Change in sync/async mode.
+ * - Change in MaxWidth & MaxHeight.
+ * - Change in PTD mode.
+ *
+ * Resolution change is possible only if maxEncodeWidth & maxEncodeHeight of NV_ENC_INITIALIZE_PARAMS
+ * is set while creating encoder session.
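+ *
+ * For illustration, a minimal reconfiguration sketch changing the resolution
+ * (assumes maxEncodeWidth/maxEncodeHeight were set at initialization and that
+ * the original parameters are kept in \p initParams; error handling omitted):
+ *\code
+    NV_ENC_RECONFIGURE_PARAMS reconfig = { 0 };
+    reconfig.version = NV_ENC_RECONFIGURE_PARAMS_VER;
+    reconfig.reInitEncodeParams = initParams;       // start from current settings
+    reconfig.reInitEncodeParams.encodeWidth = 1280;
+    reconfig.reInitEncodeParams.encodeHeight = 720;
+    NvEncReconfigureEncoder(encoder, &reconfig);
+ * \endcode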
+ *
+ * \param [in] encoder
+ * Pointer to the NVEncodeAPI interface.
+ *
+ * \param [in] reInitEncodeParams
+ * Pointer to a ::NV_ENC_RECONFIGURE_PARAMS structure.
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_NO_ENCODE_DEVICE \n
+ * ::NV_ENC_ERR_UNSUPPORTED_DEVICE \n
+ * ::NV_ENC_ERR_INVALID_DEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_GENERIC \n
+ *
+ */
+NVENCSTATUS NVENCAPI NvEncReconfigureEncoder (void *encoder, NV_ENC_RECONFIGURE_PARAMS* reInitEncodeParams);
+
+
+
+// NvEncCreateMVBuffer
+/**
+ * \brief Allocates output MV buffer for ME only mode.
+ *
+ * This function is used to allocate an output MV buffer. The size of the mvBuffer is
+ * dependent on the frame height and width of the last ::NvEncCreateInputBuffer() call.
+ * The NV_ENC_OUTPUT_PTR returned by the NvEncodeAPI interface in the
+ * ::NV_ENC_CREATE_MV_BUFFER::mvBuffer field should be used in
+ * ::NvEncRunMotionEstimationOnly() API.
+ * Client must lock ::NV_ENC_CREATE_MV_BUFFER::mvBuffer using ::NvEncLockBitstream() API to get the motion vector data.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in,out] createMVBufferParams
+ * Pointer to the ::NV_ENC_CREATE_MV_BUFFER structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_GENERIC \n
+ */
+NVENCSTATUS NVENCAPI NvEncCreateMVBuffer (void* encoder, NV_ENC_CREATE_MV_BUFFER* createMVBufferParams);
+
+
+// NvEncDestroyMVBuffer
+/**
+ * \brief Release an output MV buffer for ME only mode.
+ *
+ * This function is used to release the output MV buffer allocated using
+ * the ::NvEncCreateMVBuffer() function. The client must release the output
+ * mvBuffer using this function before destroying the encoder session.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] mvBuffer
+ * Pointer to the mvBuffer being released.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ */
+NVENCSTATUS NVENCAPI NvEncDestroyMVBuffer (void* encoder, NV_ENC_OUTPUT_PTR mvBuffer);
+
+
+// NvEncRunMotionEstimationOnly
+/**
+ * \brief Submit an input picture and reference frame for motion estimation in ME only mode.
+ *
+ * This function is used to submit the input frame and reference frame for motion
+ * estimation. The ME parameters are passed using \p *meOnlyParams, which is a pointer
+ * to the ::_NV_ENC_MEONLY_PARAMS structure.
+ * Client must lock ::NV_ENC_CREATE_MV_BUFFER::mvBuffer using ::NvEncLockBitstream() API
+ * to get the motion vector data.
+ *
+ * \param [in] encoder
+ * Pointer to the NvEncodeAPI interface.
+ * \param [in] meOnlyParams
+ * Pointer to the ::_NV_ENC_MEONLY_PARAMS structure.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ * ::NV_ENC_ERR_INVALID_ENCODERDEVICE \n
+ * ::NV_ENC_ERR_DEVICE_NOT_EXIST \n
+ * ::NV_ENC_ERR_UNSUPPORTED_PARAM \n
+ * ::NV_ENC_ERR_OUT_OF_MEMORY \n
+ * ::NV_ENC_ERR_INVALID_PARAM \n
+ * ::NV_ENC_ERR_INVALID_VERSION \n
+ * ::NV_ENC_ERR_NEED_MORE_INPUT \n
+ * ::NV_ENC_ERR_ENCODER_NOT_INITIALIZED \n
+ * ::NV_ENC_ERR_GENERIC \n
+ */
+NVENCSTATUS NVENCAPI NvEncRunMotionEstimationOnly (void* encoder, NV_ENC_MEONLY_PARAMS* meOnlyParams);
+
+// NvEncodeAPIGetMaxSupportedVersion
+/**
+ * \brief Get the largest NvEncodeAPI version supported by the driver.
+ *
+ * This function can be used by clients to determine if the driver supports
+ * the NvEncodeAPI header the application was compiled with.
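+ *
+ * For illustration, a minimal compatibility check (NVENCAPI_MAJOR_VERSION and
+ * NVENCAPI_MINOR_VERSION are defined at the top of this header):
+ *\code
+    uint32_t maxVersion = 0;
+    NvEncodeAPIGetMaxSupportedVersion(&maxVersion);
+    uint32_t headerVersion = (NVENCAPI_MAJOR_VERSION << 4) | NVENCAPI_MINOR_VERSION;
+    if (maxVersion < headerVersion)
+    {
+        // The installed driver is older than this header; fail initialization.
+    }
+ * \endcode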
+ *
+ * \param [out] version
+ * Pointer to the requested value. The 4 least significant bits in the returned
+ * value indicate the minor version and the rest of the bits indicate the major
+ * version of the largest supported version.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS \n
+ * ::NV_ENC_ERR_INVALID_PTR \n
+ */
+NVENCSTATUS NVENCAPI NvEncodeAPIGetMaxSupportedVersion (uint32_t* version);
+
+
+/// \cond API PFN
+/*
+ * Defines API function pointers
+ */
+typedef NVENCSTATUS (NVENCAPI* PNVENCOPENENCODESESSION) (void* device, uint32_t deviceType, void** encoder);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEGUIDCOUNT) (void* encoder, uint32_t* encodeGUIDCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEGUIDS) (void* encoder, GUID* GUIDs, uint32_t guidArraySize, uint32_t* GUIDCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPROFILEGUIDCOUNT) (void* encoder, GUID encodeGUID, uint32_t* encodeProfileGUIDCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPROFILEGUIDS) (void* encoder, GUID encodeGUID, GUID* profileGUIDs, uint32_t guidArraySize, uint32_t* GUIDCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETINPUTFORMATCOUNT) (void* encoder, GUID encodeGUID, uint32_t* inputFmtCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETINPUTFORMATS) (void* encoder, GUID encodeGUID, NV_ENC_BUFFER_FORMAT* inputFmts, uint32_t inputFmtArraySize, uint32_t* inputFmtCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODECAPS) (void* encoder, GUID encodeGUID, NV_ENC_CAPS_PARAM* capsParam, int* capsVal);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPRESETCOUNT) (void* encoder, GUID encodeGUID, uint32_t* encodePresetGUIDCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPRESETGUIDS) (void* encoder, GUID encodeGUID, GUID* presetGUIDs, uint32_t guidArraySize, uint32_t* encodePresetGUIDCount);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODEPRESETCONFIG) (void* encoder, GUID encodeGUID, GUID presetGUID, NV_ENC_PRESET_CONFIG* presetConfig);
+typedef NVENCSTATUS (NVENCAPI* PNVENCINITIALIZEENCODER) (void* encoder, NV_ENC_INITIALIZE_PARAMS* createEncodeParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCCREATEINPUTBUFFER) (void* encoder, NV_ENC_CREATE_INPUT_BUFFER* createInputBufferParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYINPUTBUFFER) (void* encoder, NV_ENC_INPUT_PTR inputBuffer);
+typedef NVENCSTATUS (NVENCAPI* PNVENCCREATEBITSTREAMBUFFER) (void* encoder, NV_ENC_CREATE_BITSTREAM_BUFFER* createBitstreamBufferParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYBITSTREAMBUFFER) (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer);
+typedef NVENCSTATUS (NVENCAPI* PNVENCENCODEPICTURE) (void* encoder, NV_ENC_PIC_PARAMS* encodePicParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCLOCKBITSTREAM) (void* encoder, NV_ENC_LOCK_BITSTREAM* lockBitstreamBufferParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCUNLOCKBITSTREAM) (void* encoder, NV_ENC_OUTPUT_PTR bitstreamBuffer);
+typedef NVENCSTATUS (NVENCAPI* PNVENCLOCKINPUTBUFFER) (void* encoder, NV_ENC_LOCK_INPUT_BUFFER* lockInputBufferParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCUNLOCKINPUTBUFFER) (void* encoder, NV_ENC_INPUT_PTR inputBuffer);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETENCODESTATS) (void* encoder, NV_ENC_STAT* encodeStats);
+typedef NVENCSTATUS (NVENCAPI* PNVENCGETSEQUENCEPARAMS) (void* encoder, NV_ENC_SEQUENCE_PARAM_PAYLOAD* sequenceParamPayload);
+typedef NVENCSTATUS (NVENCAPI* PNVENCREGISTERASYNCEVENT) (void* encoder, NV_ENC_EVENT_PARAMS* eventParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCUNREGISTERASYNCEVENT) (void* encoder, NV_ENC_EVENT_PARAMS* eventParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCMAPINPUTRESOURCE) (void* encoder, NV_ENC_MAP_INPUT_RESOURCE* mapInputResParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCUNMAPINPUTRESOURCE) (void* encoder, NV_ENC_INPUT_PTR mappedInputBuffer);
+typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYENCODER) (void* encoder);
+typedef NVENCSTATUS (NVENCAPI* PNVENCINVALIDATEREFFRAMES) (void* encoder, uint64_t invalidRefFrameTimeStamp);
+typedef NVENCSTATUS (NVENCAPI* PNVENCOPENENCODESESSIONEX) (NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS *openSessionExParams, void** encoder);
+typedef NVENCSTATUS (NVENCAPI* PNVENCREGISTERRESOURCE) (void* encoder, NV_ENC_REGISTER_RESOURCE* registerResParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCUNREGISTERRESOURCE) (void* encoder, NV_ENC_REGISTERED_PTR registeredRes);
+typedef NVENCSTATUS (NVENCAPI* PNVENCRECONFIGUREENCODER) (void* encoder, NV_ENC_RECONFIGURE_PARAMS* reInitEncodeParams);
+
+typedef NVENCSTATUS (NVENCAPI* PNVENCCREATEMVBUFFER) (void* encoder, NV_ENC_CREATE_MV_BUFFER* createMVBufferParams);
+typedef NVENCSTATUS (NVENCAPI* PNVENCDESTROYMVBUFFER) (void* encoder, NV_ENC_OUTPUT_PTR mvBuffer);
+typedef NVENCSTATUS (NVENCAPI* PNVENCRUNMOTIONESTIMATIONONLY) (void* encoder, NV_ENC_MEONLY_PARAMS* meOnlyParams);
+
+
+/// \endcond
+
+
+/** @} */ /* END ENCODE_FUNC */
+
+/**
+ * \ingroup ENCODER_STRUCTURE
+ * NV_ENCODE_API_FUNCTION_LIST
+ */
+typedef struct _NV_ENCODE_API_FUNCTION_LIST
+{
+ uint32_t version; /**< [in]: Client should pass NV_ENCODE_API_FUNCTION_LIST_VER. */
+ uint32_t reserved; /**< [in]: Reserved and should be set to 0. */
+ PNVENCOPENENCODESESSION nvEncOpenEncodeSession; /**< [out]: Client should access ::NvEncOpenEncodeSession() API through this pointer. */
+ PNVENCGETENCODEGUIDCOUNT nvEncGetEncodeGUIDCount; /**< [out]: Client should access ::NvEncGetEncodeGUIDCount() API through this pointer. */
+ PNVENCGETENCODEPRESETCOUNT nvEncGetEncodeProfileGUIDCount; /**< [out]: Client should access ::NvEncGetEncodeProfileGUIDCount() API through this pointer.*/
+ PNVENCGETENCODEPRESETGUIDS nvEncGetEncodeProfileGUIDs; /**< [out]: Client should access ::NvEncGetEncodeProfileGUIDs() API through this pointer. */
+ PNVENCGETENCODEGUIDS nvEncGetEncodeGUIDs; /**< [out]: Client should access ::NvEncGetEncodeGUIDs() API through this pointer. */
+ PNVENCGETINPUTFORMATCOUNT nvEncGetInputFormatCount; /**< [out]: Client should access ::NvEncGetInputFormatCount() API through this pointer. */
+ PNVENCGETINPUTFORMATS nvEncGetInputFormats; /**< [out]: Client should access ::NvEncGetInputFormats() API through this pointer. */
+ PNVENCGETENCODECAPS nvEncGetEncodeCaps; /**< [out]: Client should access ::NvEncGetEncodeCaps() API through this pointer. */
+ PNVENCGETENCODEPRESETCOUNT nvEncGetEncodePresetCount; /**< [out]: Client should access ::NvEncGetEncodePresetCount() API through this pointer. */
+ PNVENCGETENCODEPRESETGUIDS nvEncGetEncodePresetGUIDs; /**< [out]: Client should access ::NvEncGetEncodePresetGUIDs() API through this pointer. */
+ PNVENCGETENCODEPRESETCONFIG nvEncGetEncodePresetConfig; /**< [out]: Client should access ::NvEncGetEncodePresetConfig() API through this pointer. */
+ PNVENCINITIALIZEENCODER nvEncInitializeEncoder; /**< [out]: Client should access ::NvEncInitializeEncoder() API through this pointer. */
+ PNVENCCREATEINPUTBUFFER nvEncCreateInputBuffer; /**< [out]: Client should access ::NvEncCreateInputBuffer() API through this pointer. */
+ PNVENCDESTROYINPUTBUFFER nvEncDestroyInputBuffer; /**< [out]: Client should access ::NvEncDestroyInputBuffer() API through this pointer. */
+ PNVENCCREATEBITSTREAMBUFFER nvEncCreateBitstreamBuffer; /**< [out]: Client should access ::NvEncCreateBitstreamBuffer() API through this pointer. */
+ PNVENCDESTROYBITSTREAMBUFFER nvEncDestroyBitstreamBuffer; /**< [out]: Client should access ::NvEncDestroyBitstreamBuffer() API through this pointer. */
+ PNVENCENCODEPICTURE nvEncEncodePicture; /**< [out]: Client should access ::NvEncEncodePicture() API through this pointer. */
+ PNVENCLOCKBITSTREAM nvEncLockBitstream; /**< [out]: Client should access ::NvEncLockBitstream() API through this pointer. */
+ PNVENCUNLOCKBITSTREAM nvEncUnlockBitstream; /**< [out]: Client should access ::NvEncUnlockBitstream() API through this pointer. */
+ PNVENCLOCKINPUTBUFFER nvEncLockInputBuffer; /**< [out]: Client should access ::NvEncLockInputBuffer() API through this pointer. */
+ PNVENCUNLOCKINPUTBUFFER nvEncUnlockInputBuffer; /**< [out]: Client should access ::NvEncUnlockInputBuffer() API through this pointer. */
+ PNVENCGETENCODESTATS nvEncGetEncodeStats; /**< [out]: Client should access ::NvEncGetEncodeStats() API through this pointer. */
+ PNVENCGETSEQUENCEPARAMS nvEncGetSequenceParams; /**< [out]: Client should access ::NvEncGetSequenceParams() API through this pointer. */
+ PNVENCREGISTERASYNCEVENT nvEncRegisterAsyncEvent; /**< [out]: Client should access ::NvEncRegisterAsyncEvent() API through this pointer. */
+ PNVENCUNREGISTERASYNCEVENT nvEncUnregisterAsyncEvent; /**< [out]: Client should access ::NvEncUnregisterAsyncEvent() API through this pointer. */
+ PNVENCMAPINPUTRESOURCE nvEncMapInputResource; /**< [out]: Client should access ::NvEncMapInputResource() API through this pointer. */
+ PNVENCUNMAPINPUTRESOURCE nvEncUnmapInputResource; /**< [out]: Client should access ::NvEncUnmapInputResource() API through this pointer. */
+ PNVENCDESTROYENCODER nvEncDestroyEncoder; /**< [out]: Client should access ::NvEncDestroyEncoder() API through this pointer. */
+ PNVENCINVALIDATEREFFRAMES nvEncInvalidateRefFrames; /**< [out]: Client should access ::NvEncInvalidateRefFrames() API through this pointer. */
+ PNVENCOPENENCODESESSIONEX nvEncOpenEncodeSessionEx; /**< [out]: Client should access ::NvEncOpenEncodeSessionEx() API through this pointer. */
+ PNVENCREGISTERRESOURCE nvEncRegisterResource; /**< [out]: Client should access ::NvEncRegisterResource() API through this pointer. */
+ PNVENCUNREGISTERRESOURCE nvEncUnregisterResource; /**< [out]: Client should access ::NvEncUnregisterResource() API through this pointer. */
+ PNVENCRECONFIGUREENCODER nvEncReconfigureEncoder; /**< [out]: Client should access ::NvEncReconfigureEncoder() API through this pointer. */
+ void* reserved1;
+ PNVENCCREATEMVBUFFER nvEncCreateMVBuffer; /**< [out]: Client should access ::NvEncCreateMVBuffer API through this pointer. */
+ PNVENCDESTROYMVBUFFER nvEncDestroyMVBuffer; /**< [out]: Client should access ::NvEncDestroyMVBuffer API through this pointer. */
+ PNVENCRUNMOTIONESTIMATIONONLY nvEncRunMotionEstimationOnly; /**< [out]: Client should access ::NvEncRunMotionEstimationOnly API through this pointer. */
+ void* reserved2[281]; /**< [in]: Reserved and must be set to NULL */
+} NV_ENCODE_API_FUNCTION_LIST;
+
+/** Macro for constructing the version field of ::_NV_ENCODE_API_FUNCTION_LIST. */
+#define NV_ENCODE_API_FUNCTION_LIST_VER NVENCAPI_STRUCT_VERSION(2)
+
+// NvEncodeAPICreateInstance
+/**
+ * \ingroup ENCODE_FUNC
+ * Entry Point to the NvEncodeAPI interface.
+ *
+ * Creates an instance of the NvEncodeAPI interface, and populates
+ * functionList with function pointers to the API routines implemented by the
+ * NvEncodeAPI interface.
+ *
+ * \param [out] functionList
+ *   Pointer to the ::NV_ENCODE_API_FUNCTION_LIST structure to be populated.
+ *
+ * \return
+ * ::NV_ENC_SUCCESS
+ * ::NV_ENC_ERR_INVALID_PTR
+ */
+NVENCSTATUS NVENCAPI NvEncodeAPICreateInstance(NV_ENCODE_API_FUNCTION_LIST *functionList);
+
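+/*
+ * A minimal usage sketch: zero-initialize the list, set the version field, and
+ * call the API through the populated pointers afterwards.
+ *
+ *   NV_ENCODE_API_FUNCTION_LIST nvenc = { 0 };
+ *   nvenc.version = NV_ENCODE_API_FUNCTION_LIST_VER;
+ *   if (NvEncodeAPICreateInstance(&nvenc) == NV_ENC_SUCCESS)
+ *   {
+ *       // e.g. nvenc.nvEncOpenEncodeSessionEx(&sessionParams, &encoder);
+ *   }
+ */
+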
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif
+
diff --git a/Plugin~/WebRTCPlugin/pch.cpp b/Plugin~/WebRTCPlugin/pch.cpp
new file mode 100644
index 0000000000..bcb5590be1
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/pch.cpp
@@ -0,0 +1 @@
+#include "pch.h"
diff --git a/Plugin~/WebRTCPlugin/pch.h b/Plugin~/WebRTCPlugin/pch.h
new file mode 100644
index 0000000000..0be103dff0
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/pch.h
@@ -0,0 +1,98 @@
+#pragma once
+#pragma region webRTC related
+#include "api/mediastreaminterface.h"
+#include "api/peerconnectioninterface.h"
+#include "api/create_peerconnection_factory.h"
+#include "api/audio_codecs/audio_decoder_factory_template.h"
+#include "api/audio_codecs/audio_encoder_factory_template.h"
+#include "api/audio_codecs/opus/audio_decoder_opus.h"
+#include "api/audio_codecs/opus/audio_encoder_opus.h"
+#include "api/test/fakeconstraints.h"
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "api/video_codecs/builtin_video_encoder_factory.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "api/video/i420_buffer.h"
+
+#include "rtc_base/thread.h"
+#include "rtc_base/refcountedobject.h"
+#include "rtc_base/strings/json.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/flags.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ssladapter.h"
+#include "rtc_base/win32socketinit.h"
+#include "rtc_base/win32socketserver.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/nethelpers.h"
+#include "rtc_base/stringutils.h"
+#include "rtc_base/physicalsocketserver.h"
+#include "rtc_base/signalthread.h"
+#include "rtc_base/third_party/sigslot/sigslot.h"
+#include "rtc_base/atomicops.h"
+#include "rtc_base/win32.h"
+#include "rtc_base/win32socketserver.h"
+#include "rtc_base/asynctcpsocket.h"
+
+#include "media/base/videocapturer.h"
+#include "media/engine/webrtcvideocapturerfactory.h"
+#include "media/engine/internaldecoderfactory.h"
+#include "media/base/h264_profile_level_id.h"
+#include "media/engine/webrtcvideoencoderfactory.h"
+#include "media/base/adaptedvideotracksource.h"
+#include "media/base/mediachannel.h"
+#include "media/base/videocommon.h"
+
+#include "modules/video_capture/video_capture_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_device/audio_device_buffer.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/video_coding/codecs/h264/include/h264.h"
+
+#include "common_video/h264/h264_bitstream_parser.h"
+#include "common_video/h264/h264_common.h"
+
+#include "media/base/videobroadcaster.h"
+#pragma endregion
+#include "d3d11.h"
+
+namespace WebRTC
+{
+ void LogPrint(const char* fmt, ...);
+ void LogPrint(const wchar_t* fmt, ...);
+ void checkf(bool result, const char* msg);
+#define DebugLog(...) LogPrint("webrtc Log: " __VA_ARGS__)
+#define DebugWarning(...) LogPrint("webrtc Warning: " __VA_ARGS__)
+#define DebugError(...) LogPrint("webrtc Error: " __VA_ARGS__)
+#define DebugLogW(...) LogPrint(L"webrtc Log: " __VA_ARGS__)
+#define DebugWarningW(...) LogPrint(L"webrtc Warning: " __VA_ARGS__)
+#define DebugErrorW(...) LogPrint(L"webrtc Error: " __VA_ARGS__)
+#define NV_RESULT(NvFunction) ((NvFunction) == NV_ENC_SUCCESS)
+
+ template<typename ... Args>
+ std::string StringFormat(const std::string& format, Args ... args)
+ {
+ size_t size = snprintf(nullptr, 0, format.c_str(), args ...) + 1;
+ std::unique_ptr<char[]> buf(new char[size]);
+ snprintf(buf.get(), size, format.c_str(), args ...);
+ return std::string(buf.get(), buf.get() + size - 1);
+ }
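+ // Usage sketch: StringFormat("frame %dx%d", 1920, 1080) returns
+ // std::string("frame 1920x1080").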
+ using UnityFrameBuffer = ID3D11Texture2D;
+ using uint8 = unsigned char;
+ using uint16 = unsigned short int;
+ using uint32 = unsigned int;
+ using uint64 = unsigned long long;
+ using int8 = signed char;
+ using int16 = signed short int;
+ using int32 = signed int;
+ using int64 = signed long long;
+
+ const uint32 bufferedFrameNum = 3;
+ extern UnityFrameBuffer* renderTextures[bufferedFrameNum];
+ extern ID3D11DeviceContext* context;
+ extern ID3D11Device* g_D3D11Device;
+}
diff --git a/Plugin~/WebRTCPlugin/targetver.h b/Plugin~/WebRTCPlugin/targetver.h
new file mode 100644
index 0000000000..87c0086de7
--- /dev/null
+++ b/Plugin~/WebRTCPlugin/targetver.h
@@ -0,0 +1,8 @@
+#pragma once
+
+// Including SDKDDKVer.h defines the highest available Windows platform.
+
+// If you wish to build your application for a previous Windows platform, include WinSDKVer.h and
+// set the _WIN32_WINNT macro to the platform you wish to support before including SDKDDKVer.h.
+
+#include <SDKDDKVer.h>
diff --git a/Plugin~/unity/include/IUnityGraphics.h b/Plugin~/unity/include/IUnityGraphics.h
new file mode 100644
index 0000000000..7345cd3165
--- /dev/null
+++ b/Plugin~/unity/include/IUnityGraphics.h
@@ -0,0 +1,52 @@
+#pragma once
+#include "IUnityInterface.h"
+
+typedef enum UnityGfxRenderer
+{
+ //kUnityGfxRendererOpenGL = 0, // Legacy OpenGL, removed
+ //kUnityGfxRendererD3D9 = 1, // Direct3D 9, removed
+ kUnityGfxRendererD3D11 = 2, // Direct3D 11
+ kUnityGfxRendererGCM = 3, // PlayStation 3
+ kUnityGfxRendererNull = 4, // "null" device (used in batch mode)
+ kUnityGfxRendererOpenGLES20 = 8, // OpenGL ES 2.0
+ kUnityGfxRendererOpenGLES30 = 11, // OpenGL ES 3.0
+ kUnityGfxRendererGXM = 12, // PlayStation Vita
+ kUnityGfxRendererPS4 = 13, // PlayStation 4
+ kUnityGfxRendererXboxOne = 14, // Xbox One
+ kUnityGfxRendererMetal = 16, // iOS Metal
+ kUnityGfxRendererOpenGLCore = 17, // OpenGL core
+ kUnityGfxRendererD3D12 = 18, // Direct3D 12
+ kUnityGfxRendererVulkan = 21, // Vulkan
+ kUnityGfxRendererNvn = 22, // Nintendo Switch NVN API
+ kUnityGfxRendererXboxOneD3D12 = 23 // MS XboxOne Direct3D 12
+} UnityGfxRenderer;
+
+typedef enum UnityGfxDeviceEventType
+{
+ kUnityGfxDeviceEventInitialize = 0,
+ kUnityGfxDeviceEventShutdown = 1,
+ kUnityGfxDeviceEventBeforeReset = 2,
+ kUnityGfxDeviceEventAfterReset = 3,
+} UnityGfxDeviceEventType;
+
+typedef void (UNITY_INTERFACE_API * IUnityGraphicsDeviceEventCallback)(UnityGfxDeviceEventType eventType);
+
+// Should only be used on the rendering thread unless noted otherwise.
+UNITY_DECLARE_INTERFACE(IUnityGraphics)
+{
+ UnityGfxRenderer(UNITY_INTERFACE_API * GetRenderer)(); // Thread safe
+
+ // This callback will be called when graphics device is created, destroyed, reset, etc.
+ // It is possible to miss the kUnityGfxDeviceEventInitialize event in case plugin is loaded at a later time,
+ // when the graphics device is already created.
+ void(UNITY_INTERFACE_API * RegisterDeviceEventCallback)(IUnityGraphicsDeviceEventCallback callback);
+ void(UNITY_INTERFACE_API * UnregisterDeviceEventCallback)(IUnityGraphicsDeviceEventCallback callback);
+ int(UNITY_INTERFACE_API * ReserveEventIDRange)(int count); // reserves 'count' event IDs. Plugins should use the result as a base index when issuing events back and forth to avoid event id clashes.
+};
+UNITY_REGISTER_INTERFACE_GUID(0x7CBA0A9CA4DDB544ULL, 0x8C5AD4926EB17B11ULL, IUnityGraphics)
+
+
+// Certain Unity APIs (GL.IssuePluginEvent, CommandBuffer.IssuePluginEvent) can callback into native plugins.
+// Provide them with an address to a function of this signature.
+typedef void (UNITY_INTERFACE_API * UnityRenderingEvent)(int eventId);
+typedef void (UNITY_INTERFACE_API * UnityRenderingEventAndData)(int eventId, void* data);
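+
+// A minimal sketch of the common pattern (names are illustrative): the plugin
+// defines a function of this signature and hands its address to managed code,
+// which passes it to GL.IssuePluginEvent / CommandBuffer.IssuePluginEvent.
+//
+//   static void UNITY_INTERFACE_API OnRenderEvent(int eventId)
+//   {
+//       // issue graphics work on the rendering thread
+//   }
+//   extern "C" UnityRenderingEvent UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API
+//   GetRenderEventFunc()
+//   {
+//       return OnRenderEvent;
+//   }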
diff --git a/Plugin~/unity/include/IUnityGraphicsD3D11.h b/Plugin~/unity/include/IUnityGraphicsD3D11.h
new file mode 100644
index 0000000000..74b9c153b0
--- /dev/null
+++ b/Plugin~/unity/include/IUnityGraphicsD3D11.h
@@ -0,0 +1,18 @@
+#pragma once
+#include "IUnityInterface.h"
+#include "d3d11.h"
+
+
+// Should only be used on the rendering thread unless noted otherwise.
+UNITY_DECLARE_INTERFACE(IUnityGraphicsD3D11)
+{
+ ID3D11Device* (UNITY_INTERFACE_API * GetDevice)();
+
+ ID3D11Resource* (UNITY_INTERFACE_API * TextureFromRenderBuffer)(UnityRenderBuffer buffer);
+ ID3D11Resource* (UNITY_INTERFACE_API * TextureFromNativeTexture)(UnityTextureID texture);
+
+ ID3D11RenderTargetView* (UNITY_INTERFACE_API * RTVFromRenderBuffer)(UnityRenderBuffer surface);
+ ID3D11ShaderResourceView* (UNITY_INTERFACE_API * SRVFromNativeTexture)(UnityTextureID texture);
+};
+
+UNITY_REGISTER_INTERFACE_GUID(0xAAB37EF87A87D748ULL, 0xBF76967F07EFB177ULL, IUnityGraphicsD3D11)
diff --git a/Plugin~/unity/include/IUnityInterface.h b/Plugin~/unity/include/IUnityInterface.h
new file mode 100644
index 0000000000..d82da9ed12
--- /dev/null
+++ b/Plugin~/unity/include/IUnityInterface.h
@@ -0,0 +1,200 @@
+#pragma once
+
+// Unity native plugin API
+// Compatible with C99
+
+#if defined(__CYGWIN32__)
+#define UNITY_INTERFACE_API __stdcall
+#define UNITY_INTERFACE_EXPORT __declspec(dllexport)
+#elif defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(_WIN64) || defined(WINAPI_FAMILY)
+#define UNITY_INTERFACE_API __stdcall
+#define UNITY_INTERFACE_EXPORT __declspec(dllexport)
+#elif defined(__MACH__) || defined(__ANDROID__) || defined(__linux__)
+#define UNITY_INTERFACE_API
+#define UNITY_INTERFACE_EXPORT
+#else
+#define UNITY_INTERFACE_API
+#define UNITY_INTERFACE_EXPORT
+#endif
+
+/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// IUnityInterface is a registry of interfaces we choose to expose to plugins.
+//
+// USAGE:
+// ---------
+// To retrieve an interface a user can do the following from a plugin, assuming they have the header file for the interface:
+//
+// IMyInterface* ptr = registry->Get<IMyInterface>();
+/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Unity Interface GUID
+// Ensures global uniqueness.
+//
+// Template specialization is used to produce a means of looking up a GUID from its interface type at compile time.
+// The net result should compile down to passing around the GUID.
+//
+// UNITY_REGISTER_INTERFACE_GUID should be placed in the header file of any interface definition outside of all namespaces.
+// The interface structure and the registration GUID are all that is required to expose the interface to other systems.
+struct UnityInterfaceGUID
+{
+#ifdef __cplusplus
+ UnityInterfaceGUID(unsigned long long high, unsigned long long low)
+ : m_GUIDHigh(high)
+ , m_GUIDLow(low)
+ {
+ }
+
+ UnityInterfaceGUID(const UnityInterfaceGUID& other)
+ {
+ m_GUIDHigh = other.m_GUIDHigh;
+ m_GUIDLow = other.m_GUIDLow;
+ }
+
+ UnityInterfaceGUID& operator=(const UnityInterfaceGUID& other)
+ {
+ m_GUIDHigh = other.m_GUIDHigh;
+ m_GUIDLow = other.m_GUIDLow;
+ return *this;
+ }
+
+ bool Equals(const UnityInterfaceGUID& other) const { return m_GUIDHigh == other.m_GUIDHigh && m_GUIDLow == other.m_GUIDLow; }
+ bool LessThan(const UnityInterfaceGUID& other) const { return m_GUIDHigh < other.m_GUIDHigh || (m_GUIDHigh == other.m_GUIDHigh && m_GUIDLow < other.m_GUIDLow); }
+#endif
+ unsigned long long m_GUIDHigh;
+ unsigned long long m_GUIDLow;
+};
+#ifdef __cplusplus
+inline bool operator==(const UnityInterfaceGUID& left, const UnityInterfaceGUID& right) { return left.Equals(right); }
+inline bool operator!=(const UnityInterfaceGUID& left, const UnityInterfaceGUID& right) { return !left.Equals(right); }
+inline bool operator<(const UnityInterfaceGUID& left, const UnityInterfaceGUID& right) { return left.LessThan(right); }
+inline bool operator>(const UnityInterfaceGUID& left, const UnityInterfaceGUID& right) { return right.LessThan(left); }
+inline bool operator>=(const UnityInterfaceGUID& left, const UnityInterfaceGUID& right) { return !operator<(left, right); }
+inline bool operator<=(const UnityInterfaceGUID& left, const UnityInterfaceGUID& right) { return !operator>(left, right); }
+#else
+typedef struct UnityInterfaceGUID UnityInterfaceGUID;
+#endif
+
+
+#ifdef __cplusplus
+#define UNITY_DECLARE_INTERFACE(NAME) \
+ struct NAME : IUnityInterface
+
+// Generic version of GetUnityInterfaceGUID to allow us to specialize it
+// per interface below. The generic version has no actual implementation
+// on purpose.
+//
+// If you get errors about return values related to this method then
+// you have forgotten to include UNITY_REGISTER_INTERFACE_GUID with
+// your interface, or it is not visible at some point when you are
+// trying to retrieve or add an interface.
+template<typename TYPE>
+inline const UnityInterfaceGUID GetUnityInterfaceGUID();
+
+// This is the macro you provide in your public interface header
+// outside of a namespace to allow us to map between type and GUID
+// without the user having to worry about it when attempting to
+// add or retrieve an interface from the registry.
+#define UNITY_REGISTER_INTERFACE_GUID(HASHH, HASHL, TYPE) \
+ template<> \
+ inline const UnityInterfaceGUID GetUnityInterfaceGUID<TYPE>() \
+ { \
+ return UnityInterfaceGUID(HASHH,HASHL); \
+ }
+
+// Same as UNITY_REGISTER_INTERFACE_GUID but allows the interface to live in
+// a particular namespace. As long as the namespace is visible at the time you call
+// GetUnityInterfaceGUID< INTERFACETYPE >(), or you explicitly qualify it in the template
+// calls, this will work fine; only the macro here needs to have the additional parameter.
+#define UNITY_REGISTER_INTERFACE_GUID_IN_NAMESPACE(HASHH, HASHL, TYPE, NAMESPACE) \
+ const UnityInterfaceGUID TYPE##_GUID(HASHH, HASHL); \
+ template<> \
+ inline const UnityInterfaceGUID GetUnityInterfaceGUID< NAMESPACE :: TYPE >() \
+ { \
+ return UnityInterfaceGUID(HASHH,HASHL); \
+ }
+
+// These macros allow for C compatibility in user code.
+#define UNITY_GET_INTERFACE_GUID(TYPE) GetUnityInterfaceGUID< TYPE >()
+
+
+#else
+#define UNITY_DECLARE_INTERFACE(NAME) \
+ typedef struct NAME NAME; \
+ struct NAME
+
+// NOTE: This has the downside that on some compilers it will not get stripped from all compilation units that
+// can see a header containing this constant. However, it's only for C compatibility and thus should have
+// minimal impact.
+#define UNITY_REGISTER_INTERFACE_GUID(HASHH, HASHL, TYPE) \
+ const UnityInterfaceGUID TYPE##_GUID = {HASHH, HASHL};
+
+// In general, namespaces are going to be a problem for C code: any interfaces we expose in a namespace are
+// not going to be usable from C.
+#define UNITY_REGISTER_INTERFACE_GUID_IN_NAMESPACE(HASHH, HASHL, TYPE, NAMESPACE)
+
+// These macros allow for C compatibility in user code.
+#define UNITY_GET_INTERFACE_GUID(TYPE) TYPE##_GUID
+#endif
+
+// Using this in user code rather than INTERFACES->Get<TYPE>() will be C compatible for those places in plugins where
+// this may be needed. Unity code itself does not need this.
+#define UNITY_GET_INTERFACE(INTERFACES, TYPE) (TYPE*)INTERFACES->GetInterfaceSplit (UNITY_GET_INTERFACE_GUID(TYPE).m_GUIDHigh, UNITY_GET_INTERFACE_GUID(TYPE).m_GUIDLow);
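+// For example, from C (with a hypothetical 'interfaces' pointer received via UnityPluginLoad):
+//   IUnityGraphics* graphics = UNITY_GET_INTERFACE(interfaces, IUnityGraphics);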
+
+
+#ifdef __cplusplus
+struct IUnityInterface
+{
+};
+#else
+typedef void IUnityInterface;
+#endif
+
+
+typedef struct IUnityInterfaces
+{
+ // Returns an interface matching the guid.
+ // Returns nullptr if the given interface is unavailable in the active Unity runtime.
+ IUnityInterface* (UNITY_INTERFACE_API * GetInterface)(UnityInterfaceGUID guid);
+
+ // Registers a new interface.
+ void(UNITY_INTERFACE_API * RegisterInterface)(UnityInterfaceGUID guid, IUnityInterface * ptr);
+
+ // Split APIs for C
+ IUnityInterface* (UNITY_INTERFACE_API * GetInterfaceSplit)(unsigned long long guidHigh, unsigned long long guidLow);
+ void(UNITY_INTERFACE_API * RegisterInterfaceSplit)(unsigned long long guidHigh, unsigned long long guidLow, IUnityInterface * ptr);
+
+#ifdef __cplusplus
+ // Helper for GetInterface.
+ template<typename INTERFACE>
+ INTERFACE* Get()
+ {
+ return static_cast<INTERFACE*>(GetInterface(GetUnityInterfaceGUID<INTERFACE>()));
+ }
+
+ // Helper for RegisterInterface.
+ template<typename INTERFACE>
+ void Register(IUnityInterface* ptr)
+ {
+ RegisterInterface(GetUnityInterfaceGUID<INTERFACE>(), ptr);
+ }
+
+#endif
+} IUnityInterfaces;
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+ // If exported by a plugin, this function will be called when the plugin is loaded.
+ void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API UnityPluginLoad(IUnityInterfaces* unityInterfaces);
+ // If exported by a plugin, this function will be called when the plugin is about to be unloaded.
+ void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API UnityPluginUnload();
+
+#ifdef __cplusplus
+}
+#endif
+
+struct RenderSurfaceBase;
+typedef struct RenderSurfaceBase* UnityRenderBuffer;
+typedef unsigned int UnityTextureID;